java.io.DataInputStream

Here are examples of the Java API class java.io.DataInputStream, taken from open-source projects.

1. CodecIBXM#loadModule()

Project: MinecraftForge
File: CodecIBXM.java
/**
 * Decodes the data in the specified InputStream into an instance of
 * ibxm.Module.
 * @param input an InputStream containing the module file to be decoded.
 * @throws IllegalArgumentException if the data is not recognised as a module file.
 */
private static Module loadModule(InputStream input) throws IllegalArgumentException, IOException {
    DataInputStream din = new DataInputStream(input);
    // Read the first 60 bytes and test for the XM signature.
    byte[] header60 = new byte[60];
    din.readFully(header60);
    if (FastTracker2.is_xm(header60)) {
        return FastTracker2.load_xm(header60, din);
    }
    // Not XM: extend the header to 96 bytes and test for ScreamTracker 3.
    byte[] header96 = new byte[96];
    System.arraycopy(header60, 0, header96, 0, 60);
    din.readFully(header96, 60, 36);
    if (ScreamTracker3.is_s3m(header96)) {
        return ScreamTracker3.load_s3m(header96, din);
    }
    // Fall back to ProTracker, which requires a 1084-byte header.
    byte[] header1084 = new byte[1084];
    System.arraycopy(header96, 0, header1084, 0, 96);
    din.readFully(header1084, 96, 988);
    return ProTracker.load_mod(header1084, din);
}

2. CodecIBXM#loadModule()

Project: Kingdoms
File: CodecIBXM.java
/**
 * Decodes the data in the specified InputStream into an instance of
 * ibxm.Module.
 * @param input an InputStream containing the module file to be decoded.
 * @throws IllegalArgumentException if the data is not recognised as a module file.
 */
private static Module loadModule(InputStream input) throws IllegalArgumentException, IOException {
    DataInputStream stream = new DataInputStream(input);
    // Each format probe reuses the bytes already consumed by the previous
    // one, growing the header buffer as needed.
    byte[] xmHeader = new byte[60];
    stream.readFully(xmHeader);
    if (FastTracker2.is_xm(xmHeader)) {
        return FastTracker2.load_xm(xmHeader, stream);
    }
    byte[] s3mHeader = new byte[96];
    System.arraycopy(xmHeader, 0, s3mHeader, 0, 60);
    stream.readFully(s3mHeader, 60, 36);
    if (ScreamTracker3.is_s3m(s3mHeader)) {
        return ScreamTracker3.load_s3m(s3mHeader, stream);
    }
    // Last resort: ProTracker, whose header spans 1084 bytes.
    byte[] modHeader = new byte[1084];
    System.arraycopy(s3mHeader, 0, modHeader, 0, 96);
    stream.readFully(modHeader, 96, 988);
    return ProTracker.load_mod(modHeader, stream);
}

3. PFBParser#parsePFB()

Project: fop
File: PFBParser.java
/**
 * Parses a PFB file into a PFBData object.
 * @param in InputStream to load the PFB file from
 * @return PFBData memory representation of the font
 * @throws IOException In case of an I/O problem
 */
public PFBData parsePFB(InputStream in) throws IOException {
    PFBData result = new PFBData();
    BufferedInputStream buffered = new BufferedInputStream(in);
    DataInputStream data = new DataInputStream(buffered);
    // Peek at the first byte to detect the format, then rewind.
    data.mark(32);
    int leadByte = data.readUnsignedByte();
    data.reset();
    // A 0x80 lead byte marks the segmented PC (PFB) format; anything
    // else is treated as raw data.
    if (leadByte == 128) {
        result.setPFBFormat(PFBData.PFB_PC);
        parsePCFormat(result, data);
    } else {
        result.setPFBFormat(PFBData.PFB_RAW);
        parseRAWFormat(result, buffered);
    }
    return result;
}

4. ByteCodeTranslator#replaceInFile()

Project: CodenameOne
File: ByteCodeTranslator.java
/**
 * Applies a sequence of search/replacement string pairs to the given text
 * file, rewriting it in place.
 *
 * @param sourceFile the file to rewrite
 * @param values     alternating search/replacement strings (pairs)
 * @throws IOException if the file cannot be read or written
 */
private static void replaceInFile(File sourceFile, String... values) throws IOException {
    byte[] data = new byte[(int) sourceFile.length()];
    DataInputStream dis = new DataInputStream(new FileInputStream(sourceFile));
    try {
        dis.readFully(data);
    } finally {
        // The original leaked the stream when readFully threw.
        dis.close();
    }
    // Platform default charset, matching the FileWriter used below so the
    // read/modify/write round-trip is consistent.
    String str = new String(data);
    for (int iter = 0; iter < values.length; iter += 2) {
        str = str.replace(values[iter], values[iter + 1]);
    }
    FileWriter fios = new FileWriter(sourceFile);
    try {
        fios.write(str);
    } finally {
        // Likewise, close the writer even if write() fails.
        fios.close();
    }
}

5. FileValueSerializerTest#testReadFilenameAndByteArrayValue()

Project: camunda-bpm-platform
File: FileValueSerializerTest.java
@Test
public void testReadFilenameAndByteArrayValue() throws IOException {
    // Load the fixture file's bytes from the classpath.
    InputStream is = this.getClass().getClassLoader().getResourceAsStream("org/camunda/bpm/engine/test/standalone/variables/simpleFile.txt");
    // NOTE(review): available() is only an estimate of the remaining bytes;
    // it happens to work for small classpath resources but is not a general
    // way to size a buffer — confirm the fixture is always read in full.
    byte[] data = new byte[is.available()];
    DataInputStream dataInputStream = new DataInputStream(is);
    dataInputStream.readFully(data);
    dataInputStream.close();
    // Simulate persisted variable fields: filename in textValue, content
    // bytes in byteArrayValue; no mime type or encoding set.
    MockValueFields valueFields = new MockValueFields();
    String filename = "file.txt";
    valueFields.setTextValue(filename);
    valueFields.setByteArrayValue(data);
    FileValue fileValue = serializer.readValue(valueFields, true);
    // Filename round-trips; mime type was never set so it must be null.
    assertThat(fileValue.getFilename(), is(filename));
    assertThat(fileValue.getMimeType(), is(nullValue()));
    checkStreamFromValue(fileValue, "text");
}

6. ResourceClassLoader#loadClassData()

Project: hydra
File: ResourceClassLoader.java
/**
 * Loads a given file (presumably .class) into a byte array.
 * The file should be accessible as a resource, for example
 * it could be located on the classpath.
 *
 * @param name File name to load
 * @return Byte array read from the file
 * @throws IOException Is thrown when there
 *                     was some problem reading the file,
 *                     or the resource does not exist
 */
private byte[] loadClassData(String name) throws IOException {
    // Opening the file
    InputStream stream = getClass().getClassLoader().getResourceAsStream(name);
    if (stream == null) {
        // getResourceAsStream returns null when the resource is missing;
        // fail with a clear message instead of an NPE below.
        throw new IOException("Resource not found: " + name);
    }
    try {
        // available() is only an estimate of the remaining bytes and may
        // under-report for some stream types, so read to EOF in a loop
        // instead of sizing a single buffer from it.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        int n;
        while ((n = stream.read(chunk)) != -1) {
            out.write(chunk, 0, n);
        }
        return out.toByteArray();
    } finally {
        // The original leaked the stream when the read threw.
        stream.close();
    }
}

7. GeneratorParametersRecorder#getClassName()

Project: HotswapAgent
File: GeneratorParametersRecorder.java
/**
 * http://stackoverflow.com/questions/1649674/resolve-class-name-from-bytecode
 *
 * Extracts the fully-qualified class name from raw .class file bytes by
 * manually walking the constant pool.
 *
 * @param bytes raw class file contents
 * @return the class name in dotted form (e.g. "java.lang.String")
 * @throws Exception if the bytes are not a well-formed class file
 */
public static String getClassName(byte[] bytes) throws Exception {
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes));
    // skip header and class version
    dis.readLong();
    // The constant pool count is 1-based; entry indices start at 1.
    int cpcnt = (dis.readShort() & 0xffff) - 1;
    int[] classes = new int[cpcnt];
    String[] strings = new String[cpcnt];
    for (int i = 0; i < cpcnt; i++) {
        // One-byte tag identifying the constant pool entry type.
        int t = dis.read();
        if (t == 7)
            // CONSTANT_Class: 2-byte index of the UTF-8 name entry.
            classes[i] = dis.readShort() & 0xffff;
        else if (t == 1)
            // CONSTANT_Utf8: the string payload itself.
            strings[i] = dis.readUTF();
        else if (t == 5 || t == 6) {
            // CONSTANT_Long / CONSTANT_Double carry 8 bytes AND occupy two
            // constant pool slots, hence the extra i++.
            dis.readLong();
            i++;
        } else if (t == 8)
            // CONSTANT_String: 2-byte index.
            dis.readShort();
        else
            // NOTE(review): all remaining tags are assumed to carry 4 bytes,
            // which holds for the classic (pre-Java-7) constant kinds; a
            // MethodHandle (tag 15, 3 bytes) or MethodType (tag 16, 2 bytes)
            // entry would desynchronise this parser — confirm input classes.
            dis.readInt();
    }
    // skip access flags
    dis.readShort();
    // this_class index -> CONSTANT_Class -> UTF-8 name; '/' to '.' form.
    return strings[classes[(dis.readShort() & 0xffff) - 1] - 1].replace('/', '.');
}

8. FileValueSerializerTest#testReadFullValue()

Project: camunda-bpm-platform
File: FileValueSerializerTest.java
@Test
public void testReadFullValue() throws IOException {
    InputStream is = this.getClass().getClassLoader().getResourceAsStream("org/camunda/bpm/engine/test/standalone/variables/simpleFile.txt");
    byte[] data = new byte[is.available()];
    DataInputStream dataInputStream = new DataInputStream(is);
    dataInputStream.readFully(data);
    dataInputStream.close();
    MockValueFields valueFields = new MockValueFields();
    String filename = "file.txt";
    valueFields.setTextValue(filename);
    valueFields.setByteArrayValue(data);
    String mimeType = "text/plain";
    String encoding = "UTF-16";
    valueFields.setTextValue2(mimeType + SEPARATOR + encoding);
    FileValue fileValue = serializer.readValue(valueFields, true);
    assertThat(fileValue.getFilename(), is(filename));
    assertThat(fileValue.getMimeType(), is(mimeType));
    assertThat(fileValue.getEncoding(), is("UTF-16"));
    assertThat(fileValue.getEncodingAsCharset(), is(Charset.forName("UTF-16")));
    checkStreamFromValue(fileValue, "text");
}

9. FileValueSerializerTest#testReadFileNameEncodingAndByteArray()

Project: camunda-bpm-platform
File: FileValueSerializerTest.java
@Test
public void testReadFileNameEncodingAndByteArray() throws IOException {
    InputStream is = this.getClass().getClassLoader().getResourceAsStream("org/camunda/bpm/engine/test/standalone/variables/simpleFile.txt");
    byte[] data = new byte[is.available()];
    DataInputStream dataInputStream = new DataInputStream(is);
    dataInputStream.readFully(data);
    dataInputStream.close();
    MockValueFields valueFields = new MockValueFields();
    String filename = "file.txt";
    valueFields.setTextValue(filename);
    valueFields.setByteArrayValue(data);
    String encoding = SEPARATOR + "UTF-8";
    valueFields.setTextValue2(encoding);
    FileValue fileValue = serializer.readValue(valueFields, true);
    assertThat(fileValue.getFilename(), is(filename));
    assertThat(fileValue.getEncoding(), is("UTF-8"));
    assertThat(fileValue.getEncodingAsCharset(), is(Charset.forName("UTF-8")));
    checkStreamFromValue(fileValue, "text");
}

10. FileValueSerializerTest#testReadFileNameMimeTypeAndByteArray()

Project: camunda-bpm-platform
File: FileValueSerializerTest.java
@Test
public void testReadFileNameMimeTypeAndByteArray() throws IOException {
    InputStream is = this.getClass().getClassLoader().getResourceAsStream("org/camunda/bpm/engine/test/standalone/variables/simpleFile.txt");
    byte[] data = new byte[is.available()];
    DataInputStream dataInputStream = new DataInputStream(is);
    dataInputStream.readFully(data);
    dataInputStream.close();
    MockValueFields valueFields = new MockValueFields();
    String filename = "file.txt";
    valueFields.setTextValue(filename);
    valueFields.setByteArrayValue(data);
    String mimeType = "text/plain";
    valueFields.setTextValue2(mimeType + SEPARATOR);
    FileValue fileValue = serializer.readValue(valueFields, true);
    assertThat(fileValue.getFilename(), is(filename));
    assertThat(fileValue.getMimeType(), is(mimeType));
    checkStreamFromValue(fileValue, "text");
}

11. DerUtils#decodePrivateKey()

Project: Resteasy
File: DerUtils.java
/**
 * Decodes a PKCS#8-encoded RSA private key read from the given stream.
 * The stream is consumed to EOF and closed.
 *
 * @param is stream containing the DER-encoded PKCS#8 key
 * @return the decoded private key
 * @throws Exception if reading or key decoding fails
 */
public static PrivateKey decodePrivateKey(InputStream is) throws Exception {
    // available() is only an estimate of the remaining bytes; read to EOF
    // instead so the whole key is captured regardless of stream type.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] chunk = new byte[4096];
        int n;
        while ((n = is.read(chunk)) != -1) {
            baos.write(chunk, 0, n);
        }
    } finally {
        is.close();
    }
    byte[] keyBytes = baos.toByteArray();
    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
    // "BC" selects the BouncyCastle provider, as elsewhere in this utility.
    KeyFactory kf = KeyFactory.getInstance("RSA", "BC");
    return kf.generatePrivate(spec);
}

12. ExampleSignTest#getPrivate()

Project: Resteasy
File: ExampleSignTest.java
/**
 * Reads a PKCS#8-encoded RSA private key from the given stream.
 * The stream is consumed to EOF and closed.
 *
 * @param is stream containing the DER-encoded PKCS#8 key
 * @return the decoded private key
 * @throws Exception if reading or key decoding fails
 */
public static PrivateKey getPrivate(InputStream is) throws Exception {
    // available() is only an estimate of the remaining bytes; read to EOF
    // instead so the whole key is captured regardless of stream type.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] chunk = new byte[4096];
        int n;
        while ((n = is.read(chunk)) != -1) {
            baos.write(chunk, 0, n);
        }
    } finally {
        is.close();
    }
    byte[] keyBytes = baos.toByteArray();
    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
    KeyFactory kf = KeyFactory.getInstance("RSA");
    return kf.generatePrivate(spec);
}

13. DerUtils#decodePrivateKey()

Project: Resteasy
File: DerUtils.java
/**
 * Decodes a PKCS#8-encoded RSA private key read from the given stream.
 * The stream is consumed to EOF and closed.
 *
 * @param is stream containing the DER-encoded PKCS#8 key
 * @return the decoded private key
 * @throws Exception if reading or key decoding fails
 */
public static PrivateKey decodePrivateKey(InputStream is) throws Exception {
    // available() is only an estimate of the remaining bytes; read to EOF
    // instead so the whole key is captured regardless of stream type.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] chunk = new byte[4096];
        int n;
        while ((n = is.read(chunk)) != -1) {
            baos.write(chunk, 0, n);
        }
    } finally {
        is.close();
    }
    byte[] keyBytes = baos.toByteArray();
    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
    // "BC" selects the BouncyCastle provider, as elsewhere in this utility.
    KeyFactory kf = KeyFactory.getInstance("RSA", "BC");
    return kf.generatePrivate(spec);
}

14. TopKPhaseMapOutputKey#fromBytes()

Project: pinot
File: TopKPhaseMapOutputKey.java
/**
 * Deserializes a TopKPhaseMapOutputKey from its byte representation:
 * two length-prefixed byte strings (dimension name, then dimension value).
 *
 * @param buffer serialized key bytes
 * @return the deserialized key
 * @throws IOException if the buffer is truncated
 */
public static TopKPhaseMapOutputKey fromBytes(byte[] buffer) throws IOException {
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buffer));
    int length;
    byte[] bytes;
    // dimension name
    length = dis.readInt();
    bytes = new byte[length];
    // readFully instead of read(): read() may return fewer bytes than
    // requested, silently truncating the string; readFully either fills
    // the array or throws EOFException.
    dis.readFully(bytes);
    String dimensionName = new String(bytes);
    // dimension value
    length = dis.readInt();
    bytes = new byte[length];
    dis.readFully(bytes);
    String dimensionValue = new String(bytes);
    return new TopKPhaseMapOutputKey(dimensionName, dimensionValue);
}

15. ParquetInputSplit#readFields()

Project: parquet-mr
File: ParquetInputSplit.java
/**
   * {@inheritDoc}
   */
@Override
public void readFields(DataInput hin) throws IOException {
    // The split was written as a length-prefixed, gzipped byte array;
    // inflate it before handing it to the parent reader.
    byte[] bytes = readArray(hin);
    DataInputStream in = new DataInputStream(new GZIPInputStream(new ByteArrayInputStream(bytes)));
    super.readFields(in);
    this.end = in.readLong();
    // Optional row-group offset list, preceded by a presence flag.
    if (in.readBoolean()) {
        this.rowGroupOffsets = new long[in.readInt()];
        for (int i = 0; i < rowGroupOffsets.length; i++) {
            rowGroupOffsets[i] = in.readLong();
        }
    }
    in.close();
}

16. ModelByteBuffer#load()

Project: openjdk
File: ModelByteBuffer.java
public void load() throws IOException {
    // Delegate to the root buffer; only the root owns the backing file.
    if (root != this) {
        root.load();
        return;
    }
    // Already loaded: nothing to do.
    if (buffer != null)
        return;
    if (file == null) {
        throw new IllegalStateException("No file associated with this ByteBuffer!");
    }
    // Read capacity() bytes from the backing file into memory.
    DataInputStream is = new DataInputStream(getInputStream());
    buffer = new byte[(int) capacity()];
    offset = 0;
    is.readFully(buffer);
    is.close();
}

17. CustomClassLoader#loadClassData()

Project: kryo-serializers
File: CustomClassLoader.java
/**
 * Loads a given file (presumably .class) into a byte array.
 * The file should be accessible as a resource, for example
 * it could be located on the classpath.
 *
 * @param name File name to load
 * @return Byte array read from the file
 * @throws IOException Is thrown when there
 *               was some problem reading the file,
 *               or the resource does not exist
 */
private byte[] loadClassData(final String name) throws IOException {
    // Opening the file
    final InputStream stream = getClass().getClassLoader().getResourceAsStream(name);
    if (stream == null) {
        // getResourceAsStream returns null when the resource is missing;
        // fail with a clear message instead of an NPE below.
        throw new IOException("Resource not found: " + name);
    }
    try {
        // available() is only an estimate of the remaining bytes and may
        // under-report for some stream types, so read to EOF in a loop
        // instead of sizing a single buffer from it.
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        final byte[] chunk = new byte[4096];
        int n;
        while ((n = stream.read(chunk)) != -1) {
            out.write(chunk, 0, n);
        }
        return out.toByteArray();
    } finally {
        // The original leaked the stream when the read threw.
        stream.close();
    }
}

18. DerUtils#decodePrivateKey()

Project: keycloak
File: DerUtils.java
/**
 * Decodes a PKCS#8-encoded RSA private key read from the given stream.
 * The stream is consumed to EOF and closed.
 *
 * @param is stream containing the DER-encoded PKCS#8 key
 * @return the decoded private key
 * @throws Exception if reading or key decoding fails
 */
public static PrivateKey decodePrivateKey(InputStream is) throws Exception {
    // available() is only an estimate of the remaining bytes; read to EOF
    // instead so the whole key is captured regardless of stream type.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] chunk = new byte[4096];
        int n;
        while ((n = is.read(chunk)) != -1) {
            baos.write(chunk, 0, n);
        }
    } finally {
        is.close();
    }
    byte[] keyBytes = baos.toByteArray();
    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
    // "BC" selects the BouncyCastle provider, as elsewhere in this utility.
    KeyFactory kf = KeyFactory.getInstance("RSA", "BC");
    return kf.generatePrivate(spec);
}

19. MessageBatchTest#testSize()

Project: JGroups
File: MessageBatchTest.java
public void testSize() throws Exception {
    List<Message> msgs = createMessages();
    // Serialize the message list the way the UDP transport would.
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(output);
    Util.writeMessageList(b, a, "cluster".getBytes(), msgs, out, false, UDP_ID);
    out.flush();
    byte[] buf = output.toByteArray();
    System.out.println("size=" + buf.length + " bytes, " + msgs.size() + " messages");
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf));
    // version
    in.readShort();
    // flags
    in.readByte();
    // After skipping the 3-byte preamble, the list must round-trip intact.
    List<Message> list = Util.readMessageList(in, UDP_ID);
    assert msgs.size() == list.size();
}

20. ModelByteBuffer#load()

Project: jdk7u-jdk
File: ModelByteBuffer.java
public void load() throws IOException {
    // Delegate to the root buffer; only the root owns the backing file.
    if (root != this) {
        root.load();
        return;
    }
    // Already loaded: nothing to do.
    if (buffer != null)
        return;
    if (file == null) {
        throw new IllegalStateException("No file associated with this ByteBuffer!");
    }
    // Read capacity() bytes from the backing file into memory.
    DataInputStream is = new DataInputStream(getInputStream());
    buffer = new byte[(int) capacity()];
    offset = 0;
    is.readFully(buffer);
    is.close();
}

21. TestCompile#loadTrie()

Project: lucene-solr
File: TestCompile.java
/**
 * Loads a serialized stemmer Trie from the given file.
 *
 * @param path path to the serialized trie
 * @return the loaded Trie (a MultiTrie if the stored method contains 'M')
 * @throws IOException if the file cannot be read
 */
static Trie loadTrie(Path path) throws IOException {
    // try-with-resources: the original leaked the stream when readUTF or a
    // Trie constructor threw.
    try (DataInputStream is = new DataInputStream(new BufferedInputStream(Files.newInputStream(path)))) {
        String method = is.readUTF().toUpperCase(Locale.ROOT);
        // An 'M' in the method name selects the multi-trie representation.
        if (method.indexOf('M') < 0) {
            return new Trie(is);
        }
        return new MultiTrie(is);
    }
}

22. StorageTest#testAppender()

Project: intellij-community
File: StorageTest.java
public void testAppender() throws Exception {
    final int r = myStorage.createNewRecord();
    DataOutputStream out = new DataOutputStream(myStorage.appendStream(r));
    // Write 10000 ints, periodically reopening the append stream to force
    // the storage to flush/drop its appender cache mid-write.
    for (int i = 0; i < 10000; i++) {
        out.writeInt(i);
        if (i % 100 == 0) {
            // Drop the appenders cache
            myStorage.readStream(r);
            out.close();
            out = new DataOutputStream(myStorage.appendStream(r));
        }
    }
    out.close();
    // Everything appended across the reopenings must read back in order.
    DataInputStream in = new DataInputStream(myStorage.readStream(r));
    for (int i = 0; i < 10000; i++) {
        assertEquals(i, in.readInt());
    }
    in.close();
}

23. AccumuloStorage#getSplitComparable()

Project: incubator-rya
File: AccumuloStorage.java
@Override
public WritableComparable<?> getSplitComparable(InputSplit inputSplit) throws IOException {
    //cannot get access to the range directly
    AccumuloInputFormat.RangeInputSplit rangeInputSplit = (AccumuloInputFormat.RangeInputSplit) inputSplit;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    rangeInputSplit.write(out);
    out.close();
    DataInputStream stream = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    Range range = new Range();
    range.readFields(stream);
    stream.close();
    return range;
}

24. Vocabulary#read()

Project: incubator-joshua
File: Vocabulary.java
/**
   * Reads a vocabulary from file. This deletes any additions to the vocabulary made prior to
   * reading the file.
   *
   * @param vocab_file path to a vocabulary file
   * @return Returns true if vocabulary was read without mismatches or collisions.
   * @throws IOException if the file cannot be found or read properly
   */
public static boolean read(final File vocab_file) throws IOException {
    DataInputStream vocab_stream = new DataInputStream(new BufferedInputStream(new FileInputStream(vocab_file)));
    int size;
    try {
        size = vocab_stream.readInt();
        LOG.info("Read {} entries from the vocabulary", size);
        clear();
        for (int i = 0; i < size; i++) {
            int id = vocab_stream.readInt();
            String token = vocab_stream.readUTF();
            // A mismatch means the stored id disagrees with the freshly
            // assigned one: report failure.
            if (id != Math.abs(id(token))) {
                return false;
            }
        }
    } finally {
        // Single close point; the original leaked the stream when readInt/
        // readUTF threw mid-loop, and duplicated close() on each exit path.
        vocab_stream.close();
    }
    // NOTE(review): the "+ 1" offset is inferred from this comparison only
    // (presumably a reserved entry at index 0) — confirm against the writer.
    return (size + 1 == idToString.size());
}

25. TrafficAnalyzer#main()

Project: hudson-2.x
File: TrafficAnalyzer.java
public static void main(String[] args) throws Exception {
    // NOTE(review): hard-coded path to a captured traffic dump from one
    // specific investigation; adjust before reuse.
    File f = new File("/home/kohsuke/ws/hudson/investigations/javafx-windows-hang/out.log");
    DataInputStream fin = new DataInputStream(new FileInputStream(f));
    // skip preamble
    fin.readFully(new byte[4]);
    ObjectInputStream ois = new ObjectInputStream(fin);
    // Reads Command objects until the stream ends; the loop has no exit
    // condition and terminates via the EOFException from readObject.
    for (int n = 0; ; n++) {
        Command o = (Command) ois.readObject();
        System.out.println("#" + n + " : " + o);
        if (o instanceof RPCRequest) {
            RPCRequest request = (RPCRequest) o;
            System.out.print("  (");
            boolean first = true;
            // Print the RPC arguments as a comma-separated list.
            for (Object argument : request.getArguments()) {
                if (first)
                    first = false;
                else
                    System.out.print(",");
                System.out.print(argument);
            }
            System.out.println(")");
        }
        // Commands may carry a creation stack trace for debugging.
        if (o.createdAt != null)
            o.createdAt.printStackTrace(System.out);
    }
}

26. DataOperations#extractRawLogFromDump()

Project: HiTune
File: DataOperations.java
/**
 * Extracts the raw log text from a binary chunk dump: reads chunks from
 * "&lt;directory&gt;&lt;fileName&gt;.bin" and writes their payloads to
 * "&lt;directory&gt;&lt;fileName&gt;.raw".
 *
 * @param directory directory containing the dump (used as a path prefix)
 * @param fileName  base file name without extension
 * @throws Exception if reading or writing fails
 */
public static void extractRawLogFromDump(String directory, String fileName) throws Exception {
    File inputFile = new File(directory + fileName + ".bin");
    File outputFile = new File(directory + fileName + ".raw");
    // try/finally on both resources: the original leaked them when
    // ChunkImpl.read or write threw anything other than EOFException.
    DataInputStream dis = new DataInputStream(new FileInputStream(inputFile));
    try {
        FileWriter out = new FileWriter(outputFile);
        try {
            boolean eof = false;
            // The chunk reader signals end of input via EOFException.
            do {
                try {
                    Chunk chunk = ChunkImpl.read(dis);
                    out.write(new String(chunk.getData()));
                } catch (EOFException e) {
                    eof = true;
                }
            } while (!eof);
        } finally {
            out.close();
        }
    } finally {
        dis.close();
    }
}

27. TestHbaseObjectWritable#doType()

Project: hindex
File: TestHbaseObjectWritable.java
/**
 * Round-trips a value through HbaseObjectWritable serialization and
 * returns the deserialized copy.
 */
private Object doType(final Configuration conf, final Object value, final Class<?> clazz) throws IOException {
    // Serialize the value...
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(byteStream);
    HbaseObjectWritable.writeObject(out, value, clazz, conf);
    out.close();
    // ...then read it back through the same mechanism.
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()));
    Object roundTripped = HbaseObjectWritable.readObject(dis, conf);
    dis.close();
    return roundTripped;
}

28. ClusterSetup#readFile()

Project: helix
File: ClusterSetup.java
// TODO: remove this. has moved to ZkHelixAdmin
private static byte[] readFile(String filePath) throws IOException {
    File file = new File(filePath);
    int size = (int) file.length();
    byte[] bytes = new byte[size];
    DataInputStream dis = new DataInputStream(new FileInputStream(file));
    int read = 0;
    int numRead = 0;
    while (read < bytes.length && (numRead = dis.read(bytes, read, bytes.length - read)) >= 0) {
        read = read + numRead;
    }
    dis.close();
    return bytes;
}

29. TestIO#testCustomTypesIO()

Project: hadoop-mapreduce
File: TestIO.java
public void testCustomTypesIO() throws IOException {
    // NOTE(review): the exact framing (marker byte, length, payload) is
    // defined by TypedBytesOutput.writeRaw — confirm against that class.
    byte[] rawBytes = new byte[] { 100, 0, 0, 0, 3, 1, 2, 3 };
    // Write the raw bytes out through the TypedBytes layer...
    FileOutputStream ostream = new FileOutputStream(tmpfile);
    DataOutputStream dostream = new DataOutputStream(ostream);
    TypedBytesOutput out = new TypedBytesOutput(dostream);
    out.writeRaw(rawBytes);
    dostream.close();
    ostream.close();
    // ...then read them back and verify they round-trip unchanged.
    FileInputStream istream = new FileInputStream(tmpfile);
    DataInputStream distream = new DataInputStream(istream);
    TypedBytesInput in = new TypedBytesInput(distream);
    assertTrue(Arrays.equals(rawBytes, in.readRaw()));
    distream.close();
    istream.close();
}

30. TestStorageRestore#getFileMD5()

Project: hadoop-hdfs
File: TestStorageRestore.java
/**
   * This function returns a md5 hash of a file.
   * 
   * @param file the file to hash
   * @return The md5 string (lower-case hex, without leading zeros)
   * @throws Exception if the file cannot be read or MD5 is unavailable
   */
public String getFileMD5(File file) throws Exception {
    MessageDigest mD = MessageDigest.getInstance("MD5");
    DataInputStream dis = new DataInputStream(new FileInputStream(file));
    try {
        // Read in chunks instead of one byte at a time: the digest result
        // is identical, but this avoids a stream call per byte and the
        // EOFException-as-control-flow of the original.
        byte[] buffer = new byte[8192];
        int n;
        while ((n = dis.read(buffer)) != -1) {
            mD.update(buffer, 0, n);
        }
    } finally {
        // The original skipped close() when an exception escaped.
        dis.close();
    }
    // BigInteger(1, ...) treats the digest as unsigned.
    BigInteger bigInt = new BigInteger(1, mD.digest());
    return bigInt.toString(16);
}

31. TestMapWritable#testMultipleCallsToReadFieldsAreSafe()

Project: hadoop-common
File: TestMapWritable.java
/**
   * Assert MapWritable does not grow across calls to readFields.
   * @throws Exception
   * @see <a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a>
   */
public void testMultipleCallsToReadFieldsAreSafe() throws Exception {
    // Create an instance and add a key/value.
    MapWritable m = new MapWritable();
    final Text t = new Text(getName());
    m.put(t, t);
    // Get current size of map.  Key values are 't'.
    int count = m.size();
    // Now serialize... save off the bytes.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    m.write(dos);
    dos.close();
    // Now add new values to the MapWritable.
    m.put(new Text("key1"), new Text("value1"));
    m.put(new Text("key2"), new Text("value2"));
    // Now deserialize the original MapWritable.  Ensure count and key values
    // match original state.
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    m.readFields(dis);
    // readFields must RESET the map to the serialized state, not merge
    // the extra entries added above (the HADOOP-2244 regression).
    assertEquals(count, m.size());
    assertTrue(m.get(t).equals(t));
    dis.close();
}

32. TaskSplitReader#main()

Project: hadoop-book
File: TaskSplitReader.java
public static void main(String... args) throws IOException {
    // args[0]: path to a serialized task split index file.
    String taskSplitFile = args[0];
    Configuration conf = new Configuration();
    DataInputStream is = new DataInputStream(new FileInputStream(taskSplitFile));
    JobSplit.TaskSplitIndex taskSplitIndex = new JobSplit.TaskSplitIndex();
    taskSplitIndex.readFields(is);
    is.close();
    // Resolve the actual InputSplit the index points at and dump it.
    Object split = getSplitDetails(conf, new Path(taskSplitIndex.getSplitLocation()), taskSplitIndex.getStartOffset());
    System.out.println("InputSplit instance class = " + split.getClass().getName());
    System.out.println("ToString on split = " + split);
    System.out.println("Reflection fields = " + ToStringBuilder.reflectionToString(split, ToStringStyle.SHORT_PREFIX_STYLE));
}

33. TestMapWritable#testMultipleCallsToReadFieldsAreSafe()

Project: hadoop-20
File: TestMapWritable.java
/**
   * Assert MapWritable does not grow across calls to readFields.
   * @throws Exception
   * @see <a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a>
   */
public void testMultipleCallsToReadFieldsAreSafe() throws Exception {
    // Create an instance and add a key/value.
    MapWritable m = new MapWritable();
    final Text t = new Text(getName());
    m.put(t, t);
    // Get current size of map.  Key values are 't'.
    int count = m.size();
    // Now serialize... save off the bytes.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    m.write(dos);
    dos.close();
    // Now add new values to the MapWritable.
    m.put(new Text("key1"), new Text("value1"));
    m.put(new Text("key2"), new Text("value2"));
    // Now deserialize the original MapWritable.  Ensure count and key values
    // match original state.
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    m.readFields(dis);
    // readFields must RESET the map to the serialized state, not merge
    // the extra entries added above (the HADOOP-2244 regression).
    assertEquals(count, m.size());
    assertTrue(m.get(t).equals(t));
    dis.close();
}

34. TestTFile#someReadingWithMetaBlock()

Project: hadoop-20
File: TestTFile.java
private void someReadingWithMetaBlock(Reader reader) throws IOException {
    DataInputStream din = null;
    readNumMetablocks(reader, 10);
    // Requesting a non-existent meta block must throw.
    try {
        din = reader.getMetaBlock("NO ONE");
        // NOTE(review): assertTrue(false) marks unreachable code; fail()
        // would express the intent more clearly.
        assertTrue(false);
    } catch (MetaBlockDoesNotExist me) {
    // expected
    }
    din = reader.getMetaBlock("TFileMeta100");
    // This meta block is empty, so the first read reports EOF (-1).
    int read = din.read();
    assertTrue("check for status", (read == -1));
    din.close();
}

35. TestOpenFilesInfo#testSerialize()

Project: hadoop-20
File: TestOpenFilesInfo.java
@Test
public void testSerialize() throws IOException {
    createOpenFiles(10, "testSerialize");
    FSNamesystem ns = cluster.getNameNode().namesystem;
    OpenFilesInfo info = ns.getOpenFiles();
    // Serialize object
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bout);
    info.write(out);
    // Deserialize object.
    ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
    DataInputStream in = new DataInputStream(bin);
    OpenFilesInfo info1 = new OpenFilesInfo();
    info1.readFields(in);
    // Verify and cleanup.
    verifyLease(info);
    // The round-tripped object must compare equal to the original.
    assertEquals(info, info1);
    bout.close();
    bin.close();
    out.close();
    in.close();
}

36. TestMessageStores#doCheckpoint()

Project: giraph
File: TestMessageStores.java
/**
 * Checkpoints a message store to a scratch file and reloads it into a
 * freshly created store, returning the reloaded instance.
 */
private <S extends MessageStore<IntWritable, IntWritable>> S doCheckpoint(MessageStoreFactory<IntWritable, IntWritable, S> messageStoreFactory, S messageStore) throws IOException {
    // Start from a clean scratch file.
    File file = new File(directory, "messageStoreTest");
    if (file.exists()) {
        file.delete();
    }
    file.createNewFile();
    // Write the store out...
    DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
    messageStore.write(out);
    out.close();
    // ...then read it back into a brand-new store instance.
    S reloaded = messageStoreFactory.newStore();
    DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
    reloaded.readFields(in);
    in.close();
    file.delete();
    return reloaded;
}

37. FetchContextTest#testPersistence()

Project: fred
File: FetchContextTest.java
public void testPersistence() throws IOException, StorageFormatException {
    // Build a default context, write it to a bucket, then read it back.
    FetchContext context = HighLevelSimpleClientImpl.makeDefaultFetchContext(Long.MAX_VALUE, Long.MAX_VALUE, new ArrayBucketFactory(), new SimpleEventProducer());
    ArrayBucket bucket = new ArrayBucket();
    DataOutputStream dos = new DataOutputStream(bucket.getOutputStream());
    context.writeTo(dos);
    dos.close();
    // Serialization must have produced some bytes.
    assert (bucket.size() != 0);
    DataInputStream dis = new DataInputStream(bucket.getInputStream());
    FetchContext ctx = new FetchContext(dis);
    dis.close();
    // The deserialized context must equal the one written out.
    assertTrue(ctx.equals(context));
    bucket.free();
}

38. FileUtil#equalStreams()

Project: fred
File: FileUtil.java
/**
 * Compares the next {@code size} bytes of two streams for equality.
 * Both streams are read but neither is closed.
 *
 * @param a first stream
 * @param b second stream
 * @param size number of bytes to compare from each stream
 * @return true if the first {@code size} bytes of both streams are equal
 * @throws IOException if either stream ends early or a read fails
 */
public static boolean equalStreams(InputStream a, InputStream b, long size) throws IOException {
    byte[] aBuffer = new byte[BUFFER_SIZE];
    byte[] bBuffer = new byte[BUFFER_SIZE];
    DataInputStream aIn = new DataInputStream(a);
    DataInputStream bIn = new DataInputStream(b);
    long checked = 0;
    while (checked < size) {
        int toRead = (int) Math.min(BUFFER_SIZE, size - checked);
        aIn.readFully(aBuffer, 0, toRead);
        bIn.readFully(bBuffer, 0, toRead);
        // This compares the ENTIRE buffers, not just the toRead prefix.
        // That is still correct: on the first pass any bytes past toRead are
        // zero in both arrays, and on later passes they hold bytes from the
        // previous pass, which were already found equal (otherwise we would
        // have returned false). MessageDigest.isEqual additionally gives a
        // constant-time comparison.
        if (!MessageDigest.isEqual(aBuffer, bBuffer))
            return false;
        checked += toRead;
    }
    return true;
}

39. NewLZMACompressor#decompress()

Project: fred
File: NewLZMACompressor.java
@Override
public long decompress(InputStream is, OutputStream os, long maxLength, long maxCheckSizeBytes) throws IOException, CompressionOutputSizeException {
    // LZMA streams start with a 5-byte properties header: one byte of
    // lc/lp/pb settings followed by a 4-byte little-endian dictionary size.
    byte[] props = new byte[5];
    DataInputStream dis = new DataInputStream(is);
    dis.readFully(props);
    // Count decoded bytes so we can report the output size at the end.
    CountedOutputStream cos = new CountedOutputStream(os);
    // Decode the little-endian dictionary size from header bytes 1..4.
    int dictionarySize = 0;
    for (int i = 0; i < 4; i++) dictionarySize += ((props[1 + i]) & 0xFF) << (i * 8);
    // A negative value means the unsigned 32-bit size overflowed int: reject.
    if (dictionarySize < 0)
        throw new InvalidCompressedDataException("Invalid dictionary size");
    if (dictionarySize > MAX_DICTIONARY_SIZE)
        throw new TooBigDictionaryException();
    Decoder decoder = new Decoder();
    if (!decoder.SetDecoderProperties(props))
        throw new InvalidCompressedDataException("Invalid properties");
    // Decode the remainder of the input stream, capped at maxLength.
    decoder.Code(is, cos, maxLength);
    //cos.flush();
    return cos.written();
}

40. GIFFilter#readFilter()

Project: fred
File: GIFFilter.java
@Override
public void readFilter(InputStream input, OutputStream output, String charset, HashMap<String, String> otherParams, FilterCallback cb) throws DataFilterException, IOException {
    DataInputStream dis = new DataInputStream(input);
    // Check the header
    byte[] headerCheck = new byte[HEADER_SIZE];
    dis.readFully(headerCheck);
    // Only the two standard GIF signatures (GIF87a / GIF89a) are accepted.
    if ((!Arrays.equals(headerCheck, gif87aHeader)) && (!Arrays.equals(headerCheck, gif89aHeader))) {
        throwHeaderError(l10n("invalidHeaderTitle"), l10n("invalidHeader"));
    }
    // Header is valid: emit it, then pass the rest of the stream through.
    output.write(headerCheck);
    FileUtil.copy(dis, output, -1);
    output.flush();
}

41. GunzipDecorator#append()

Project: flume
File: GunzipDecorator.java
@Override
public void append(Event e) throws IOException, InterruptedException {
    byte[] bs = e.get(GZDOC);
    if (bs == null) {
        super.append(e);
        passthrough.incrementAndGet();
        return;
    }
    gzipCnt.incrementAndGet();
    gzipSize.addAndGet(bs.length);
    ByteArrayInputStream bais = new ByteArrayInputStream(bs);
    GZIPInputStreamExposeSize gzis = new GZIPInputStreamExposeSize(bais);
    DataInputStream dis = new DataInputStream(gzis);
    WriteableEvent out = new WriteableEvent();
    out.readFields(dis);
    // must be done before closed.
    long gunSz = gzis.getDecompressSize();
    dis.close();
    super.append(out);
    gunzipSize.addAndGet(gunSz);
}

42. ClassReader#readAttribute()

Project: error-prone-javac
File: ClassReader.java
public Attribute readAttribute() throws IOException {
    // Class-file attribute layout: u2 name index, u4 length, then `length`
    // bytes of attribute payload.
    int name_index = readUnsignedShort();
    int length = readInt();
    byte[] data = new byte[length];
    readFully(data);
    // Temporarily redirect this reader's input to the payload so the factory
    // can parse it through the same reader; always restore the outer stream.
    DataInputStream prev = in;
    in = new DataInputStream(new ByteArrayInputStream(data));
    try {
        return attributeFactory.createAttribute(this, name_index, data);
    } finally {
        in = prev;
    }
}

43. TestTypedProtobufWritable#testMessageReadWriteEmpty()

Project: elephant-bird
File: TestTypedProtobufWritable.java
@Test
public void testMessageReadWriteEmpty() throws IOException {
    DataOutputStream dos = new DataOutputStream(new FileOutputStream("test3.txt"));
    TypedProtobufWritable<AddressBook> empty = new TypedProtobufWritable<AddressBook>();
    empty.write(dos);
    dos.close();
    DataInputStream dis = new DataInputStream(new FileInputStream("test3.txt"));
    TypedProtobufWritable<Message> after = new TypedProtobufWritable<Message>();
    after.readFields(dis);
    dis.close();
    AddressBook ab2 = (AddressBook) after.get();
    assertNull(ab2);
}

44. TestTypedProtobufWritable#testMessageReadWrite()

Project: elephant-bird
File: TestTypedProtobufWritable.java
@Test
public void testMessageReadWrite() throws IOException {
    DataOutputStream dos = new DataOutputStream(new FileOutputStream("test2.txt"));
    referenceAbWritable.write(dos);
    dos.close();
    DataInputStream dis = new DataInputStream(new FileInputStream("test2.txt"));
    TypedProtobufWritable<Message> after = new TypedProtobufWritable<Message>();
    after.readFields(dis);
    dis.close();
    AddressBook ab2 = (AddressBook) after.get();
    assertEquals(referenceAb, ab2);
    assertEquals(referenceAbWritable.hashCode(), after.hashCode());
}

45. TestTypedProtobufWritable#testReadWrite()

Project: elephant-bird
File: TestTypedProtobufWritable.java
@Test
public void testReadWrite() throws IOException {
    DataOutputStream dos = new DataOutputStream(new FileOutputStream("test.txt"));
    referenceAbWritable.write(dos);
    dos.close();
    DataInputStream dis = new DataInputStream(new FileInputStream("test.txt"));
    TypedProtobufWritable<AddressBook> after = new TypedProtobufWritable<AddressBook>();
    after.readFields(dis);
    dis.close();
    AddressBook ab2 = after.get();
    assertEquals(referenceAb, ab2);
    assertEquals(referenceAbWritable.hashCode(), after.hashCode());
}

46. TestProtobufWritable#testReadWrite()

Project: elephant-bird
File: TestProtobufWritable.java
@Test
public void testReadWrite() throws IOException {
    DataOutputStream dos = new DataOutputStream(new FileOutputStream("test.txt"));
    referenceAbWritable.write(dos);
    dos.close();
    DataInputStream dis = new DataInputStream(new FileInputStream("test.txt"));
    ProtobufWritable<AddressBook> after = new ProtobufWritable<AddressBook>(new TypeRef<AddressBook>() {
    });
    after.readFields(dis);
    dis.close();
    AddressBook ab2 = after.get();
    assertEquals(referenceAb, ab2);
    assertEquals(referenceAbWritable.hashCode(), after.hashCode());
}

47. PrepareStatementTest#testReadBlobCloseToMaxDssLength()

Project: derby
File: PrepareStatementTest.java
/**
     * Test fix for DERBY-4088 where an ArrayIndexOutOfBoundsException was
     * thrown by DDMReader.readBytes() when reading a BLOB value whose length
     * was close to the maximum length of a DSS.
     */
public void testReadBlobCloseToMaxDssLength() throws Exception {
    // max DSS length is 32767
    final int length = 32766;
    // Create test data with the requested length
    DataInputStream stream1 = new DataInputStream(new LoopingAlphabetStream(length));
    byte[] bytes = new byte[length];
    stream1.readFully(bytes);
    // See if the test data can be sent to the server and back with
    // no errors.
    PreparedStatement ps = prepareStatement("values cast(? as blob)");
    ps.setBytes(1, bytes);
    ResultSet rs = ps.executeQuery();
    assertTrue("empty result set", rs.next());
    // The returned stream must byte-compare equal to a fresh alphabet stream.
    InputStream stream2 = rs.getBinaryStream(1);
    assertEquals(new LoopingAlphabetStream(length), stream2);
    assertFalse("too many rows", rs.next());
    rs.close();
}

48. StorageTest#testAppender()

Project: consulo
File: StorageTest.java
public void testAppender() throws Exception {
    // Writes 10000 ints to a record via append streams, periodically cycling
    // the appender to exercise the appender cache, then reads them back.
    final int r = myStorage.createNewRecord();
    DataOutputStream out = new DataOutputStream(myStorage.appendStream(r));
    for (int i = 0; i < 10000; i++) {
        out.writeInt(i);
        if (i % 100 == 0) {
            // Drop the appenders cache
            myStorage.readStream(r);
            out.close();
            out = new DataOutputStream(myStorage.appendStream(r));
        }
    }
    out.close();
    // Everything appended across all stream generations must read back in order.
    DataInputStream in = new DataInputStream(myStorage.readStream(r));
    for (int i = 0; i < 10000; i++) {
        assertEquals(i, in.readInt());
    }
    in.close();
}

49. ClassReader#readAttribute()

Project: ceylon-compiler
File: ClassReader.java
public Attribute readAttribute() throws IOException {
    // Class-file attribute layout: u2 name index, u4 length, then `length`
    // bytes of attribute payload.
    int name_index = readUnsignedShort();
    int length = readInt();
    byte[] data = new byte[length];
    readFully(data);
    // Temporarily redirect this reader's input to the payload so the factory
    // can parse it through the same reader; always restore the outer stream.
    DataInputStream prev = in;
    in = new DataInputStream(new ByteArrayInputStream(data));
    try {
        return attributeFactory.createAttribute(this, name_index, data);
    } finally {
        in = prev;
    }
}

50. ClassReader#readAttribute()

Project: ceylon
File: ClassReader.java
public Attribute readAttribute() throws IOException {
    // Class-file attribute layout: u2 name index, u4 length, then `length`
    // bytes of attribute payload.
    int name_index = readUnsignedShort();
    int length = readInt();
    byte[] data = new byte[length];
    readFully(data);
    // Temporarily redirect this reader's input to the payload so the factory
    // can parse it through the same reader; always restore the outer stream.
    DataInputStream prev = in;
    in = new DataInputStream(new ByteArrayInputStream(data));
    try {
        return attributeFactory.createAttribute(this, name_index, data);
    } finally {
        in = prev;
    }
}

51. DataOutputTest#testSequentialWriter()

Project: cassandra
File: DataOutputTest.java
@Test
public void testSequentialWriter() throws IOException {
    File file = FileUtils.createTempFile("dataoutput", "test");
    SequentialWriterOption option = SequentialWriterOption.newBuilder().bufferSize(32).finishOnClose(true).build();
    final SequentialWriter writer = new SequentialWriter(file, option);
    DataOutputStreamPlus write = new WrappedDataOutputStreamPlus(writer);
    DataInput canon = testWrite(write);
    write.flush();
    write.close();
    DataInputStream test = new DataInputStream(new FileInputStream(file));
    testRead(test, canon);
    test.close();
    Assert.assertTrue(file.delete());
}

52. FastBlobTypeSerializationStateTest#serializeAndDeserialize()

Project: zeno
File: FastBlobTypeSerializationStateTest.java
@Test
public void serializeAndDeserialize() throws Exception {
    /// initialize data in "from" state
    addData(srcState, new byte[] { 1, 2 }, true, true);
    addData(srcState, new byte[] { 3, 4, 5 }, true, false);
    addData(srcState, new byte[] { 6, 7, 8, 9 }, false, true);
    final File f = File.createTempFile("pre", "suf");
    DataOutputStream dos = new DataOutputStream(new FileOutputStream(f));
    srcState.serializeTo(dos);
    dos.close();
    DataInputStream dis = new DataInputStream(new FileInputStream(f));
    destState.deserializeFrom(dis, 2);
    dis.close();
    /// assert data was copied
    assertData(destState, new byte[] { 1, 2 }, true, true);
    assertData(destState, new byte[] { 3, 4, 5 }, true, false);
    assertData(destState, new byte[] { 6, 7, 8, 9 }, false, true);
    f.delete();
}

53. FileReader#readFile()

Project: WS-Attacker
File: FileReader.java
/**
 * Reads an entire text file into a String, appending "\r\n" after every
 * line (i.e. line endings are normalized to CRLF, matching the original
 * behaviour).
 *
 * @param fileName path of the file to read
 * @return the file contents with one "\r\n" after each line
 * @throws IOException if the file cannot be opened or read
 */
public static String readFile(final String fileName) throws IOException {
    StringBuilder sb = new StringBuilder();
    // try-with-resources closes the reader even when readLine() throws; the
    // original leaked the stream on any IOException. The pointless
    // DataInputStream wrapper is gone — it added nothing for text reading.
    // NOTE(review): the platform default charset is kept to preserve existing
    // behaviour; pass an explicit Charset here if UTF-8 is intended.
    try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(fileName)))) {
        String strLine;
        // Read File Line By Line
        while ((strLine = br.readLine()) != null) {
            sb.append(strLine).append("\r\n");
        }
    }
    return sb.toString();
}

54. SocketResourceFactory#negotiateProtocol()

Project: voldemort
File: SocketResourceFactory.java
/**
 * Proposes a request-format protocol to the server and waits for its
 * two-byte verdict.
 *
 * @param socket the connected socket wrapper whose streams are used
 * @param type the protocol the client wants to speak
 * @throws IOException if the socket streams fail
 * @throws VoldemortException if the server rejects the protocol ("no") or
 *         answers with anything other than "ok"/"no"
 */
private void negotiateProtocol(SocketAndStreams socket, RequestFormatType type) throws IOException {
    OutputStream outputStream = socket.getOutputStream();
    // Send the protocol code as UTF-8 bytes and flush so the server sees it now.
    byte[] proposal = ByteUtils.getBytes(type.getCode(), "UTF-8");
    outputStream.write(proposal);
    outputStream.flush();
    // The server always answers with exactly two bytes: "ok" or "no".
    DataInputStream inputStream = socket.getInputStream();
    byte[] responseBytes = new byte[2];
    inputStream.readFully(responseBytes);
    String response = ByteUtils.getString(responseBytes, "UTF-8");
    if (response.equals("ok"))
        return;
    else if (response.equals("no"))
        // BUG FIX: corrected the typo "protcol" in the error message.
        throw new VoldemortException(type.getDisplayName() + " is not an acceptable protocol for the server.");
    else
        throw new VoldemortException("Unknown server response: " + response);
}

55. SuccinctStreamTest#setUp()

Project: succinct
File: SuccinctStreamTest.java
/**
   * Set up test.
   *
   * @throws Exception
   */
public void setUp() throws Exception {
    super.setUp();
    // Slurp the raw test file fully into memory.
    // NOTE(review): dis is never closed; acceptable in a short-lived test but
    // a try-with-resources would be cleaner.
    File inputFile = new File(testFileRaw);
    byte[] fileData = new byte[(int) inputFile.length()];
    DataInputStream dis = new DataInputStream(new FileInputStream(inputFile));
    dis.readFully(fileData);
    // Build a succinct representation, persist it, and reopen it as a stream.
    SuccinctBuffer sBuf = new SuccinctBuffer(fileData);
    sBuf.writeToFile(testFileSuccinct);
    sCore = new SuccinctStream(new Path(testFileSuccinct));
}

56. SuccinctFileStreamTest#setUp()

Project: succinct
File: SuccinctFileStreamTest.java
/**
   * Set up test.
   *
   * @throws Exception
   */
public void setUp() throws Exception {
    super.setUp();
    // Slurp the raw test file fully into memory.
    // NOTE(review): dis is never closed; acceptable in a short-lived test but
    // a try-with-resources would be cleaner.
    File inputFile = new File(testFileRaw);
    data = new byte[(int) inputFile.length()];
    DataInputStream dis = new DataInputStream(new FileInputStream(inputFile));
    dis.readFully(data);
    // Expose the byte array through the Source abstraction used by the tests.
    fileData = new Source() {

        @Override
        public int length() {
            return data.length;
        }

        @Override
        public int get(int i) {
            return data[i];
        }
    };
    // Build a succinct file, persist it, and reopen it as a stream under test.
    SuccinctFileBuffer sBuf = new SuccinctFileBuffer(data);
    sBuf.writeToFile(testFileSuccinct);
    sFile = new SuccinctFileStream(new Path(testFileSuccinct));
}

57. SAISTest#testBuildSuffixArray2()

Project: succinct
File: SAISTest.java
public void testBuildSuffixArray2() throws Exception {
    // Builds a suffix array from the char-converted data and compares each
    // entry against the reference SA stored on disk.
    IntArray SA = SAIS.buildSuffixArray(convertToCharArray(data));
    long sum = 0;
    DataInputStream dIS = new DataInputStream(new FileInputStream(new File(testFileSA)));
    int[] testSA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < data.length; i++) {
        assertEquals(testSA[i], SA.get(i));
        // A valid SA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += SA.get(i);
        sum %= data.length;
    }
    assertEquals(sum, 0L);
}

58. SAISTest#testBuildSuffixArray1()

Project: succinct
File: SAISTest.java
public void testBuildSuffixArray1() throws Exception {
    // Builds a suffix array directly from the byte data and compares each
    // entry against the reference SA stored on disk.
    IntArray SA = SAIS.buildSuffixArray(data);
    long sum = 0;
    DataInputStream dIS = new DataInputStream(new FileInputStream(new File(testFileSA)));
    int[] testSA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < data.length; i++) {
        assertEquals(testSA[i], SA.get(i));
        // A valid SA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += SA.get(i);
        sum %= data.length;
    }
    assertEquals(sum, 0L);
}

59. SAISTest#setUp()

Project: succinct
File: SAISTest.java
/**
   * Set up test.
   *
   * @throws Exception
   */
public void setUp() throws Exception {
    super.setUp();
    // Slurp the raw test file fully into memory.
    // NOTE(review): dis is never closed; acceptable in a short-lived test.
    File inputFile = new File(testFileRaw);
    byte[] fileData = new byte[(int) inputFile.length()];
    DataInputStream dis = new DataInputStream(new FileInputStream(inputFile));
    dis.readFully(fileData);
    // Append a single 0 byte as the end-of-text sentinel the SA algorithm expects.
    ByteArrayOutputStream out = new ByteArrayOutputStream(fileData.length + 1);
    out.write(fileData);
    out.write(0);
    data = out.toByteArray();
}

60. QSufSortTest#testGetISA()

Project: succinct
File: QSufSortTest.java
/**
   * Test method: int[] getSA()
   *
   * @throws Exception
   */
public void testGetISA() throws Exception {
    // Compares the computed inverse suffix array against the reference ISA on disk.
    int[] ISA = instance.getISA();
    long sum = 0;
    DataInputStream dIS = new DataInputStream(new FileInputStream(new File(testFileISA)));
    int[] testISA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < n; i++) {
        assertEquals(testISA[i], ISA[i]);
        // A valid ISA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += ISA[i];
        sum %= n;
    }
    assertEquals(0L, sum);
}

61. QSufSortTest#testGetSA()

Project: succinct
File: QSufSortTest.java
/**
   * Test method: int[] getSA()
   *
   * @throws Exception
   */
public void testGetSA() throws Exception {
    // Compares the computed suffix array against the reference SA on disk.
    int[] SA = instance.getSA();
    long sum = 0;
    DataInputStream dIS = new DataInputStream(new FileInputStream(new File(testFileSA)));
    int[] testSA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < n; i++) {
        assertEquals(testSA[i], SA[i]);
        // A valid SA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += SA[i];
        sum %= n;
    }
    assertEquals(0L, sum);
}

62. QSufSortTest#setUp()

Project: succinct
File: QSufSortTest.java
/**
   * Set up test.
   *
   * @throws Exception
   */
public void setUp() throws Exception {
    super.setUp();
    instance = new QSufSort();
    // Slurp the raw test file fully into memory.
    // NOTE(review): dis is never closed; acceptable in a short-lived test.
    File inputFile = new File(testFileRaw);
    data = new byte[(int) inputFile.length()];
    DataInputStream dis = new DataInputStream(new FileInputStream(inputFile));
    dis.readFully(data);
    // n counts the implicit end-of-text sentinel in addition to the data bytes.
    n = data.length + 1;
    // Feed the bytes to the sorter through the Source abstraction.
    instance.buildSuffixArray(new Source() {

        @Override
        public int length() {
            return data.length;
        }

        @Override
        public int get(int i) {
            return data[i];
        }
    });
}

63. DivSufSortTest#testBuildSuffixArray()

Project: succinct
File: DivSufSortTest.java
public void testBuildSuffixArray() throws Exception {
    // Builds the suffix array and compares each entry against the reference
    // SA stored on disk.
    instance.buildSuffixArray(data);
    int[] SA = instance.getSA();
    long sum = 0;
    DataInputStream dIS = new DataInputStream(new FileInputStream(new File(testFileSA)));
    int[] testSA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < data.length; i++) {
        assertEquals(testSA[i], SA[i]);
        // A valid SA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += SA[i];
        sum %= data.length;
    }
    assertEquals(sum, 0L);
}

64. DivSufSortTest#setUp()

Project: succinct
File: DivSufSortTest.java
/**
   * Set up test.
   *
   * @throws Exception
   */
public void setUp() throws Exception {
    super.setUp();
    instance = new DivSufSort();
    // Slurp the raw test file fully into memory.
    // NOTE(review): dis is never closed; acceptable in a short-lived test.
    File inputFile = new File(testFileRaw);
    byte[] fileData = new byte[(int) inputFile.length()];
    DataInputStream dis = new DataInputStream(new FileInputStream(inputFile));
    dis.readFully(fileData);
    // Append a single 0 byte as the end-of-text sentinel the SA algorithm expects.
    ByteArrayOutputStream out = new ByteArrayOutputStream(fileData.length + 1);
    out.write(fileData);
    out.write(0);
    data = out.toByteArray();
}

65. SuccinctCoreTest#testLookupISA()

Project: succinct
File: SuccinctCoreTest.java
/**
   * Test method: long lookupISA(long i)
   *
   * @throws Exception
   */
public void testLookupISA() throws Exception {
    // Looks up every ISA value and compares against the reference ISA on disk.
    int sum = 0;
    DataInputStream dIS = getISAInputStream();
    int[] testISA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < sCore.getOriginalSize(); i++) {
        long isaVal = sCore.lookupISA(i);
        assertEquals(testISA[i], isaVal);
        // A valid ISA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += isaVal;
        sum %= sCore.getOriginalSize();
    }
    assertEquals(sum, 0);
}

66. SuccinctCoreTest#testLookupSA()

Project: succinct
File: SuccinctCoreTest.java
/**
   * Test method: long lookupSA(long i)
   *
   * @throws Exception
   */
public void testLookupSA() throws Exception {
    // Looks up every SA value and compares against the reference SA on disk.
    int sum = 0;
    DataInputStream dIS = getSAInputStream();
    int[] testSA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < sCore.getOriginalSize(); i++) {
        long saVal = sCore.lookupSA(i);
        assertEquals(testSA[i], saVal);
        // A valid SA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += saVal;
        sum %= sCore.getOriginalSize();
    }
    assertEquals(sum, 0);
}

67. SuccinctCoreTest#testLookupNPA()

Project: succinct
File: SuccinctCoreTest.java
/**
   * Test method: long lookupNPA(long i)
   *
   * @throws Exception
   */
public void testLookupNPA() throws Exception {
    // Looks up every NPA value and compares against the reference NPA on disk.
    int sum = 0;
    DataInputStream dIS = getNPAInputStream();
    int[] testNPA = IOUtils.readArray(dIS);
    dIS.close();
    for (int i = 0; i < sCore.getOriginalSize(); i++) {
        long npaVal = sCore.lookupNPA(i);
        assertEquals(testNPA[i], npaVal);
        // A valid NPA is a permutation of 0..n-1, so the sum mod n must be 0.
        sum += npaVal;
        sum %= sCore.getOriginalSize();
    }
    assertEquals(sum, 0);
}

68. SerializationsTest#testEstimatedHistogramRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testEstimatedHistogramRead() throws IOException {
    if (EXECUTE_WRITES)
        testEstimatedHistogramWrite();
    DataInputStream in = getInput("utils.EstimatedHistogram.bin");
    assert EstimatedHistogram.serializer.deserialize(in) != null;
    assert EstimatedHistogram.serializer.deserialize(in) != null;
    assert EstimatedHistogram.serializer.deserialize(in) != null;
    in.close();
}

69. SerializationsTest#testBloomFilterReadMURMUR3()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testBloomFilterReadMURMUR3() throws IOException {
    if (EXECUTE_WRITES)
        testBloomFilterWrite(true);
    DataInputStream in = getInput("utils.BloomFilter.bin");
    IFilter bf = FilterFactory.deserialize(in, true);
    assert bf != null;
    bf.close();
    in.close();
}

70. DataOutputTest#testSequentialWriter()

Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testSequentialWriter() throws IOException {
    File file = FileUtils.createTempFile("dataoutput", "test");
    final SequentialWriter writer = new SequentialWriter(file, 32);
    DataOutputStreamAndChannel write = new DataOutputStreamAndChannel(writer, writer);
    DataInput canon = testWrite(write);
    write.flush();
    write.close();
    DataInputStream test = new DataInputStream(new FileInputStream(file));
    testRead(test, canon);
    test.close();
    Assert.assertTrue(file.delete());
}

71. SerializationsTest#testGossipDigestRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testGossipDigestRead() throws IOException {
    if (EXECUTE_WRITES)
        testGossipDigestWrite();
    int count = 0;
    DataInputStream in = getInput("gms.Gossip.bin");
    while (count < Statics.Digests.size()) assert GossipDigestAck2.serializer.deserialize(in, getVersion()) != null;
    assert GossipDigestAck.serializer.deserialize(in, getVersion()) != null;
    assert GossipDigestAck2.serializer.deserialize(in, getVersion()) != null;
    assert GossipDigestSyn.serializer.deserialize(in, getVersion()) != null;
    in.close();
}

72. SerializationsTest#testEndpointStateRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testEndpointStateRead() throws IOException {
    if (EXECUTE_WRITES)
        testEndpointStateWrite();
    DataInputStream in = getInput("gms.EndpointState.bin");
    assert HeartBeatState.serializer.deserialize(in, getVersion()) != null;
    assert EndpointState.serializer.deserialize(in, getVersion()) != null;
    assert VersionedValue.serializer.deserialize(in, getVersion()) != null;
    assert VersionedValue.serializer.deserialize(in, getVersion()) != null;
    in.close();
}

73. SerializationsTest#testTruncateRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testTruncateRead() throws IOException {
    if (EXECUTE_WRITES)
        testTruncateWrite();
    DataInputStream in = getInput("db.Truncation.bin");
    assert Truncation.serializer.deserialize(in, getVersion()) != null;
    assert TruncateResponse.serializer.deserialize(in, getVersion()) != null;
    assert TruncateResponse.serializer.deserialize(in, getVersion()) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    // set up some fake callbacks so deserialization knows that what it's deserializing is a TruncateResponse
    MessagingService.instance().setCallbackForTests(1, new CallbackInfo(null, null, TruncateResponse.serializer));
    MessagingService.instance().setCallbackForTests(2, new CallbackInfo(null, null, TruncateResponse.serializer));
    assert MessageIn.read(in, getVersion(), 1) != null;
    assert MessageIn.read(in, getVersion(), 2) != null;
    in.close();
}

74. SerializationsTest#testMutationRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testMutationRead() throws IOException {
    // mutation deserialization requires being able to look up the keyspace in the schema,
    // so we need to rewrite this each time. plus, CF ID is different for every run.
    testMutationWrite();
    DataInputStream in = getInput("db.RowMutation.bin");
    assert Mutation.serializer.deserialize(in, getVersion()) != null;
    assert Mutation.serializer.deserialize(in, getVersion()) != null;
    assert Mutation.serializer.deserialize(in, getVersion()) != null;
    assert Mutation.serializer.deserialize(in, getVersion()) != null;
    assert Mutation.serializer.deserialize(in, getVersion()) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    in.close();
}

75. SerializationsTest#testRowRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testRowRead() throws IOException {
    // Since every table creation generates different CF ID,
    // we need to generate file every time
    testRowWrite();
    DataInputStream in = getInput("db.Row.bin");
    assert Row.serializer.deserialize(in, getVersion()) != null;
    assert Row.serializer.deserialize(in, getVersion()) != null;
    assert Row.serializer.deserialize(in, getVersion()) != null;
    in.close();
}

76. SerializationsTest#testSliceFromReadCommandRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testSliceFromReadCommandRead() throws IOException {
    if (EXECUTE_WRITES)
        testSliceFromReadCommandWrite();
    DataInputStream in = getInput("db.SliceFromReadCommand.bin");
    assert SliceFromReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert SliceFromReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert ReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert ReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    in.close();
}

77. SerializationsTest#testSliceByNamesReadCommandRead()

Project: stratio-cassandra
File: SerializationsTest.java
@Test
public void testSliceByNamesReadCommandRead() throws IOException {
    if (EXECUTE_WRITES)
        testSliceByNamesReadCommandWrite();
    DataInputStream in = getInput("db.SliceByNamesReadCommand.bin");
    assert SliceByNamesReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert SliceByNamesReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert ReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert ReadCommand.serializer.deserialize(in, getVersion()) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    assert MessageIn.read(in, getVersion(), -1) != null;
    in.close();
}

78. SemanticSpaceIO#getFormat()

Project: S-Space
File: SemanticSpaceIO.java
/**
     * Returns the format in which a semantic space is stored in the provided
     * file or {@code null} if the file does not have a recognized format.
     *
     * @param sspaceFile a file containing a semantic space
     *
     * @return the format in which a semantic space is stored in the provided
     *         file or {@code null} if the file does not have a recognized
     *         format.
     *
     * @throws IOException if any I/O exception occurs when reading the semantic
     *         space data from the file
     */
static SSpaceFormat getFormat(File sspaceFile) throws IOException {
    // try-with-resources replaces the manual close() calls; the original
    // leaked the stream if readChar() threw.
    try (DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(sspaceFile)))) {
        // read the expected header
        char header = dis.readChar();
        if (header != 's') {
            // No recognized header: treat the file as a serialized space.
            return SSpaceFormat.SERIALIZE;
        }
        char encodedFormatCode = dis.readChar();
        // The format ordinal is stored as an ASCII digit after the 's' marker.
        int formatCode = encodedFormatCode - '0';
        // BUG FIX: the original bounds check used `>`, letting a format code
        // equal to values().length through and throwing
        // ArrayIndexOutOfBoundsException on the lookup; use `>=`.
        return (formatCode < 0 || formatCode >= SSpaceFormat.values().length) ? SSpaceFormat.SERIALIZE : SSpaceFormat.values()[formatCode];
    }
}

79. ExampleSignTest#getPublic()

Project: Resteasy
File: ExampleSignTest.java
/**
 * Parses an X.509 PEM-encoded RSA public key from the given stream.
 *
 * @param is stream containing the PEM file ("-----BEGIN PUBLIC KEY-----" block)
 * @return the decoded RSA public key
 * @throws Exception if the stream cannot be read or the key is malformed
 */
public PublicKey getPublic(InputStream is) throws Exception {
    // BUG FIX: the original sized its buffer with available(), which only
    // reports bytes readable without blocking — not the stream length — and
    // can silently truncate the key. Read until EOF instead.
    java.io.ByteArrayOutputStream collected = new java.io.ByteArrayOutputStream();
    byte[] chunk = new byte[4096];
    int n;
    while ((n = is.read(chunk)) != -1) {
        collected.write(chunk, 0, n);
    }
    String pem = new String(collected.toByteArray());
    // Strip the PEM armor, leaving only the base64 body.
    pem = pem.replace("-----BEGIN PUBLIC KEY-----", "");
    pem = pem.replace("-----END PUBLIC KEY-----", "");
    pem = pem.trim();
    //System.out.println(pem);
    byte[] der = Base64.decode(pem);
    X509EncodedKeySpec spec = new X509EncodedKeySpec(der);
    KeyFactory kf = KeyFactory.getInstance("RSA");
    return kf.generatePublic(spec);
}

80. StreamUtility#readToEndAsArray()

Project: RemoteDroid
File: StreamUtility.java
/**
 * Drains the given stream to EOF and returns everything read as a byte
 * array. The stream is closed before returning.
 *
 * @param input the stream to consume
 * @return all bytes the stream produced, in order
 * @throws IOException if reading fails
 */
public static byte[] readToEndAsArray(InputStream input) throws IOException {
    ByteArrayOutputStream collected = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    // Keep pulling fixed-size chunks until the stream signals end-of-input.
    for (int count = input.read(chunk); count != -1; count = input.read(chunk)) {
        collected.write(chunk, 0, count);
    }
    input.close();
    return collected.toByteArray();
}

81. SchemaDictionaryEntry#loadDictionaryEntry()

Project: RecordBreaker
File: SchemaDictionaryEntry.java
/**
   * Load dictionary entry from disk.
   */
/**
   * Load dictionary entry from disk.
   */
public void loadDictionaryEntry(File dir, String fileRoot) throws IOException {
    // The entry is stored as three sibling files sharing one root name.
    File summaryFile = new File(dir, fileRoot + SUMMARY_ENDING);
    File schemaFile = new File(dir, fileRoot + SCHEMA_ENDING);
    File infoTextFile = new File(dir, fileRoot + INFO_ENDING);
    // 1) statistical summary (Writable format).
    this.summary = new SchemaStatisticalSummary();
    DataInputStream in = new DataInputStream(new FileInputStream(summaryFile));
    try {
        summary.readFields(in);
    } finally {
        in.close();
    }
    // 2) Avro schema parsed straight from its file.
    this.schema = Schema.parse(schemaFile);
    // 3) human-readable info text stored as a single UTF string.
    in = new DataInputStream(new FileInputStream(infoTextFile));
    try {
        this.info = in.readUTF();
    } finally {
        in.close();
    }
}

82. Media#urlToStream()

Project: pluotsorbet
File: Media.java
/**
     * Reads the content from the specified HTTP URL and returns InputStream
     * where the contents are read.
     * 
     * @return InputStream
     * @throws IOException
     */
private InputStream urlToStream(String url) throws IOException {
    // Open connection to the http url...
    // (javax.microedition Connector — this is J2ME networking code.)
    HttpConnection connection = (HttpConnection) Connector.open(url);
    DataInputStream dataIn = connection.openDataInputStream();
    byte[] buffer = new byte[1000];
    int read = -1;
    // Read the content from url.
    ByteArrayOutputStream byteout = new ByteArrayOutputStream();
    while ((read = dataIn.read(buffer)) >= 0) {
        byteout.write(buffer, 0, read);
    }
    dataIn.close();
    // NOTE(review): the HttpConnection itself is never closed — confirm
    // whether closing dataIn releases it on this platform.
    // Fill InputStream to return with content read from the URL.
    ByteArrayInputStream byteIn = new ByteArrayInputStream(byteout.toByteArray());
    return byteIn;
}

83. TestStarTreeDataTable#read()

Project: pinot
File: TestStarTreeDataTable.java
public static int[][] read(File tempFile, int numRows, int numCols) throws IOException {
    // Reads a dense numRows x numCols matrix of big-endian ints, row-major,
    // exactly as DataOutputStream.writeInt would have produced it.
    int[][] data = new int[numRows][];
    DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(tempFile)));
    for (int row = 0; row < numRows; row++) {
        data[row] = new int[numCols];
        for (int col = 0; col < numCols; col++) {
            data[row][col] = dis.readInt();
        }
    }
    // NOTE(review): dis leaks if readInt() throws; a try-with-resources
    // would be safer.
    dis.close();
    return data;
}

84. StreamingUDF#setStreams()

Project: pig
File: StreamingUDF.java
private void setStreams() throws IOException {
    // Wire the child process's three standard streams to the UDF handlers:
    // stdout feeds the output handler, stdin receives serialized input tuples,
    // and stderr is kept for diagnostics.
    stdout = new DataInputStream(new BufferedInputStream(process.getInputStream()));
    outputHandler.bindTo("", new BufferedPositionedInputStream(stdout), 0, Long.MAX_VALUE);
    stdin = new DataOutputStream(new BufferedOutputStream(process.getOutputStream()));
    inputHandler.bindTo(stdin);
    stderr = new DataInputStream(new BufferedInputStream(process.getErrorStream()));
}

85. AccumuloBinaryConverter#bytesToBigInteger()

Project: pig
File: AccumuloBinaryConverter.java
@Override
public BigInteger bytesToBigInteger(byte[] b) throws IOException {
    // Taken from Accumulo's BigIntegerLexicoder in 1.6.0
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(b));
    int len = dis.readInt();
    len = len ^ 0x80000000;
    len = Math.abs(len);
    byte[] bytes = new byte[len];
    dis.readFully(bytes);
    bytes[0] = (byte) (0x80 ^ bytes[0]);
    return new BigInteger(bytes);
}

86. RowTest#makeTestExtractDataWithTimestampConversion()

Project: pentaho-kettle
File: RowTest.java
private void makeTestExtractDataWithTimestampConversion(RowMetaInterface rowMeta, String str, Date date, Timestamp constTimestamp) throws KettleEOFException, KettleFileException, IOException {
    // Serialize a (String, Date) row with the given metadata, then read it
    // back as (String, Timestamp) and check both values round-trip.
    Object[] rowData = new Object[] { str, date };
    byte[] result = RowMeta.extractData(rowMeta, rowData);
    DataInputStream stream = new DataInputStream(new ByteArrayInputStream(result));
    String extractedString = (String) new ValueMetaString().readData(stream);
    Timestamp time = (Timestamp) new ValueMetaTimestamp().readData(stream);
    stream.close();
    assertTrue(str.equals(extractedString));
    assertTrue(constTimestamp.equals(time));
}

87. TestImageIOUtils#checkBmpResolution()

Project: pdfbox
File: TestImageIOUtils.java
/**
     * checks whether the resolution of a BMP image file is as expected.
     *
     * @param filename the name of the BMP file
     * @param expectedResolution the expected resolution
     *
     * @throws IOException if something goes wrong
     */
private void checkBmpResolution(String filename, int expectedResolution) throws FileNotFoundException, IOException {
    // BMP format explained here:
    // http://www.javaworld.com/article/2077561/learn-java/java-tip-60--saving-bitmap-files-in-java.html
    // we skip 38 bytes and then read two 4 byte-integers and reverse the bytes
    DataInputStream dis = new DataInputStream(new FileInputStream(new File(filename)));
    int skipped = dis.skipBytes(38);
    assertEquals("Can't skip 38 bytes in image file " + filename, 38, skipped);
    // BMP stores little-endian; readInt is big-endian, so reverse the bytes,
    // then convert pixels-per-meter to DPI (x2.54/100).
    int pixelsPerMeter = Integer.reverseBytes(dis.readInt());
    int actualResolution = (int) Math.round(pixelsPerMeter / 100.0 * 2.54);
    assertEquals("X resolution doesn't match in image file " + filename, expectedResolution, actualResolution);
    pixelsPerMeter = Integer.reverseBytes(dis.readInt());
    actualResolution = (int) Math.round(pixelsPerMeter / 100.0 * 2.54);
    assertEquals("Y resolution doesn't match in image file " + filename, expectedResolution, actualResolution);
    // NOTE(review): dis leaks if an assertion above fails before this close.
    dis.close();
}

88. ClassReader#readAttribute()

Project: openjdk
File: ClassReader.java
/**
 * Reads one class-file attribute (name index, length, payload bytes) from the
 * current stream and delegates parsing to the attribute factory.
 *
 * While the factory runs, the {@code in} field is temporarily redirected to a
 * stream over just this attribute's bytes, so nested reads cannot run past the
 * attribute boundary; the outer stream is restored afterwards.
 *
 * @return the attribute created by the factory
 * @throws IOException if the underlying stream cannot be read
 */
public Attribute readAttribute() throws IOException {
    int name_index = readUnsignedShort();
    int length = readInt();
    byte[] data = new byte[length];
    readFully(data);
    // save the enclosing stream and scope subsequent reads to this attribute only
    DataInputStream prev = in;
    in = new DataInputStream(new ByteArrayInputStream(data));
    try {
        return attributeFactory.createAttribute(this, name_index, data);
    } finally {
        // always restore the outer stream, even if the factory throws
        in = prev;
    }
}

89. TrieDictionary#readFields()

Project: kylin
File: TrieDictionary.java
/**
 * Deserializes the dictionary: buffers enough of the header to learn the
 * head and body sizes, then reads the remainder into one contiguous byte
 * image and initializes from it.
 *
 * NOTE(review): headPartial is sized with Short.SIZE + Integer.SIZE, which
 * are BIT counts (16 + 32), so more bytes are buffered up front than the
 * 2 + 4 strictly needed for the two length fields — presumably deliberate
 * slack; confirm against the matching write() layout.
 */
@Override
public void readFields(DataInput in) throws IOException {
    byte[] headPartial = new byte[HEAD_MAGIC.length + Short.SIZE + Integer.SIZE];
    in.readFully(headPartial);
    if (BytesUtil.compareBytes(HEAD_MAGIC, 0, headPartial, 0, HEAD_MAGIC.length) != 0)
        throw new IllegalArgumentException("Wrong file type (magic does not match)");
    // re-parse the already-buffered bytes starting just past the magic/size prefix
    DataInputStream headIn = new //
    DataInputStream(new ByteArrayInputStream(headPartial, HEAD_SIZE_I, headPartial.length - HEAD_SIZE_I));
    int headSize = headIn.readShort();
    int bodyLen = headIn.readInt();
    headIn.close();
    // assemble the full head+body image: copy what was buffered, stream the rest
    byte[] all = new byte[headSize + bodyLen];
    System.arraycopy(headPartial, 0, all, 0, headPartial.length);
    in.readFully(all, headPartial.length, all.length - headPartial.length);
    init(all);
}

90. StackTraceCodec#newReader()

Project: jvm-tools
File: StackTraceCodec.java
/**
 * Sniffs the leading magic bytes of the stream and returns a reader for the
 * matching stack-trace format version.
 *
 * @param is the stream positioned at the start of the dump
 * @return a version-appropriate StackTraceReader over {@code is}
 * @throws IOException if the magic is unrecognized or the stream fails
 */
public static StackTraceReader newReader(InputStream is) throws IOException {
    DataInputStream header = new DataInputStream(is);
    byte[] sniffed = new byte[MAGIC.length];
    header.readFully(sniffed);
    if (Arrays.equals(MAGIC, sniffed)) {
        return new StackTraceReaderV1(is);
    }
    if (Arrays.equals(MAGIC2, sniffed)) {
        return new StackTraceReaderV2(is);
    }
    throw new IOException("Unknown magic [" + new String(sniffed) + "]");
}

91. IpAddressTest#testStreamableWithHighPort()

Project: JGroups
File: IpAddressTest.java
/**
 * Round-trips an IpAddress with the maximum port (65535) through its
 * Streamable write/read methods and checks the port survives intact.
 */
public static void testStreamableWithHighPort() throws Exception {
    IpAddress original = createStackConformantAddress(65535);
    // serialize to an in-memory buffer
    ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
    original.writeTo(new DataOutputStream(byteSink));
    // deserialize from the captured bytes
    DataInputStream source = new DataInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
    IpAddress restored = new IpAddress();
    restored.readFrom(source);
    System.out.println("x: " + original + ", x2: " + restored);
    // a high port must not be mangled into a negative/zero value
    assert restored.getPort() > 0;
    Assert.assertEquals(original.getPort(), restored.getPort());
}

92. IpAddressTest#testIPv6WithStreamable()

Project: JGroups
File: IpAddressTest.java
/**
 * Round-trips an IPv6 IpAddress through its Streamable write/read methods
 * and checks equality of the restored address.
 */
public static void testIPv6WithStreamable() throws Exception {
    IpAddress original = new IpAddress("fe80:0:0:0:21b:21ff:fe07:a3b0", 5555);
    ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(byteSink);
    System.out.println("-- address is " + original);
    // serialize, then deserialize from the captured bytes
    original.writeTo(out);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
    IpAddress restored = new IpAddress();
    restored.readFrom(in);
    Assert.assertEquals(original, restored);
}

93. DiagnosticsHandler#authorizeProbeRequest()

Project: JGroups
File: DiagnosticsHandler.java
/**
 * Performs authorization on given DatagramPacket.
 *
 * @param packet to authorize
 * @return offset in DatagramPacket where request payload starts
 * @throws Exception thrown if passcode received from client does not match set passcode
 */
protected int authorizeProbeRequest(DatagramPacket packet) throws Exception {
    int offset = 0;
    ByteArrayInputStream bis = new ByteArrayInputStream(packet.getData());
    DataInputStream in = new DataInputStream(bis);
    // wire format: 8-byte long, 8-byte double, 4-byte digest length, digest bytes
    long t1 = in.readLong();
    double q1 = in.readDouble();
    int length = in.readInt();
    byte[] digest = new byte[length];
    in.readFully(digest);
    // payload begins right after the fixed fields plus the variable-length digest
    offset = 8 + 8 + 4 + digest.length;
    // recompute the digest locally from the shared passcode and the client's values
    byte[] local = Util.createDigest(passcode, t1, q1);
    // MessageDigest.isEqual performs a constant-time comparison (timing-attack safe)
    if (!MessageDigest.isEqual(digest, local))
        throw new Exception("Authorization failed! Make sure correct passcode is used");
    else
        log.debug("Request authorized");
    return offset;
}

94. TestDTFile#someReadingWithMetaBlock()

Project: apex-malhar
File: TestDTFile.java
/**
 * Exercises meta-block reads: a nonexistent block must raise
 * MetaBlockDoesNotExist, and "TFileMeta100" must exist but be empty
 * (EOF on the first read).
 */
private void someReadingWithMetaBlock(Reader reader) throws IOException {
    DataInputStream din = null;
    readNumMetablocks(reader, 10);
    try {
        din = reader.getMetaBlock("NO ONE");
        // reaching this line means the expected exception was not thrown
        assertTrue(false);
    } catch (MetaBlockDoesNotExist me) {
        // expected: "NO ONE" was never written as a meta block
    }
    din = reader.getMetaBlock("TFileMeta100");
    int read = din.read();
    // the block exists but has no payload, so the first read reports EOF (-1)
    assertTrue("check for status", (read == -1));
    din.close();
}

95. WdcParser#main()

Project: anthelion
File: WdcParser.java
/**
 * Command-line driver: reads the file named by args[0], runs it through
 * WdcParser as text/html, and prints the parse data, text, and the
 * META_CONTAINS_SEM metadata value.
 *
 * @param args args[0] is a path on the local filesystem
 * @throws Exception if the file cannot be read or parsing fails
 */
public static void main(String[] args) throws Exception {
    // LOG.setLevel(Level.FINE);
    String name = args[0];
    String url = "file:" + name;
    File file = new File(name);
    byte[] bytes = new byte[(int) file.length()];
    // try-with-resources: the original never closed the input stream
    try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
        in.readFully(bytes);
    }
    Configuration conf = NutchConfiguration.create();
    WdcParser parser = new WdcParser();
    parser.setConf(conf);
    Parse parse = parser.getParse(new Content(url, url, bytes, "text/html", new Metadata(), conf)).get(url);
    System.out.println("data: " + parse.getData());
    System.out.println("text: " + parse.getText());
    String contains = parse.getData().getMeta(META_CONTAINS_SEM);
    System.out.println("contains: " + contains);
}

96. FeedParser#main()

Project: anthelion
File: FeedParser.java
/**
   * Runs a command line version of this {@link Parser}.
   * 
   * @param args
   *          A single argument (expected at arg[0]) representing a path on the
   *          local filesystem that points to a feed file.
   * 
   * @throws Exception
   *           If any error occurs.
   */
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.err.println("Usage: FeedParser <feed>");
        System.exit(1);
    }
    String name = args[0];
    String url = "file:" + name;
    Configuration conf = NutchConfiguration.create();
    FeedParser parser = new FeedParser();
    parser.setConf(conf);
    File file = new File(name);
    byte[] bytes = new byte[(int) file.length()];
    // try-with-resources: the original never closed the input stream
    try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
        in.readFully(bytes);
    }
    ParseResult parseResult = parser.getParse(new Content(url, url, bytes, "application/rss+xml", new Metadata(), conf));
    // a feed may yield multiple parses; print each one
    for (Entry<Text, Parse> entry : parseResult) {
        System.out.println("key: " + entry.getKey());
        Parse parse = entry.getValue();
        System.out.println("data: " + parse.getData());
        System.out.println("text: " + parse.getText() + "\n");
    }
}

97. BloomFilterTest#testHugeBFSerialization()

Project: ambry
File: BloomFilterTest.java
/**
 * Serializes a large BloomFilter (plus its bitset) to a temp file and
 * verifies that the deserialized filter still reports the added element
 * as present.
 *
 * @throws IOException if file I/O fails
 */
@Test
public void testHugeBFSerialization() throws IOException {
    ByteBuffer test = ByteBuffer.wrap(new byte[] { 0, 1 });
    File f = File.createTempFile("bloomFilterTest-", ".dat");
    f.deleteOnExit();
    BloomFilter filter = (BloomFilter) FilterFactory.getFilter(((long) 100000 / 8) + 1, 0.01d);
    filter.add(test);
    // try-with-resources: the original leaked the streams if serialize/deserialize threw
    try (DataOutputStream out = new DataOutputStream(new FileOutputStream(f))) {
        FilterFactory.serialize(filter, out);
        filter.bitset.serialize(out);
    }
    try (DataInputStream in = new DataInputStream(new FileInputStream(f))) {
        BloomFilter filter2 = (BloomFilter) FilterFactory.deserialize(in);
        Assert.assertTrue(filter2.isPresent(test));
    }
}

98. RequestResponseTest#testPutRequestInvalidVersion()

Project: ambry
File: RequestResponseTest.java
/**
 * Serializes a PutRequest stamped with an unsupported version (0) and
 * verifies that deserialization rejects it with an IllegalStateException.
 *
 * @param clusterMap     cluster map used by PutRequest.readFrom
 * @param correlationId  request correlation id
 * @param clientId       originating client id
 * @param blobId         id of the blob being put
 * @param blobProperties properties (size etc.) of the blob
 * @param userMetadata   opaque user metadata bytes
 * @param blob           blob payload bytes
 * @throws IOException if serialization fails
 */
private void testPutRequestInvalidVersion(MockClusterMap clusterMap, int correlationId, String clientId, BlobId blobId, BlobProperties blobProperties, byte[] userMetadata, byte[] blob) throws IOException {
    final short Put_Request_Invalid_version = 0;
    int sizeInBlobProperties = (int) blobProperties.getBlobSize();
    PutRequest request = new MockPutRequestV1(correlationId, clientId, blobId, blobProperties, ByteBuffer.wrap(userMetadata), new ByteBufferInputStream(ByteBuffer.wrap(blob)), sizeInBlobProperties, BlobType.DataBlob, Put_Request_Invalid_version);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    WritableByteChannel writableByteChannel = Channels.newChannel(outputStream);
    // drain the request into the in-memory channel until fully written
    while (!request.isSendComplete()) {
        request.writeTo(writableByteChannel);
    }
    DataInputStream requestStream = new DataInputStream(new ByteArrayInputStream(outputStream.toByteArray()));
    // skip the 8-byte total-size field, then check the request type marker
    requestStream.readLong();
    Assert.assertEquals(RequestOrResponseType.values()[requestStream.readShort()], RequestOrResponseType.PutRequest);
    try {
        PutRequest.readFrom(requestStream, clusterMap);
        Assert.fail("Deserialization of PutRequest with invalid version should have thrown an exception.");
    } catch (IllegalStateException e) {
        // expected: version 0 is not a supported PutRequest version
    }
}

99. RequestResponseTest#testPutRequest()

Project: ambry
File: RequestResponseTest.java
/**
 * Serializes a PutRequest, deserializes it back, and verifies that every
 * field (blob id, properties, user metadata, size, type, payload) survives
 * the round trip.
 *
 * @param clusterMap     cluster map used by PutRequest.readFrom
 * @param correlationId  request correlation id
 * @param clientId       originating client id
 * @param blobId         id of the blob being put
 * @param blobProperties properties (size etc.) of the blob
 * @param userMetadata   opaque user metadata bytes
 * @param blobType       type of the blob (data/metadata)
 * @param blob           blob payload bytes
 * @param blobSize       size of the blob payload in bytes
 * @throws IOException if serialization or deserialization fails
 */
private void testPutRequest(MockClusterMap clusterMap, int correlationId, String clientId, BlobId blobId, BlobProperties blobProperties, byte[] userMetadata, BlobType blobType, byte[] blob, int blobSize) throws IOException {
    int sizeInBlobProperties = (int) blobProperties.getBlobSize();
    PutRequest request = new PutRequest(correlationId, clientId, blobId, blobProperties, ByteBuffer.wrap(userMetadata), new ByteBufferInputStream(ByteBuffer.wrap(blob)), blobSize, blobType);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    WritableByteChannel writableByteChannel = Channels.newChannel(outputStream);
    // drain the request into the in-memory channel until fully written
    while (!request.isSendComplete()) {
        request.writeTo(writableByteChannel);
    }
    DataInputStream requestStream = new DataInputStream(new ByteArrayInputStream(outputStream.toByteArray()));
    // skip the 8-byte total-size field, then check the request type marker
    requestStream.readLong();
    Assert.assertEquals(RequestOrResponseType.values()[requestStream.readShort()], RequestOrResponseType.PutRequest);
    PutRequest deserializedPutRequest = PutRequest.readFrom(requestStream, clusterMap);
    Assert.assertEquals(deserializedPutRequest.getBlobId(), blobId);
    Assert.assertEquals(deserializedPutRequest.getBlobProperties().getBlobSize(), sizeInBlobProperties);
    Assert.assertArrayEquals(userMetadata, deserializedPutRequest.getUsermetadata().array());
    Assert.assertEquals(deserializedPutRequest.getBlobSize(), blobSize);
    Assert.assertEquals(deserializedPutRequest.getBlobType(), blobType);
    byte[] blobRead = new byte[blobSize];
    // InputStream.read() may return fewer bytes than requested in a single call;
    // readFully guarantees the whole payload is read before comparing
    new DataInputStream(deserializedPutRequest.getBlobStream()).readFully(blobRead);
    Assert.assertArrayEquals(blob, blobRead);
}

100. SSLBlockingChannelTest#sendAndReceive()

Project: ambry
File: SSLBlockingChannelTest.java
/**
 * Sends a random 1028-byte payload (prefixed in-place with its own length)
 * over the channel and verifies the echoed response matches the payload
 * body that follows the 8-byte size prefix.
 */
private void sendAndReceive(BlockingChannel channel) throws Exception {
    final long blobSize = 1028;
    byte[] payload = new byte[(int) blobSize];
    new Random().nextBytes(payload);
    ByteBuffer wrapped = ByteBuffer.wrap(payload);
    // the first 8 bytes carry the total size expected by the receiver
    wrapped.putLong(0, blobSize);
    // send the request
    channel.connect();
    channel.send(new BoundedByteBufferSend(wrapped));
    // receive the response
    DataInputStream response = new DataInputStream(channel.receive().getInputStream());
    byte[] echoed = new byte[(int) blobSize - 8];
    response.readFully(echoed);
    // the response omits the 8-byte prefix, so compare offset by 8
    for (int i = 0; i < blobSize - 8; i++) {
        Assert.assertEquals(payload[8 + i], echoed[i]);
    }
}