java.io.DataOutputStream

Here are examples of the Java API class java.io.DataOutputStream, taken from open source projects.
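
Before diving into the project examples, here is a minimal, self-contained sketch of the basic pattern: wrap any OutputStream in a DataOutputStream, write primitives in a fixed order, and read them back in the same order with DataInputStream. The class name and values below are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class DataOutputStreamRoundTrip {

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        // write primitives in a fixed, known order
        dos.writeInt(42);
        dos.writeDouble(3.14);
        dos.writeUTF("hello");
        dos.flush();
        // read them back in exactly the same order
        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        System.out.println(dis.readInt());      // 42
        System.out.println(dis.readDouble());   // 3.14
        System.out.println(dis.readUTF());      // hello
    }
}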

1. NTRUSigningKeyGenerationParameters#writeTo()

Project: bc-java
File: NTRUSigningKeyGenerationParameters.java
/**
     * Writes the parameter set to an output stream
     *
     * @param os an output stream
     * @throws java.io.IOException
     */
public void writeTo(OutputStream os) throws IOException {
    DataOutputStream dos = new DataOutputStream(os);
    dos.writeInt(N);
    dos.writeInt(q);
    dos.writeInt(d);
    dos.writeInt(d1);
    dos.writeInt(d2);
    dos.writeInt(d3);
    dos.writeInt(B);
    dos.writeInt(basisType);
    dos.writeDouble(beta);
    dos.writeDouble(normBound);
    dos.writeDouble(keyNormBound);
    dos.writeInt(signFailTolerance);
    dos.writeBoolean(primeCheck);
    dos.writeBoolean(sparse);
    dos.writeInt(bitsF);
    dos.write(keyGenAlg);
    dos.writeUTF(hashAlg.getAlgorithmName());
    dos.write(polyType);
}

2. NTRUEncryptionParameters#writeTo()

Project: bc-java
File: NTRUEncryptionParameters.java
/**
     * Writes the parameter set to an output stream
     *
     * @param os an output stream
     * @throws IOException
     */
public void writeTo(OutputStream os) throws IOException {
    DataOutputStream dos = new DataOutputStream(os);
    dos.writeInt(N);
    dos.writeInt(q);
    dos.writeInt(df);
    dos.writeInt(df1);
    dos.writeInt(df2);
    dos.writeInt(df3);
    dos.writeInt(db);
    dos.writeInt(dm0);
    dos.writeInt(c);
    dos.writeInt(minCallsR);
    dos.writeInt(minCallsMask);
    dos.writeBoolean(hashSeed);
    dos.write(oid);
    dos.writeBoolean(sparse);
    dos.writeBoolean(fastFp);
    dos.write(polyType);
    dos.writeUTF(hashAlg.getAlgorithmName());
}

3. NTRUEncryptionKeyGenerationParameters#writeTo()

Project: bc-java
File: NTRUEncryptionKeyGenerationParameters.java
/**
     * Writes the parameter set to an output stream
     *
     * @param os an output stream
     * @throws java.io.IOException
     */
public void writeTo(OutputStream os) throws IOException {
    DataOutputStream dos = new DataOutputStream(os);
    dos.writeInt(N);
    dos.writeInt(q);
    dos.writeInt(df);
    dos.writeInt(df1);
    dos.writeInt(df2);
    dos.writeInt(df3);
    dos.writeInt(db);
    dos.writeInt(dm0);
    dos.writeInt(c);
    dos.writeInt(minCallsR);
    dos.writeInt(minCallsMask);
    dos.writeBoolean(hashSeed);
    dos.write(oid);
    dos.writeBoolean(sparse);
    dos.writeBoolean(fastFp);
    dos.write(polyType);
    dos.writeUTF(hashAlg.getAlgorithmName());
}

4. NTRUSigningParameters#writeTo()

Project: bc-java
File: NTRUSigningParameters.java
/**
     * Writes the parameter set to an output stream
     *
     * @param os an output stream
     * @throws IOException
     */
public void writeTo(OutputStream os) throws IOException {
    DataOutputStream dos = new DataOutputStream(os);
    dos.writeInt(N);
    dos.writeInt(q);
    dos.writeInt(d);
    dos.writeInt(d1);
    dos.writeInt(d2);
    dos.writeInt(d3);
    dos.writeInt(B);
    dos.writeDouble(beta);
    dos.writeDouble(normBound);
    dos.writeInt(signFailTolerance);
    dos.writeInt(bitsF);
    dos.writeUTF(hashAlg.getAlgorithmName());
}

5. MessageDigest2Test#testSerializationSHA_DATA_2()

Project: j2objc
File: MessageDigest2Test.java
private void testSerializationSHA_DATA_2(MessageDigest sha) throws Exception {
    sha.reset();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream output = new DataOutputStream(out);
    // Made up data
    output.writeUTF("tests.api.java.security.MessageDigestTest$InitializerFieldsTest3");
    // class modifiers
    output.writeInt(0);
    // interfaces
    output.writeUTF("java.io.Serializable");
    // Fields
    // name
    output.writeUTF("sub_toBeNotSerialized");
    // modifiers
    output.writeInt(9);
    // signature
    output.writeUTF("Ljava/lang/String;");
    // name
    output.writeUTF("sub_toBeNotSerialized2");
    // modifiers
    output.writeInt(9);
    // signature
    output.writeUTF("Ljava/lang/String;");
    // name
    output.writeUTF("sub_toBeSerialized");
    // modifiers
    output.writeInt(1);
    // signature
    output.writeUTF("Ljava/lang/String;");
    // name
    output.writeUTF("sub_toBeSerialized3");
    // modifiers
    output.writeInt(1);
    // signature
    output.writeUTF("Ljava/lang/String;");
    // name
    output.writeUTF("sub_toBeSerialized4");
    // modifiers
    output.writeInt(1);
    // signature
    output.writeUTF("Ljava/lang/String;");
    // name
    output.writeUTF("sub_toBeSerialized5");
    // modifiers
    output.writeInt(1);
    // signature
    output.writeUTF("Ljava/lang/String;");
    // clinit
    // name
    output.writeUTF("<clinit>");
    // modifiers
    output.writeInt(8);
    // signature
    output.writeUTF("()V");
    // constructors
    // name
    output.writeUTF("<init>");
    // modifiers
    output.writeInt(0);
    // signature
    output.writeUTF("()V");
    // methods
    // name
    output.writeUTF("equals");
    // modifiers
    output.writeInt(1);
    // signature
    output.writeUTF("(Ljava.lang.Object;)Z");
    output.flush();
    byte[] data = out.toByteArray();
    byte[] hash = sha.digest(data);
    assertTrue("SHA_DATA_2 NOT ok", Arrays.equals(hash, SHA_DATA_2));
}

6. SplitFileFetcherStorage#encodeAndChecksumOriginalDetails()

Project: fred
File: SplitFileFetcherStorage.java
/** Write details needed to restart the download from scratch, and to identify whether it is
     * useful to do so. */
private byte[] encodeAndChecksumOriginalDetails(FreenetURI thisKey, FreenetURI origKey, byte[] clientDetails, boolean isFinalFetch) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    dos.writeUTF(thisKey.toASCIIString());
    dos.writeUTF(origKey.toASCIIString());
    dos.writeBoolean(isFinalFetch);
    dos.writeInt(clientDetails.length);
    dos.write(clientDetails);
    dos.writeInt(maxRetries);
    dos.writeInt(cooldownTries);
    dos.writeLong(cooldownLength);
    return checksumChecker.appendChecksum(baos.toByteArray());
}

7. FieldAccessBCW#initCodeAttr()

Project: beetl2.0
File: FieldAccessBCW.java
public byte[] initCodeAttr() throws Exception {
    ByteArrayOutputStream bs = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bs);
    int index = this.registerUTFString("Code");
    out.writeShort(index);
    byte[] codes = initCodes();
    // Code attribute length: max_stack (2) + max_locals (2) + code_length (4) + code + exception table count (2) + attributes count (2)
    int attrlen = 4 + 4 + codes.length + 4;
    out.writeInt(attrlen);
    //stack
    out.writeShort(1);
    //local var
    out.writeShort(1);
    out.writeInt(codes.length);
    //codes;
    out.write(codes);
    //exceptions
    out.writeShort(0);
    //attr-info
    out.writeShort(0);
    return bs.toByteArray();
}

8. TaskLog#writeToIndexFile()

Project: hadoop-common
File: TaskLog.java
private static void writeToIndexFile(TaskAttemptID firstTaskid, boolean isCleanup) throws IOException {
    File indexFile = getIndexFile(currentTaskid.toString(), isCleanup);
    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(indexFile, false));
    DataOutputStream dos = new DataOutputStream(bos);
    //the format of the index file is
    //LOG_DIR: <the dir where the task logs are really stored>
    //STDOUT: <start-offset in the stdout file> <length>
    //STDERR: <start-offset in the stderr file> <length>
    //SYSLOG: <start-offset in the syslog file> <length>    
    dos.writeBytes(LogFileDetail.LOCATION + firstTaskid.toString() + "\n" + LogName.STDOUT.toString() + ":");
    dos.writeBytes(Long.toString(prevOutLength) + " ");
    dos.writeBytes(Long.toString(getTaskLogFile(firstTaskid, LogName.STDOUT).length() - prevOutLength) + "\n" + LogName.STDERR + ":");
    dos.writeBytes(Long.toString(prevErrLength) + " ");
    dos.writeBytes(Long.toString(getTaskLogFile(firstTaskid, LogName.STDERR).length() - prevErrLength) + "\n" + LogName.SYSLOG.toString() + ":");
    dos.writeBytes(Long.toString(prevLogLength) + " ");
    dos.writeBytes(Long.toString(getTaskLogFile(firstTaskid, LogName.SYSLOG).length() - prevLogLength) + "\n");
    dos.close();
}

9. RConThreadClient#sendResponse()

Project: Kingdoms
File: RConThreadClient.java
/**
     * Sends the given response message to the client
     */
private void sendResponse(int p_72654_1_, int p_72654_2_, String message) throws IOException {
    ByteArrayOutputStream bytearrayoutputstream = new ByteArrayOutputStream(1248);
    DataOutputStream dataoutputstream = new DataOutputStream(bytearrayoutputstream);
    byte[] abyte = message.getBytes("UTF-8");
    dataoutputstream.writeInt(Integer.reverseBytes(abyte.length + 10));
    dataoutputstream.writeInt(Integer.reverseBytes(p_72654_1_));
    dataoutputstream.writeInt(Integer.reverseBytes(p_72654_2_));
    dataoutputstream.write(abyte);
    dataoutputstream.write(0);
    dataoutputstream.write(0);
    this.clientSocket.getOutputStream().write(bytearrayoutputstream.toByteArray());
}

10. TestBytesArray#testWriteAndRead()

Project: HanLP
File: TestBytesArray.java
public void testWriteAndRead() throws Exception {
    DataOutputStream out = new DataOutputStream(new FileOutputStream(DATA_OUT_DAT));
    out.writeChar('H');
    out.writeChar('e');
    out.writeChar('l');
    out.writeChar('l');
    out.writeChar('o');
    out.close();
    ByteArray byteArray = ByteArray.createByteArray(DATA_OUT_DAT);
    while (byteArray.hasMore()) {
        System.out.println(byteArray.nextChar());
    }
}
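
ByteArray is a HanLP-specific helper; the same file can also be read back with nothing but the JDK. A generic sketch, assuming the file was written with writeChar() as in the test above ("data_out.dat" stands in for the test's DATA_OUT_DAT constant):

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class ReadCharsBack {

    public static void main(String[] args) throws IOException {
        try (DataInputStream in = new DataInputStream(new FileInputStream("data_out.dat"))) {
            // writeChar() emits 2 bytes per char, so keep reading while a full char remains
            while (in.available() >= 2) {
                System.out.print(in.readChar());
            }
        }
    }
}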

11. JsonStoreBuilder#generateFirstElement()

Project: voldemort
File: JsonStoreBuilder.java
private byte[] generateFirstElement(KeyValuePair currentPair) throws IOException {
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    DataOutputStream valueStream = new DataOutputStream(stream);
    valueStream.writeShort(1);
    valueStream.writeInt(currentPair.getKey().length);
    valueStream.writeInt(currentPair.getValue().length);
    valueStream.write(currentPair.getKey());
    valueStream.write(currentPair.getValue());
    valueStream.flush();
    return stream.toByteArray();
}

12. TestBlockReplacement#replaceBlock()

Project: hadoop-20
File: TestBlockReplacement.java
/* Copy a block from sourceProxy to destination. If the block becomes
   * over-replicated, preferably remove it from source.
   * 
   * Return true if a block is successfully copied; otherwise false.
   */
private boolean replaceBlock(Block block, DatanodeInfo source, DatanodeInfo sourceProxy, DatanodeInfo destination, int namespaceId) throws IOException {
    Socket sock = new Socket();
    sock.connect(NetUtils.createSocketAddr(destination.getName()), HdfsConstants.READ_TIMEOUT);
    sock.setKeepAlive(true);
    // sendRequest
    DataOutputStream out = new DataOutputStream(sock.getOutputStream());
    out.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
    out.writeByte(DataTransferProtocol.OP_REPLACE_BLOCK);
    out.writeInt(namespaceId);
    out.writeLong(block.getBlockId());
    out.writeLong(block.getGenerationStamp());
    Text.writeString(out, source.getStorageID());
    sourceProxy.write(out);
    out.flush();
    // receiveResponse
    DataInputStream reply = new DataInputStream(sock.getInputStream());
    short status = reply.readShort();
    if (status == DataTransferProtocol.OP_STATUS_SUCCESS) {
        return true;
    }
    return false;
}

13. IDEStructureParameter#writeToStream()

Project: fop
File: IDEStructureParameter.java
/** {@inheritDoc} */
public void writeToStream(OutputStream os) throws IOException {
    int length = 7 + bitsPerIDE.length;
    byte flags = 0x00;
    if (subtractive) {
        flags |= 1 << 7;
    }
    /* graCoding is never written
        if (grayCoding) {
            flags |= 1 << 6;
        }
        */
    DataOutputStream dout = new DataOutputStream(os);
    //ID
    dout.writeByte(0x9B);
    //LENGTH
    dout.writeByte(length - 2);
    //FLAGS
    dout.writeByte(flags);
    //FORMAT
    dout.writeByte(this.colorModel);
    for (int i = 0; i < 3; i++) {
        //RESERVED
        dout.writeByte(0);
    }
    //component sizes
    dout.write(this.bitsPerIDE);
}

14. FieldAccessBCW#getInitMethod()

Project: beetl2.0
File: FieldAccessBCW.java
public byte[] getInitMethod() throws Exception {
    ByteArrayOutputStream bs = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bs);
    //public 
    out.writeShort(1);
    int nameIndex = this.registerUTFString("<init>");
    out.writeShort(nameIndex);
    int descIndex = this.registerUTFString("()V");
    out.writeShort(descIndex);
    //attributeCount
    out.writeShort(1);
    byte[] initCodeAttr = initCodeAttr();
    out.write(initCodeAttr);
    return bs.toByteArray();
}

15. FieldAccessBCW#getProxyMethod()

Project: beetl2.0
File: FieldAccessBCW.java
public byte[] getProxyMethod() throws Exception {
    ByteArrayOutputStream bs = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bs);
    //public 
    out.writeShort(1);
    int nameIndex = this.registerUTFString(this.valueFunction);
    out.writeShort(nameIndex);
    int descIndex = this.registerUTFString(this.valueFunctionDesc);
    out.writeShort(descIndex);
    //attributeCount
    out.writeShort(1);
    byte[] initCodeAttr = proxyCodeAttr();
    out.write(initCodeAttr);
    return bs.toByteArray();
}

16. TopKPhaseMapOutputKey#toBytes()

Project: pinot
File: TopKPhaseMapOutputKey.java
public byte[] toBytes() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    byte[] bytes;
    // dimension name
    bytes = dimensionName.getBytes();
    dos.writeInt(bytes.length);
    dos.write(bytes);
    // dimension value
    bytes = dimensionValue.getBytes();
    dos.writeInt(bytes.length);
    dos.write(bytes);
    baos.close();
    dos.close();
    return baos.toByteArray();
}

17. HttpKnife#partFile()

Project: WeGit
File: HttpKnife.java
/**
	 * Writes a file part of a multipart request body.
	 * 
	 * @param name the form field name
	 * @param fileName the file name reported for the part
	 * @param file the file whose contents are written
	 * @throws IOException
	 */
public HttpKnife partFile(String name, String fileName, File file) throws IOException {
    DataOutputStream dos = new DataOutputStream(openOutput());
    dos.writeBytes("Content-Disposition: form-data; name=\"" + name + "\";filename=\"" + fileName + "\"" + CRLF);
    dos.writeBytes("Content-Type: " + URLConnection.guessContentTypeFromName(fileName));
    dos.writeBytes(CRLF);
    dos.writeBytes(CRLF);
    System.out.println("guessContentTypeFromName " + URLConnection.guessContentTypeFromName(fileName));
    FileInputStream inputStream = new FileInputStream(file);
    byte[] buffer = new byte[4096];
    int bytesRead = -1;
    while ((bytesRead = inputStream.read(buffer)) != -1) {
        dos.write(buffer, 0, bytesRead);
    }
    dos.writeBytes(CRLF);
    inputStream.close();
    return this;
}

18. TestFileCorruption#testLocalFileCorruption()

Project: hadoop-hdfs
File: TestFileCorruption.java
/** check if local FS can handle corrupted blocks properly */
public void testLocalFileCorruption() throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path(System.getProperty("test.build.data"), "corruptFile");
    FileSystem fs = FileSystem.getLocal(conf);
    DataOutputStream dos = fs.create(file);
    dos.writeBytes("original bytes");
    dos.close();
    // Now deliberately corrupt the file
    dos = new DataOutputStream(new FileOutputStream(file.toString()));
    dos.writeBytes("corruption");
    dos.close();
    // Now attempt to read the file
    DataInputStream dis = fs.open(file, 512);
    try {
        System.out.println("A ChecksumException is expected to be logged.");
        dis.readByte();
    } catch (ChecksumException ignore) {
    }
    fs.delete(file, true);
}

19. TestFileCorruption#testLocalFileCorruption()

Project: hadoop-common
File: TestFileCorruption.java
/** check if local FS can handle corrupted blocks properly */
public void testLocalFileCorruption() throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path(System.getProperty("test.build.data"), "corruptFile");
    FileSystem fs = FileSystem.getLocal(conf);
    DataOutputStream dos = fs.create(file);
    dos.writeBytes("original bytes");
    dos.close();
    // Now deliberately corrupt the file
    dos = new DataOutputStream(new FileOutputStream(file.toString()));
    dos.writeBytes("corruption");
    dos.close();
    // Now attempt to read the file
    DataInputStream dis = fs.open(file, 512);
    try {
        System.out.println("A ChecksumException is expected to be logged.");
        dis.readByte();
    } catch (ChecksumException ignore) {
    }
    fs.delete(file, true);
}

20. TestBlockReplacement#replaceBlock()

Project: hadoop-common
File: TestBlockReplacement.java
/* Copy a block from sourceProxy to destination. If the block becomes
   * over-replicated, preferably remove it from source.
   * 
   * Return true if a block is successfully copied; otherwise false.
   */
private boolean replaceBlock(Block block, DatanodeInfo source, DatanodeInfo sourceProxy, DatanodeInfo destination) throws IOException {
    Socket sock = new Socket();
    sock.connect(NetUtils.createSocketAddr(destination.getName()), HdfsConstants.READ_TIMEOUT);
    sock.setKeepAlive(true);
    // sendRequest
    DataOutputStream out = new DataOutputStream(sock.getOutputStream());
    out.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
    out.writeByte(DataTransferProtocol.OP_REPLACE_BLOCK);
    out.writeLong(block.getBlockId());
    out.writeLong(block.getGenerationStamp());
    Text.writeString(out, source.getStorageID());
    sourceProxy.write(out);
    out.flush();
    // receiveResponse
    DataInputStream reply = new DataInputStream(sock.getInputStream());
    short status = reply.readShort();
    if (status == DataTransferProtocol.OP_STATUS_SUCCESS) {
        return true;
    }
    return false;
}

21. TestFileCorruption#testFileCorruptionHelper()

Project: hadoop-20
File: TestFileCorruption.java
private void testFileCorruptionHelper(Configuration conf) throws Exception {
    Path file = new Path(TEST_ROOT_DIR, "corruptFile");
    FileSystem fs = FileSystem.getLocal(conf);
    DataOutputStream dos = fs.create(file);
    dos.writeBytes("original bytes");
    dos.close();
    // Now deliberately corrupt the file
    dos = new DataOutputStream(new FileOutputStream(file.toString()));
    dos.writeBytes("corruption");
    dos.close();
    // Now attempt to read the file
    DataInputStream dis = fs.open(file, 512);
    try {
        System.out.println("A ChecksumException is expected to be logged.");
        dis.readByte();
    } catch (ChecksumException ignore) {
    }
    fs.delete(file, true);
}

22. NIODataInputStreamTest#testReadUTF()

Project: cassandra
File: NIODataInputStreamTest.java
@SuppressWarnings("resource")
@Test
public void testReadUTF() throws Exception {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream daos = new DataOutputStream(baos);
    String simple = "foobar42";
    assertEquals(2, BufferedDataOutputStreamTest.twoByte.getBytes(Charsets.UTF_8).length);
    assertEquals(3, BufferedDataOutputStreamTest.threeByte.getBytes(Charsets.UTF_8).length);
    assertEquals(4, BufferedDataOutputStreamTest.fourByte.getBytes(Charsets.UTF_8).length);
    daos.writeUTF(simple);
    daos.writeUTF(BufferedDataOutputStreamTest.twoByte);
    daos.writeUTF(BufferedDataOutputStreamTest.threeByte);
    daos.writeUTF(BufferedDataOutputStreamTest.fourByte);
    NIODataInputStream is = new NIODataInputStream(wrap(baos.toByteArray()), 4096);
    assertEquals(simple, is.readUTF());
    assertEquals(BufferedDataOutputStreamTest.twoByte, is.readUTF());
    assertEquals(BufferedDataOutputStreamTest.threeByte, is.readUTF());
    assertEquals(BufferedDataOutputStreamTest.fourByte, is.readUTF());
}

23. Manifest#serialize()

Project: buck
File: Manifest.java
/**
   * Serializes the manifest to the given {@link OutputStream}.
   */
public void serialize(OutputStream rawOutput) throws IOException {
    DataOutputStream output = new DataOutputStream(rawOutput);
    output.writeInt(VERSION);
    output.writeInt(headers.size());
    for (String header : headers) {
        output.writeUTF(header);
    }
    output.writeInt(hashes.size());
    for (Pair<Integer, HashCode> hash : hashes) {
        output.writeInt(hash.getFirst());
        output.writeUTF(hash.getSecond().toString());
    }
    output.writeInt(entries.size());
    for (Pair<RuleKey, int[]> entry : entries) {
        output.writeInt(entry.getSecond().length);
        for (int hashIndex : entry.getSecond()) {
            output.writeInt(hashIndex);
        }
        output.writeUTF(entry.getFirst().toString());
    }
}

24. BuildCheckpoints#writeBinaryCheckpoints()

Project: bitcoinj
File: BuildCheckpoints.java
private static void writeBinaryCheckpoints(TreeMap<Integer, StoredBlock> checkpoints, File file) throws Exception {
    final FileOutputStream fileOutputStream = new FileOutputStream(file, false);
    MessageDigest digest = Sha256Hash.newDigest();
    final DigestOutputStream digestOutputStream = new DigestOutputStream(fileOutputStream, digest);
    digestOutputStream.on(false);
    final DataOutputStream dataOutputStream = new DataOutputStream(digestOutputStream);
    dataOutputStream.writeBytes("CHECKPOINTS 1");
    // Number of signatures to read. Do this later.
    dataOutputStream.writeInt(0);
    digestOutputStream.on(true);
    dataOutputStream.writeInt(checkpoints.size());
    ByteBuffer buffer = ByteBuffer.allocate(StoredBlock.COMPACT_SERIALIZED_SIZE);
    for (StoredBlock block : checkpoints.values()) {
        block.serializeCompact(buffer);
        dataOutputStream.write(buffer.array());
        buffer.position(0);
    }
    dataOutputStream.close();
    Sha256Hash checkpointsHash = Sha256Hash.wrap(digest.digest());
    System.out.println("Hash of checkpoints data is " + checkpointsHash);
    digestOutputStream.close();
    fileOutputStream.close();
    System.out.println("Checkpoints written to '" + file.getCanonicalPath() + "'.");
}

25. FieldAccessBCW#initCodes()

Project: beetl2.0
File: FieldAccessBCW.java
public byte[] initCodes() throws Exception {
    ByteArrayOutputStream bs = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bs);
    out.writeByte(ALOAD_0);
    out.writeByte(INVOKE_SPECIAL);
    int index = registerMethod(this.parentCls, "<init>", "()V");
    out.writeShort(index);
    out.writeByte(RETURN - 256);
    return bs.toByteArray();
}

26. TestBlockReplacement#replaceBlock()

Project: hadoop-hdfs
File: TestBlockReplacement.java
/* Copy a block from sourceProxy to destination. If the block becomes
   * over-replicated, preferably remove it from source.
   * 
   * Return true if a block is successfully copied; otherwise false.
   */
private boolean replaceBlock(Block block, DatanodeInfo source, DatanodeInfo sourceProxy, DatanodeInfo destination) throws IOException {
    Socket sock = new Socket();
    sock.connect(NetUtils.createSocketAddr(destination.getName()), HdfsConstants.READ_TIMEOUT);
    sock.setKeepAlive(true);
    // sendRequest
    DataOutputStream out = new DataOutputStream(sock.getOutputStream());
    out.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
    REPLACE_BLOCK.write(out);
    out.writeLong(block.getBlockId());
    out.writeLong(block.getGenerationStamp());
    Text.writeString(out, source.getStorageID());
    sourceProxy.write(out);
    AccessToken.DUMMY_TOKEN.write(out);
    out.flush();
    // receiveResponse
    DataInputStream reply = new DataInputStream(sock.getInputStream());
    return DataTransferProtocol.Status.read(reply) == SUCCESS;
}

27. Learner#request()

Project: zookeeper
File: Learner.java
/**
     * send a request packet to the leader
     *
     * @param request
     *                the request from the client
     * @throws IOException
     */
void request(Request request) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream oa = new DataOutputStream(baos);
    oa.writeLong(request.sessionId);
    oa.writeInt(request.cxid);
    oa.writeInt(request.type);
    if (request.request != null) {
        request.request.rewind();
        int len = request.request.remaining();
        byte b[] = new byte[len];
        request.request.get(b);
        request.request.rewind();
        oa.write(b);
    }
    oa.close();
    QuorumPacket qp = new QuorumPacket(Leader.REQUEST, -1, baos.toByteArray(), request.authInfo);
    writePacket(qp, true);
}

28. PageIoTest#testRead()

Project: JDBM3
File: PageIoTest.java
/**
     * Test reading
     */
public void testRead() throws Exception {
    ByteArrayOutputStream bos = new ByteArrayOutputStream(100);
    DataOutputStream os = new DataOutputStream(bos);
    os.writeShort(SHORT_VALUE);
    os.writeLong(LONG_VALUE);
    os.writeInt(INT_VALUE);
    os.writeLong(LONG_VALUE2);
    byte[] data = bos.toByteArray();
    PageIo test = new PageIo(0, data);
    assertEquals("short", SHORT_VALUE, test.readShort(0));
    assertEquals("long", LONG_VALUE, test.readLong(2));
    assertEquals("int", INT_VALUE, test.readInt(10));
    assertEquals("long", LONG_VALUE2, test.readLong(14));
}

29. StoringAndRecoveringData#main()

Project: java-core-learning-example
File: StoringAndRecoveringData.java
@SuppressWarnings("resource")
public static void main(String[] args) throws IOException {
    // write data
    DataOutputStream outputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream("data.txt")));
    outputStream.writeDouble(3.1415926);
    outputStream.writeUTF("??");
    outputStream.writeInt(1);
    outputStream.close();
    // read the data back
    DataInputStream inputStream = new DataInputStream(new BufferedInputStream(new FileInputStream("data.txt")));
    System.out.println(inputStream.readDouble());
    System.out.println(inputStream.readUTF());
    System.out.println(inputStream.readInt());
}

30. FeatureTypeAnalyzer#write()

Project: incubator-joshua
File: FeatureTypeAnalyzer.java
public void write(String file_name) throws IOException {
    File out_file = new File(file_name);
    BufferedOutputStream buf_stream = new BufferedOutputStream(new FileOutputStream(out_file));
    DataOutputStream out_stream = new DataOutputStream(buf_stream);
    buildFeatureMap();
    getIdEncoder().writeState(out_stream);
    out_stream.writeBoolean(labeled);
    out_stream.writeInt(types.size());
    for (int index = 0; index < types.size(); index++) types.get(index).encoder.writeState(out_stream);
    out_stream.writeInt(featureToType.size());
    for (int feature_id : featureToType.keySet()) {
        if (labeled)
            out_stream.writeUTF(Vocabulary.word(feature_id));
        else
            out_stream.writeInt(feature_id);
        out_stream.writeInt(featureIdMap.get(feature_id));
        out_stream.writeInt(featureToType.get(feature_id));
    }
    out_stream.close();
}

31. TestTFileStreams#testFailureValueTooLong()

Project: apex-malhar
File: TestTFileStreams.java
public void testFailureValueTooLong() throws IOException {
    if (skip)
        return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    outKey.write("key0".getBytes());
    outKey.close();
    DataOutputStream outValue = writer.prepareAppendValue(3);
    try {
        outValue.write("value0".getBytes());
        outValue.close();
        Assert.fail("Value is longer than expected.");
    } catch (Exception e) {
    }
    try {
        outKey.close();
        outKey.close();
    } catch (Exception e) {
        Assert.fail("Second or more close() should have no effect.");
    }
}

32. TestTFileStreams#testFailureKeyTooShort()

Project: apex-malhar
File: TestTFileStreams.java
public void testFailureKeyTooShort() throws IOException {
    if (skip)
        return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    outKey.write("key0".getBytes());
    outKey.close();
    DataOutputStream outValue = writer.prepareAppendValue(15);
    try {
        outValue.write("value0".getBytes());
        outValue.close();
        Assert.fail("Value is shorter than expected.");
    } catch (Exception e) {
    } finally {
    }
}

33. TestDTFileByteArrays#testFailureWriteRecordAfterMetaBlock()

Project: apex-malhar
File: TestDTFileByteArrays.java
@Test
public void testFailureWriteRecordAfterMetaBlock() throws IOException {
    if (skip) {
        return;
    }
    // write a key/value first
    writer.append("keyX".getBytes(), "valueX".getBytes());
    // create a new metablock
    DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
    outMeta.write(123);
    outMeta.write("dummy".getBytes());
    outMeta.close();
    // add more key/value
    try {
        writer.append("keyY".getBytes(), "valueY".getBytes());
        Assert.fail("Cannot add key/value after start adding meta blocks.");
    } catch (Exception e) {
    }
    closeOutput();
}

34. TestDTFileByteArrays#testFailureWriteMetaBlocksWithSameName()

Project: apex-malhar
File: TestDTFileByteArrays.java
@Test
public void testFailureWriteMetaBlocksWithSameName() throws IOException {
    if (skip) {
        return;
    }
    writer.append("keyX".getBytes(), "valueX".getBytes());
    // create a new metablock
    DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
    outMeta.write(123);
    outMeta.write("foo".getBytes());
    outMeta.close();
    // add the same metablock
    try {
        writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
        Assert.fail("Cannot create metablocks with the same name.");
    } catch (Exception e) {
    }
    closeOutput();
}

35. StorageHelper#logCommit()

Project: aegisthus
File: StorageHelper.java
public void logCommit(String file) throws IOException {
    Path log = commitPath(getTaskId());
    if (debug) {
        LOG.info(String.format("logging (%s) to commit log (%s)", file, log.toUri().toString()));
    }
    FileSystem fs = log.getFileSystem(config);
    DataOutputStream os = null;
    if (fs.exists(log)) {
        os = fs.append(log);
    } else {
        os = fs.create(log);
    }
    os.writeBytes(file);
    os.write('\n');
    os.close();
}

36. DiskBackedPartitionStore#addToOOCPartition()

Project: giraph
File: DiskBackedPartitionStore.java
/**
   * Append a partition on disk at the end of the file. Expects the caller
   * to hold the global lock.
   *
   * @param partition The partition
   * @throws IOException
   */
private void addToOOCPartition(Partition<I, V, E, M> partition) throws IOException {
    Integer id = partition.getId();
    Integer count = onDisk.get(id);
    onDisk.put(id, count + (int) partition.getVertexCount());
    File file = new File(getVerticesPath(id));
    DataOutputStream outputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file, true)));
    for (Vertex<I, V, E, M> vertex : partition) {
        writeVertexData(outputStream, vertex);
    }
    outputStream.close();
    file = new File(getEdgesPath(id));
    outputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file, true)));
    for (Vertex<I, V, E, M> vertex : partition) {
        writeVertexEdges(outputStream, vertex);
    }
    outputStream.close();
}

37. SplitFileInserterSegmentStorage#readKey()

Project: fred
File: SplitFileInserterSegmentStorage.java
ClientCHK readKey(int blockNumber) throws IOException, MissingKeyException {
    byte[] buf = parent.innerReadSegmentKey(segNo, blockNumber);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    dos.writeInt(segNo);
    dos.writeInt(blockNumber);
    dos.close();
    byte[] prefix = baos.toByteArray();
    byte[] checkBuf = new byte[prefix.length + buf.length];
    System.arraycopy(prefix, 0, checkBuf, 0, prefix.length);
    int checksumLength = parent.checker.checksumLength();
    System.arraycopy(buf, 0, checkBuf, prefix.length, buf.length - checksumLength);
    byte[] checksum = Arrays.copyOfRange(buf, buf.length - checksumLength, buf.length);
    if (parent.checker.checkChecksum(checkBuf, 0, checkBuf.length, checksum))
        throw new MissingKeyException();
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buf));
    byte b = dis.readByte();
    if (b != 1)
        throw new MissingKeyException();
    ClientCHK key = innerReadKey(dis);
    setHasKey(blockNumber);
    if (logDEBUG)
        Logger.debug(this, "Returning " + key);
    return key;
}

38. IndexInfo#persistDeletions()

Project: community-edition
File: IndexInfo.java
/**
     * @param id String
     * @param toDelete Set<String>
     * @param fileName String
     * @throws IOException
     * @throws FileNotFoundException
     */
private void persistDeletions(String id, Set<String> toDelete, String fileName) throws IOException, FileNotFoundException {
    File location = new File(indexDirectory, id).getCanonicalFile();
    if (!location.exists()) {
        if (!location.mkdirs()) {
            throw new IndexerException("Failed to make index directory " + location);
        }
    }
    // Write deletions
    DataOutputStream os = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(new File(location, fileName).getCanonicalFile())));
    os.writeInt(toDelete.size());
    for (String ref : toDelete) {
        os.writeUTF(ref);
    }
    os.flush();
    os.close();
}

39. FingerClient#getInputStream()

Project: commons-net
File: FingerClient.java
/***
     * Fingers a user and returns the input stream from the network connection
     * of the finger query.  You must first connect to a finger server before
     * calling this method, and you should disconnect after finishing reading
     * the stream.
     *
     * @param longOutput Set to true if long output is requested, false if not.
     * @param username  The name of the user to finger.
     * @param encoding the character encoding that should be used for the query,
     *        null for the platform's default encoding
     * @return The InputStream of the network connection of the finger query.
     *         Can be read to obtain finger results.
     * @throws IOException If an I/O error occurs during the operation.
     ***/
public InputStream getInputStream(boolean longOutput, String username, String encoding) throws IOException {
    DataOutputStream output;
    StringBuilder buffer = new StringBuilder(64);
    if (longOutput) {
        buffer.append(__LONG_FLAG);
    }
    buffer.append(username);
    buffer.append(SocketClient.NETASCII_EOL);
    // Note: Charsets.toCharset() returns the platform default for null input
    // Java 1.6 can use charset directly
    byte[] encodedQuery = buffer.toString().getBytes(Charsets.toCharset(encoding).name());
    output = new DataOutputStream(new BufferedOutputStream(_output_, 1024));
    output.write(encodedQuery, 0, encodedQuery.length);
    output.flush();
    return _input_;
}

40. Learner#validateSession()

Project: zookeeper
File: Learner.java
/**
     * validate a session for a client
     *
     * @param clientId
     *                the client to be revalidated
     * @param timeout
     *                the timeout for which the session is valid
     * @return
     * @throws IOException
     */
void validateSession(ServerCnxn cnxn, long clientId, int timeout) throws IOException {
    LOG.info("Revalidating client: 0x" + Long.toHexString(clientId));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    dos.writeLong(clientId);
    dos.writeInt(timeout);
    dos.close();
    QuorumPacket qp = new QuorumPacket(Leader.REVALIDATE, -1, baos.toByteArray(), null);
    pendingRevalidations.put(clientId, cnxn);
    if (LOG.isTraceEnabled()) {
        ZooTrace.logTraceMessage(LOG, ZooTrace.SESSION_TRACE_MASK, "To validate session 0x" + Long.toHexString(clientId));
    }
    writePacket(qp, true);
}

41. FastBlobStateEngine#serializeTo()

Project: zeno
File: FastBlobStateEngine.java
/**
     *  Serializes this state engine to the stream.  A state engine deserialized from this stream will be in exactly the same state as the serialized state engine.
     */
public void serializeTo(OutputStream os) throws IOException {
    DataOutputStream dos = new DataOutputStream(os);
    dos.writeInt(STATE_ENGINE_SERIALIZATION_FORMAT_VERSION);
    dos.writeUTF(latestVersion);
    dos.writeShort(headerTags.size());
    for (Map.Entry<String, String> headerTag : headerTags.entrySet()) {
        dos.writeUTF(headerTag.getKey());
        dos.writeUTF(headerTag.getValue());
    }
    VarInt.writeVInt(dos, numberOfConfigurations);
    VarInt.writeVInt(dos, orderedSerializationStates.size());
    for (FastBlobTypeSerializationState<?> typeState : orderedSerializationStates) {
        dos.writeUTF(typeState.getSchema().getName());
        typeState.serializeTo(dos);
    }
}

42. BloomFilter#writeTo()

Project: voltdb
File: BloomFilter.java
/**
   * Writes this {@code BloomFilter} to an output stream, with a custom format (not Java
   * serialization). This has been measured to save at least 400 bytes compared to regular
   * serialization.
   *
   * <p>Use {@linkplain #readFrom(InputStream, Funnel)} to reconstruct the written BloomFilter.
   */
public void writeTo(OutputStream out) throws IOException {
    /*
     * Serial form:
     * 1 signed byte for the strategy
     * 1 unsigned byte for the number of hash functions
     * 1 big endian int, the number of longs in our bitset
     * N big endian longs of our bitset
     */
    DataOutputStream dout = new DataOutputStream(out);
    dout.writeByte(SignedBytes.checkedCast(strategy.ordinal()));
    // note: checked at the c'tor
    dout.writeByte(UnsignedBytes.checkedCast(numHashFunctions));
    dout.writeInt(bits.data.length);
    for (long value : bits.data) {
        dout.writeLong(value);
    }
}
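
The serial form documented in the comment above maps one-to-one onto DataInputStream calls, which are big-endian by definition. A hedged read-back sketch (illustrative only; the real reconstruction is BloomFilter.readFrom(InputStream, Funnel), and the class and method names here are made up):

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

public class BloomFilterSerialFormReader {

    public static long[] readBitSetData(InputStream in) throws IOException {
        DataInputStream din = new DataInputStream(in);
        int strategyOrdinal = din.readByte();           // 1 signed byte for the strategy
        int numHashFunctions = din.readUnsignedByte();  // 1 unsigned byte for the number of hash functions
        int numLongs = din.readInt();                   // 1 big-endian int: number of longs in the bitset
        long[] data = new long[numLongs];
        for (int i = 0; i < numLongs; i++) {
            data[i] = din.readLong();                   // N big-endian longs of the bitset
        }
        return data;
    }
}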

43. AggregationPhaseMapOutputKey#toBytes()

Project: pinot
File: AggregationPhaseMapOutputKey.java
public byte[] toBytes() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    byte[] bytes;
    // time
    dos.writeLong(time);
    // dimensions size
    dos.writeInt(dimensions.size());
    // dimension values
    for (String dimension : dimensions) {
        bytes = dimension.getBytes();
        dos.writeInt(bytes.length);
        dos.write(bytes);
    }
    baos.close();
    dos.close();
    return baos.toByteArray();
}

44. SegmentIndexCreationDriverImpl#persistCreationMeta()

Project: pinot
File: SegmentIndexCreationDriverImpl.java
/**
   * Writes segment creation metadata to disk.
   */
void persistCreationMeta(File outputDir, long crc) throws IOException {
    final File crcFile = new File(outputDir, V1Constants.SEGMENT_CREATION_META);
    final DataOutputStream out = new DataOutputStream(new FileOutputStream(crcFile));
    out.writeLong(crc);
    long creationTime = System.currentTimeMillis();
    // Use the creation time from the configuration if it exists and is positive
    try {
        long configCreationTime = Long.parseLong(config.getCreationTime());
        if (0L < configCreationTime) {
            creationTime = configCreationTime;
        }
    } catch (Exception nfe) {
    }
    out.writeLong(creationTime);
    out.close();
}

45. ParquetInputSplit#write()

Project: parquet-mr
File: ParquetInputSplit.java
/**
   * {@inheritDoc}
   */
@Override
public void write(DataOutput hout) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(new GZIPOutputStream(baos));
    super.write(out);
    out.writeLong(end);
    out.writeBoolean(rowGroupOffsets != null);
    if (rowGroupOffsets != null) {
        out.writeInt(rowGroupOffsets.length);
        for (long o : rowGroupOffsets) {
            out.writeLong(o);
        }
    }
    out.close();
    writeArray(hout, baos.toByteArray());
}

46. ProxyGenerator#generateConstructor()

Project: openjdk
File: ProxyGenerator.java
/**
     * Generate the constructor method for the proxy class.
     */
private MethodInfo generateConstructor() throws IOException {
    MethodInfo minfo = new MethodInfo("<init>", "(Ljava/lang/reflect/InvocationHandler;)V", ACC_PUBLIC);
    DataOutputStream out = new DataOutputStream(minfo.code);
    code_aload(0, out);
    code_aload(1, out);
    out.writeByte(opc_invokespecial);
    out.writeShort(cp.getMethodRef(superclassName, "<init>", "(Ljava/lang/reflect/InvocationHandler;)V"));
    out.writeByte(opc_return);
    minfo.maxStack = 10;
    minfo.maxLocals = 2;
    minfo.declaredExceptions = new short[0];
    return minfo;
}

47. TempEclipseWorkspace#writeLocationFile()

Project: maven-plugins
File: TempEclipseWorkspace.java
/**
     * Given the relative path from the workspace to the project to link, use the basename as the project name and link
     * this project to the fully qualified path anchored at workspaceLocation.
     * 
     * @param projectToLink The project to link
     * @throws MalformedURLException
     * @throws FileNotFoundException
     * @throws IOException
     */
private void writeLocationFile(String projectToLink) throws IOException {
    File projectToLinkAsRelativeFile = new File(projectToLink);
    File projectWorkspaceDirectory = new File(workspaceLocation, projectToLinkAsRelativeFile.getPath()).getCanonicalFile();
    String uriToProjectWorkspaceDirectory = "URI//" + projectWorkspaceDirectory.toURI().toURL().toString();
    File metaDataPlugins = new File(workspaceLocation, ReadWorkspaceLocations.METADATA_PLUGINS_ORG_ECLIPSE_CORE_RESOURCES_PROJECTS);
    File projectMetaDataDirectory = new File(metaDataPlugins, projectToLinkAsRelativeFile.getName());
    File locationFile = new File(projectMetaDataDirectory, ReadWorkspaceLocations.BINARY_LOCATION_FILE);
    DataOutputStream dataOutputStream = new DataOutputStream(new FileOutputStream(locationFile));
    dataOutputStream.write(ILocalStoreConstants.BEGIN_CHUNK);
    dataOutputStream.writeUTF(uriToProjectWorkspaceDirectory);
    dataOutputStream.write(ILocalStoreConstants.END_CHUNK);
    IOUtil.close(dataOutputStream);
}

48. ConnectionCostsCompiler#compile()

Project: kuromoji
File: ConnectionCostsCompiler.java
@Override
public void compile() throws IOException {
    DataOutputStream dataOutput = new DataOutputStream(new BufferedOutputStream(output));
    dataOutput.writeInt(cardinality);
    dataOutput.writeInt(bufferSize * SHORT_BYTES);
    ByteBuffer byteBuffer = ByteBuffer.allocate(costs.array().length * SHORT_BYTES);
    for (short cost : this.costs.array()) {
        byteBuffer.putShort(cost);
    }
    WritableByteChannel channel = Channels.newChannel(dataOutput);
    byteBuffer.flip();
    channel.write(byteBuffer);
    dataOutput.close();
}

49. ProxyGenerator#generateConstructor()

Project: jdk7u-jdk
File: ProxyGenerator.java
/**
     * Generate the constructor method for the proxy class.
     */
private MethodInfo generateConstructor() throws IOException {
    MethodInfo minfo = new MethodInfo("<init>", "(Ljava/lang/reflect/InvocationHandler;)V", ACC_PUBLIC);
    DataOutputStream out = new DataOutputStream(minfo.code);
    code_aload(0, out);
    code_aload(1, out);
    out.writeByte(opc_invokespecial);
    out.writeShort(cp.getMethodRef(superclassName, "<init>", "(Ljava/lang/reflect/InvocationHandler;)V"));
    out.writeByte(opc_return);
    minfo.maxStack = 10;
    minfo.maxLocals = 2;
    minfo.declaredExceptions = new short[0];
    return minfo;
}

50. QuantizerConfiguration#write()

Project: incubator-joshua
File: QuantizerConfiguration.java
public void write(String file_name) throws IOException {
    File vocab_file = new File(file_name);
    DataOutputStream out_stream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(vocab_file)));
    out_stream.writeInt(quantizers.size());
    for (int index = 0; index < quantizers.size(); index++) quantizers.get(index).writeState(out_stream);
    out_stream.writeInt(quantizerByFeatureId.size());
    for (int feature_id : quantizerByFeatureId.keySet()) {
        out_stream.writeUTF(Vocabulary.word(feature_id));
        out_stream.writeInt(quantizerByFeatureId.get(feature_id));
    }
    out_stream.close();
}

51. ProxyGenerator#generateConstructor()

Project: HotswapAgent
File: ProxyGenerator.java
/**
	 * Generate the constructor method for the proxy class.
	 */
private MethodInfo generateConstructor() throws IOException {
    MethodInfo minfo = new MethodInfo("<init>", "(Ljava/lang/reflect/InvocationHandler;)V", ACC_PUBLIC);
    DataOutputStream out = new DataOutputStream(minfo.code);
    code_aload(0, out);
    code_aload(1, out);
    out.writeByte(opc_invokespecial);
    out.writeShort(cp.getMethodRef(superclassName, "<init>", "(Ljava/lang/reflect/InvocationHandler;)V"));
    out.writeByte(opc_return);
    minfo.maxStack = 10;
    minfo.maxLocals = 2;
    minfo.declaredExceptions = new short[0];
    return minfo;
}

52. CtClassJavaProxyGenerator#generateConstructor()

Project: HotswapAgent
File: CtClassJavaProxyGenerator.java
/**
	 * Generate the constructor method for the proxy class.
	 */
private MethodInfo generateConstructor() throws IOException {
    MethodInfo minfo = new MethodInfo("<init>", "(Ljava/lang/reflect/InvocationHandler;)V", ACC_PUBLIC);
    DataOutputStream out = new DataOutputStream(minfo.code);
    code_aload(0, out);
    code_aload(1, out);
    out.writeByte(opc_invokespecial);
    out.writeShort(cp.getMethodRef(superclassName, "<init>", "(Ljava/lang/reflect/InvocationHandler;)V"));
    out.writeByte(opc_return);
    minfo.maxStack = 10;
    minfo.maxLocals = 2;
    minfo.declaredExceptions = new short[0];
    return minfo;
}

53. TestMultipleArchiveFiles#createInput()

Project: hadoop-mapreduce
File: TestMultipleArchiveFiles.java
protected void createInput() throws IOException {
    DataOutputStream dos = fileSys.create(new Path(INPUT_FILE));
    String inputFileString = "symlink1/cacheArchive1\nsymlink2/cacheArchive2";
    dos.write(inputFileString.getBytes("UTF-8"));
    dos.close();
    DataOutputStream out = fileSys.create(new Path(CACHE_ARCHIVE_1.toString()));
    ZipOutputStream zos = new ZipOutputStream(out);
    ZipEntry ze = new ZipEntry(CACHE_FILE_1.toString());
    zos.putNextEntry(ze);
    zos.write(input.getBytes("UTF-8"));
    zos.closeEntry();
    zos.close();
    out = fileSys.create(new Path(CACHE_ARCHIVE_2.toString()));
    zos = new ZipOutputStream(out);
    ze = new ZipEntry(CACHE_FILE_2.toString());
    zos.putNextEntry(ze);
    zos.write(input.getBytes("UTF-8"));
    zos.closeEntry();
    zos.close();
}

54. TestMultipleArchiveFiles#createInput()

Project: hadoop-common
File: TestMultipleArchiveFiles.java
protected void createInput() throws IOException {
    DataOutputStream dos = fileSys.create(new Path(INPUT_FILE));
    String inputFileString = "symlink1/cacheArchive1\nsymlink2/cacheArchive2";
    dos.write(inputFileString.getBytes("UTF-8"));
    dos.close();
    DataOutputStream out = fileSys.create(new Path(CACHE_ARCHIVE_1.toString()));
    ZipOutputStream zos = new ZipOutputStream(out);
    ZipEntry ze = new ZipEntry(CACHE_FILE_1.toString());
    zos.putNextEntry(ze);
    zos.write(input.getBytes("UTF-8"));
    zos.closeEntry();
    zos.close();
    out = fileSys.create(new Path(CACHE_ARCHIVE_2.toString()));
    zos = new ZipOutputStream(out);
    ze = new ZipEntry(CACHE_FILE_2.toString());
    zos.putNextEntry(ze);
    zos.write(input.getBytes("UTF-8"));
    zos.closeEntry();
    zos.close();
}

55. TestTFileStreams#testFailureValueTooLong()

Project: hadoop-20
File: TestTFileStreams.java
public void testFailureValueTooLong() throws IOException {
    if (skip)
        return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    outKey.write("key0".getBytes());
    outKey.close();
    DataOutputStream outValue = writer.prepareAppendValue(3);
    try {
        outValue.write("value0".getBytes());
        outValue.close();
        Assert.fail("Value is longer than expected.");
    } catch (Exception e) {
    }
    try {
        outKey.close();
        outKey.close();
    } catch (Exception e) {
        Assert.fail("Second or more close() should have no effect.");
    }
}

56. TestTFileStreams#testFailureKeyTooShort()

Project: hadoop-20
File: TestTFileStreams.java
public void testFailureKeyTooShort() throws IOException {
    if (skip)
        return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    outKey.write("key0".getBytes());
    outKey.close();
    DataOutputStream outValue = writer.prepareAppendValue(15);
    try {
        outValue.write("value0".getBytes());
        outValue.close();
        Assert.fail("Value is shorter than expected.");
    } catch (Exception e) {
    } finally {
    }
}

57. TestTFileByteArrays#testFailureWriteRecordAfterMetaBlock()

Project: hadoop-20
File: TestTFileByteArrays.java
@Test
public void testFailureWriteRecordAfterMetaBlock() throws IOException {
    if (skip)
        return;
    // write a key/value first
    writer.append("keyX".getBytes(), "valueX".getBytes());
    // create a new metablock
    DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
    outMeta.write(123);
    outMeta.write("dummy".getBytes());
    outMeta.close();
    // add more key/value
    try {
        writer.append("keyY".getBytes(), "valueY".getBytes());
        Assert.fail("Cannot add key/value after start adding meta blocks.");
    } catch (Exception e) {
    }
    closeOutput();
}

58. TestTFileByteArrays#testFailureWriteMetaBlocksWithSameName()

Project: hadoop-20
File: TestTFileByteArrays.java
@Test
public void testFailureWriteMetaBlocksWithSameName() throws IOException {
    if (skip)
        return;
    writer.append("keyX".getBytes(), "valueX".getBytes());
    // create a new metablock
    DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
    outMeta.write(123);
    outMeta.write("foo".getBytes());
    outMeta.close();
    // add the same metablock
    try {
        writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
        Assert.fail("Cannot create metablocks with the same name.");
    } catch (Exception e) {
    }
    closeOutput();
}

59. TestMultipleArchiveFiles#createInput()

Project: hadoop-20
File: TestMultipleArchiveFiles.java
protected void createInput() throws IOException {
    DataOutputStream dos = fileSys.create(new Path(INPUT_FILE));
    String inputFileString = "symlink1/cacheArchive1\nsymlink2/cacheArchive2";
    dos.write(inputFileString.getBytes("UTF-8"));
    dos.close();
    DataOutputStream out = fileSys.create(new Path(CACHE_ARCHIVE_1.toString()));
    ZipOutputStream zos = new ZipOutputStream(out);
    ZipEntry ze = new ZipEntry(CACHE_FILE_1.toString());
    zos.putNextEntry(ze);
    zos.write(input.getBytes("UTF-8"));
    zos.closeEntry();
    zos.close();
    out = fileSys.create(new Path(CACHE_ARCHIVE_2.toString()));
    zos = new ZipOutputStream(out);
    ze = new ZipEntry(CACHE_FILE_2.toString());
    zos.putNextEntry(ze);
    zos.write(input.getBytes("UTF-8"));
    zos.closeEntry();
    zos.close();
}

60. TestPreTransactionalServerLogReader#endRoll()

Project: hadoop-20
File: TestPreTransactionalServerLogReader.java
private void endRoll(File editsDir) throws IOException {
    File edits = getFileWithCurrent(editsDir, "edits");
    File editsNew = getFileWithCurrent(editsDir, "edits.new");
    File fstime = getFileWithCurrent(editsDir, "fstime");
    Assert.assertTrue(editsNew.exists());
    Assert.assertTrue(fstime.exists());
    if (!editsNew.renameTo(edits)) {
        edits.delete();
        if (!editsNew.renameTo(edits))
            throw new IOException();
    }
    fstime.delete();
    DataOutputStream fstimeOutput = new DataOutputStream(new FileOutputStream(fstime));
    fstimeOutput.writeLong(System.currentTimeMillis());
    fstimeOutput.flush();
    fstimeOutput.close();
}

61. BloomFilter#writeTo()

Project: guava
File: BloomFilter.java
/**
   * Writes this {@code BloomFilter} to an output stream, with a custom format (not Java
   * serialization). This has been measured to save at least 400 bytes compared to regular
   * serialization.
   *
   * <p>Use {@linkplain #readFrom(InputStream, Funnel)} to reconstruct the written BloomFilter.
   */
public void writeTo(OutputStream out) throws IOException {
    // Serial form:
    // 1 signed byte for the strategy
    // 1 unsigned byte for the number of hash functions
    // 1 big endian int, the number of longs in our bitset
    // N big endian longs of our bitset
    DataOutputStream dout = new DataOutputStream(out);
    dout.writeByte(SignedBytes.checkedCast(strategy.ordinal()));
    // note: checked at the c'tor
    dout.writeByte(UnsignedBytes.checkedCast(numHashFunctions));
    dout.writeInt(bits.data.length);
    for (long value : bits.data) {
        dout.writeLong(value);
    }
}

62. LdifReaderTest#createFile()

Project: directory-shared
File: LdifReaderTest.java
private static File createFile(String name, byte[] data) throws IOException {
    File jpeg = File.createTempFile(name, "jpg");
    jpeg.createNewFile();
    DataOutputStream os = new DataOutputStream(new FileOutputStream(jpeg));
    os.write(data);
    os.close();
    // This file will be deleted when the JVM exits.
    jpeg.deleteOnExit();
    return jpeg;
}

63. MapCoder#encode()

Project: DataflowJavaSDK
File: MapCoder.java
@Override
public void encode(Map<K, V> map, OutputStream outStream, Context context) throws IOException, CoderException {
    if (map == null) {
        throw new CoderException("cannot encode a null Map");
    }
    DataOutputStream dataOutStream = new DataOutputStream(outStream);
    dataOutStream.writeInt(map.size());
    for (Entry<K, V> entry : map.entrySet()) {
        keyCoder.encode(entry.getKey(), outStream, context.nested());
        valueCoder.encode(entry.getValue(), outStream, context.nested());
    }
    dataOutStream.flush();
}

64. SevenZOutputFile#writeFileNames()

Project: commons-compress
File: SevenZOutputFile.java
private void writeFileNames(final DataOutput header) throws IOException {
    header.write(NID.kName);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final DataOutputStream out = new DataOutputStream(baos);
    out.write(0);
    for (final SevenZArchiveEntry entry : files) {
        out.write(entry.getName().getBytes("UTF-16LE"));
        out.writeShort(0);
    }
    out.flush();
    final byte[] contents = baos.toByteArray();
    writeUint64(header, contents.length);
    header.write(contents);
}

65. TestTFileStreams#testFailureCloseKeyStreamManyTimesInWriter()

Project: apex-malhar
File: TestTFileStreams.java
public void testFailureCloseKeyStreamManyTimesInWriter() throws IOException {
    if (skip)
        return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    try {
        outKey.write("key0".getBytes());
        outKey.close();
    } catch (Exception e) {
    } finally {
        try {
            outKey.close();
        } catch (Exception e) {
        }
    }
    outKey.close();
    outKey.close();
    Assert.assertTrue("Multiple close should have no effect.", true);
}

66. BrokerTestSupport#createXATransaction()

Project: activemq-artemis
File: BrokerTestSupport.java
protected XATransactionId createXATransaction(SessionInfo info) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream os = new DataOutputStream(baos);
    os.writeLong(++txGenerator);
    os.close();
    byte[] bs = baos.toByteArray();
    XATransactionId xid = new XATransactionId();
    xid.setBranchQualifier(bs);
    xid.setGlobalTransactionId(bs);
    xid.setFormatId(55);
    return xid;
}

67. WatchLeakTest#createValidateSessionPacketResponse()

Project: zookeeper
File: WatchLeakTest.java
/**
     * Forge a session validation response packet, as the LEADER would do.
     *
     * @param valid <code>true</code> to create a valid session message
     *
     * @throws Exception
     */
private QuorumPacket createValidateSessionPacketResponse(boolean valid) throws Exception {
    QuorumPacket qp = createValidateSessionPacket();
    ByteArrayInputStream bis = new ByteArrayInputStream(qp.getData());
    DataInputStream dis = new DataInputStream(bis);
    long id = dis.readLong();
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    dos.writeLong(id);
    // false means that the session has expired
    dos.writeBoolean(valid);
    qp.setData(bos.toByteArray());
    return qp;
}

68. TaskPacket#serialize()

Project: settlers-remake
File: TaskPacket.java
@Override
public final void serialize(DataOutputStream dos) throws IOException {
    ByteArrayOutputStream bufferOutStream = new ByteArrayOutputStream();
    DataOutputStream bufferDataOutStream = new DataOutputStream(bufferOutStream);
    bufferDataOutStream.writeUTF(this.getClass().getName());
    serializeTask(bufferDataOutStream);
    bufferDataOutStream.flush();
    dos.writeInt(bufferOutStream.size());
    bufferOutStream.writeTo(dos);
}
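
The frame written above is a 4-byte length prefix followed by the buffered payload, which itself starts with the UTF-encoded class name. A minimal sketch of consuming one such frame; deserializeTask() here is a hypothetical stand-in for the project-specific decoding:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

final class TaskPacketReadSketch {

    // Reads one length-prefixed frame as produced by serialize() above.
    static void readFrame(DataInputStream dis) throws IOException {
        int length = dis.readInt();              // size of the buffered payload
        byte[] payload = new byte[length];
        dis.readFully(payload);
        DataInputStream body = new DataInputStream(new ByteArrayInputStream(payload));
        String className = body.readUTF();       // written first by serialize()
        deserializeTask(className, body);        // hypothetical project-specific step
    }

    static void deserializeTask(String className, DataInputStream body) {
        // placeholder: reconstruct the concrete TaskPacket from its serialized fields
    }
}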

69. ServiceType#computeHash()

Project: river
File: ServiceType.java
/**
     * Computes a SHA-1 digest from the hash of the superclass, if there
     * is a superclass, followed by the name of this class, followed by
     * the name and type for each field, if any, declared by this class and
     * ordered alphabetically by field name.  The first 8 bytes of the digest
     * are used to form the 64-bit hash value for this type.
     */
private void computeHash() throws IOException, NoSuchAlgorithmException {
    hash = 0;
    MessageDigest md = MessageDigest.getInstance("SHA");
    DataOutputStream out = new DataOutputStream(new DigestOutputStream(new ByteArrayOutputStream(127), md));
    out.writeUTF(name);
    out.flush();
    byte[] digest = md.digest();
    for (int i = Math.min(8, digest.length); --i >= 0; ) {
        hash += ((long) (digest[i] & 0xFF)) << (i * 8);
    }
}

70. MethodHashing#createHash()

Project: Resteasy
File: MethodHashing.java
public static long createHash(String methodDesc) throws Exception {
    long hash = 0;
    ByteArrayOutputStream bytearrayoutputstream = new ByteArrayOutputStream(512);
    MessageDigest messagedigest = MessageDigest.getInstance("SHA");
    DataOutputStream dataoutputstream = new DataOutputStream(new DigestOutputStream(bytearrayoutputstream, messagedigest));
    dataoutputstream.writeUTF(methodDesc);
    dataoutputstream.flush();
    byte abyte0[] = messagedigest.digest();
    for (int j = 0; j < Math.min(8, abyte0.length); j++) hash += (long) (abyte0[j] & 0xff) << j * 8;
    return hash;
}
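
A short usage note: the hash folds the first eight digest bytes into a long, so identical method descriptors always yield the same value. The descriptor string below is only illustrative:

// Illustrative only: the JVM-style method descriptor is made up for the example.
long h1 = MethodHashing.createHash("echo(Ljava/lang/String;)Ljava/lang/String;");
long h2 = MethodHashing.createHash("echo(Ljava/lang/String;)Ljava/lang/String;");
assert h1 == h2; // deterministic for equal descriptors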

71. FormMarshaller#createHash()

Project: Resteasy
File: FormMarshaller.java
public static long createHash(String methodDesc) throws Exception {
    long hash = 0;
    ByteArrayOutputStream bytearrayoutputstream = new ByteArrayOutputStream(512);
    MessageDigest messagedigest = MessageDigest.getInstance("SHA");
    DataOutputStream dataoutputstream = new DataOutputStream(new DigestOutputStream(bytearrayoutputstream, messagedigest));
    dataoutputstream.writeUTF(methodDesc);
    dataoutputstream.flush();
    byte abyte0[] = messagedigest.digest();
    for (int j = 0; j < Math.min(8, abyte0.length); j++) hash += (long) (abyte0[j] & 0xff) << j * 8;
    return hash;
}

72. FormProcessor#createHash()

Project: Resteasy
File: FormProcessor.java
public static long createHash(String methodDesc) throws Exception {
    long hash = 0;
    ByteArrayOutputStream bytearrayoutputstream = new ByteArrayOutputStream(512);
    MessageDigest messagedigest = MessageDigest.getInstance("SHA");
    DataOutputStream dataoutputstream = new DataOutputStream(new DigestOutputStream(bytearrayoutputstream, messagedigest));
    dataoutputstream.writeUTF(methodDesc);
    dataoutputstream.flush();
    byte abyte0[] = messagedigest.digest();
    for (int j = 0; j < Math.min(8, abyte0.length); j++) hash += (long) (abyte0[j] & 0xff) << j * 8;
    return hash;
}

73. HeapBitmapInvertedIndexCreator#seal()

Project: pinot
File: HeapBitmapInvertedIndexCreator.java
@Override
public void seal() throws IOException {
    final DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(invertedIndexFile)));
    // First, write out the offsets of the bitmaps; this information can be used to access a specific bitmap directly.
    // In total, (invertedIndex.length + 1) offsets are written; the last offset is used to calculate the length of
    // the last bitmap, which may be needed when accessing bitmaps randomly.
    // If a bitmap's offset is k, then k bytes need to be skipped to reach the bitmap.
    // The first bitmap's offset
    int offset = 4 * (invertedIndex.length + 1);
    out.writeInt(offset);
    for (final MutableRoaringBitmap element : invertedIndex) {
        // offset where the next bitmap starts (or where the data ends, for the last one)
        offset += element.serializedSizeInBytes();
        out.writeInt(offset);
    }
    // write out bitmaps one by one
    for (final MutableRoaringBitmap element : invertedIndex) {
        element.serialize(out);
    }
    out.close();
    LOGGER.debug("persisted bitmap inverted index for column : " + spec.getName() + " in " + invertedIndexFile.getAbsolutePath());
}
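
The offset header makes random access simple: offsets[i] is the absolute position where bitmap i starts, and offsets[i + 1] - offsets[i] is its serialized length. Below is a minimal sketch of locating a single bitmap's bytes in the resulting file, assuming the caller knows the number of bitmaps; it is not Pinot's actual reader:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

final class InvertedIndexReadSketch {

    // Reads the serialized bytes of bitmap 'index' from a file laid out as in
    // seal(): (numBitmaps + 1) int offsets, then the bitmaps back to back.
    static byte[] readSerializedBitmap(String file, int numBitmaps, int index) throws IOException {
        try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
            int[] offsets = new int[numBitmaps + 1];
            for (int i = 0; i <= numBitmaps; i++) {
                offsets[i] = in.readInt();
            }
            int headerBytes = 4 * (numBitmaps + 1);
            int toSkip = offsets[index] - headerBytes;   // bytes of earlier bitmaps
            while (toSkip > 0) {
                toSkip -= in.skipBytes(toSkip);
            }
            byte[] bitmap = new byte[offsets[index + 1] - offsets[index]];
            in.readFully(bitmap);
            return bitmap;
        }
    }
}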

74. LPKUtils#writeFileFromByteBuffer()

Project: LGame
File: LPKUtils.java
/**
 * Writes the given byte array to a file under the extraction directory.
 *
 * @param fileBuff the file contents to write
 * @param fileName the name of the file to create
 * @param extractDir the directory to extract into
 * @throws Exception
 */
public static void writeFileFromByteBuffer(byte[] fileBuff, String fileName, String extractDir) throws Exception {
    String extractFilePath = extractDir + fileName;
    File file = new File(extractFilePath);
    if (!file.exists()) {
        FileUtils.makedirs(extractFilePath);
    }
    FileOutputStream fos = new FileOutputStream(file);
    DataOutputStream dos = new DataOutputStream(fos);
    dos.write(fileBuff);
    dos.close();
    fos.close();
}

75. StringArrayIO#writeSparseArray2D()

Project: kuromoji
File: StringArrayIO.java
public static void writeSparseArray2D(OutputStream output, String[][] array) throws IOException {
    DataOutputStream dataOutput = new DataOutputStream(output);
    int length = array.length;
    dataOutput.writeInt(length);
    for (int i = 0; i < length; i++) {
        String[] inner = array[i];
        if (inner != null) {
            dataOutput.writeInt(i);
            writeArray(dataOutput, inner);
        }
    }
    // This negative index serves as an end-of-array marker
    dataOutput.writeInt(-1);
}
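
Because of the -1 sentinel, the reader does not need to know in advance how many non-null rows were written; it consumes (index, array) pairs until the marker appears. A minimal read-side sketch, assuming writeArray stores an int length followed by writeUTF for each element (the actual kuromoji format may differ):

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

final class StringArrayReadSketch {

    // Reads the sparse layout: outer length, then (index, inner array)
    // pairs terminated by the -1 marker written above.
    static String[][] readSparseArray2D(InputStream input) throws IOException {
        DataInputStream dataInput = new DataInputStream(input);
        String[][] array = new String[dataInput.readInt()][];
        int index;
        while ((index = dataInput.readInt()) >= 0) {
            array[index] = readArray(dataInput);
        }
        return array;
    }

    // Assumed counterpart to writeArray: a length prefix plus one writeUTF per element.
    static String[] readArray(DataInputStream dataInput) throws IOException {
        String[] inner = new String[dataInput.readInt()];
        for (int i = 0; i < inner.length; i++) {
            inner[i] = dataInput.readUTF();
        }
        return inner;
    }
}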

76. IntegerArrayIO#writeSparseArray2D()

Project: kuromoji
File: IntegerArrayIO.java
public static void writeSparseArray2D(OutputStream output, int[][] array) throws IOException {
    DataOutputStream dataOutput = new DataOutputStream(output);
    int length = array.length;
    dataOutput.writeInt(length);
    for (int i = 0; i < length; i++) {
        int[] inner = array[i];
        if (inner != null) {
            dataOutput.writeInt(i);
            writeArray(dataOutput, inner);
        }
    }
    // This negative index serves as an end-of-array marker
    dataOutput.writeInt(-1);
}

77. ByteBufferIO#write()

Project: kuromoji
File: ByteBufferIO.java
public static void write(OutputStream output, ByteBuffer buffer) throws IOException {
    DataOutputStream dataOutput = new DataOutputStream(output);
    buffer = buffer.duplicate();
    buffer.rewind();
    dataOutput.writeInt(buffer.capacity());
    WritableByteChannel channel = Channels.newChannel(dataOutput);
    channel.write(buffer);
    // TODO: Do we need this?
    dataOutput.flush();
}
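
A matching read-side sketch: the 4-byte capacity prefix, followed by the buffer contents. It assumes the writer emitted exactly capacity bytes (i.e. the buffer's limit equalled its capacity) and is not kuromoji's actual reader:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

final class ByteBufferReadSketch {

    // Reads the capacity prefix, then fills a buffer of that size.
    static ByteBuffer read(InputStream input) throws IOException {
        DataInputStream dataInput = new DataInputStream(input);
        int capacity = dataInput.readInt();
        byte[] raw = new byte[capacity];
        dataInput.readFully(raw);
        return ByteBuffer.wrap(raw);
    }
}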

78. SizeTest#_testWriteAddresses()

Project: JGroups
File: SizeTest.java
private static void _testWriteAddresses(List<Address> list) throws Exception {
    long len = Util.size(list);
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(output);
    Util.writeAddresses(list, out);
    out.flush();
    byte[] buf = output.toByteArray();
    out.close();
    System.out.println("\nlen=" + len + ", serialized length=" + buf.length);
    assert len == buf.length;
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf));
    Collection<? extends Address> new_list = Util.readAddresses(in, ArrayList.class);
    System.out.println("old list=" + list + "\nnew list=" + new_list);
    assert list.equals(new_list);
}

79. SizeTest#_testWriteAddress()

Project: JGroups
File: SizeTest.java
private static void _testWriteAddress(Address addr) throws Exception {
    int len = Util.size(addr);
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(output);
    Util.writeAddress(addr, out);
    out.flush();
    byte[] buf = output.toByteArray();
    out.close();
    System.out.println("\nlen=" + len + ", serialized length=" + buf.length);
    assert len == buf.length;
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf));
    Address new_addr = Util.readAddress(in);
    System.out.println("old addr=" + addr + "\nnew addr=" + new_addr);
    assert addr.equals(new_addr);
}

80. MapCoder#encode()

Project: incubator-beam
File: MapCoder.java
@Override
public void encode(Map<K, V> map, OutputStream outStream, Context context) throws IOException, CoderException {
    if (map == null) {
        throw new CoderException("cannot encode a null Map");
    }
    DataOutputStream dataOutStream = new DataOutputStream(outStream);
    dataOutStream.writeInt(map.size());
    for (Entry<K, V> entry : map.entrySet()) {
        keyCoder.encode(entry.getKey(), outStream, context.nested());
        valueCoder.encode(entry.getValue(), outStream, context.nested());
    }
    dataOutStream.flush();
}

81. ConsoleNote#encodeToBytes()

Project: hudson-2.x
File: ConsoleNote.java
private ByteArrayOutputStream encodeToBytes() throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(new GZIPOutputStream(buf));
    oos.writeObject(this);
    oos.close();
    ByteArrayOutputStream buf2 = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(new Base64OutputStream(buf2, true, -1, null));
    buf2.write(PREAMBLE);
    dos.writeInt(buf.size());
    buf.writeTo(dos);
    dos.close();
    buf2.write(POSTAMBLE);
    return buf2;
}

82. Server#setupResponse()

Project: hadoop-common
File: Server.java
/**
   * Setup response for the IPC Call.
   * 
   * @param response buffer to serialize the response into
   * @param call {@link Call} to which we are setting up the response
   * @param status {@link Status} of the IPC call
   * @param rv return value for the IPC Call, if the call was successful
   * @param errorClass error class, if the call failed
   * @param error error message, if the call failed
   * @throws IOException
   */
private void setupResponse(ByteArrayOutputStream response, Call call, Status status, Writable rv, String errorClass, String error) throws IOException {
    response.reset();
    DataOutputStream out = new DataOutputStream(response);
    // write call id
    out.writeInt(call.id);
    // write status
    out.writeInt(status.state);
    if (status == Status.SUCCESS) {
        rv.write(out);
    } else {
        WritableUtils.writeString(out, errorClass);
        WritableUtils.writeString(out, error);
    }
    call.setResponse(ByteBuffer.wrap(response.toByteArray()));
}
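
A sketch of how the client side could pick the frame apart: the call id, the status code, then either the Writable payload or an error class and message. The value parameter is a hypothetical Writable of the expected return type, and the assumption that SUCCESS serializes as 0 is only that, an assumption:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;

final class RpcResponseReadSketch {

    // Reads a response frame produced by setupResponse() above.
    static void readResponse(byte[] responseBytes, Writable value) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(responseBytes));
        int callId = in.readInt();                      // matches out.writeInt(call.id)
        int state = in.readInt();                       // matches out.writeInt(status.state)
        if (state == 0) {                               // assuming SUCCESS serializes as 0
            value.readFields(in);                       // read side of rv.write(out)
        } else {
            String errorClass = WritableUtils.readString(in);
            String error = WritableUtils.readString(in);
            throw new IOException("call " + callId + " failed: " + errorClass + ": " + error);
        }
    }
}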

83. INode#serialize()

Project: hadoop-common
File: INode.java
public InputStream serialize() throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeByte(fileType.ordinal());
    if (isFile()) {
        out.writeInt(blocks.length);
        for (int i = 0; i < blocks.length; i++) {
            out.writeLong(blocks[i].getId());
            out.writeLong(blocks[i].getLength());
        }
    }
    out.close();
    return new ByteArrayInputStream(bytes.toByteArray());
}
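
The serialized form is one byte for the file-type ordinal and, for file inodes, a block count followed by (id, length) pairs. A minimal sketch of reading it back; which ordinals denote files is project-specific, so the check below is only a placeholder:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

final class INodeReadSketch {

    // Reads the byte layout written by serialize(): a file-type ordinal and,
    // for files, the block list as (id, length) pairs.
    static void read(InputStream in) throws IOException {
        DataInputStream dataIn = new DataInputStream(in);
        int fileTypeOrdinal = dataIn.readByte();
        if (isFileType(fileTypeOrdinal)) {               // placeholder check
            int numBlocks = dataIn.readInt();
            for (int i = 0; i < numBlocks; i++) {
                long blockId = dataIn.readLong();
                long blockLength = dataIn.readLong();
                // reconstruct the Block objects here (project-specific)
            }
        }
    }

    // Hypothetical: maps the ordinal back to "is this inode a file?".
    static boolean isFileType(int ordinal) {
        return ordinal != 0;   // assumption only; the real FileType enum defines this
    }
}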

84. TestTFileStreams#testFailureCloseKeyStreamManyTimesInWriter()

Project: hadoop-20
File: TestTFileStreams.java
public void testFailureCloseKeyStreamManyTimesInWriter() throws IOException {
    if (skip)
        return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    try {
        outKey.write("key0".getBytes());
        outKey.close();
    } catch (Exception e) {
    } finally {
        try {
            outKey.close();
        } catch (Exception e) {
        }
    }
    outKey.close();
    outKey.close();
    Assert.assertTrue("Multiple close should have no effect.", true);
}

85. TestDFSShell#writeFileContents()

Project: hadoop-20
File: TestDFSShell.java
static Path writeFileContents(FileSystem fs, Path f, String data, long offset) throws IOException {
    DataOutputStream out = fs.create(f);
    if (offset > 0) {
        // write some trash
        byte[] trash = new byte[(int) offset];
        out.write(trash);
    }
    out.writeUTF(data);
    out.close();
    assertTrue(fs.exists(f));
    return f;
}

86. INode#serialize()

Project: hadoop-20
File: INode.java
public InputStream serialize() throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeByte(fileType.ordinal());
    if (isFile()) {
        out.writeInt(blocks.length);
        for (int i = 0; i < blocks.length; i++) {
            out.writeLong(blocks[i].getId());
            out.writeLong(blocks[i].getLength());
        }
    }
    out.close();
    return new ByteArrayInputStream(bytes.toByteArray());
}

87. DictionaryManager#save()

Project: kylin
File: DictionaryManager.java
void save(DictionaryInfo dict) throws IOException {
    ResourceStore store = MetadataManager.getInstance(config).getStore();
    String path = dict.getResourcePath();
    logger.info("Saving dictionary at " + path);
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream dout = new DataOutputStream(buf);
    DictionaryInfoSerializer.FULL_SERIALIZER.serialize(dict, dout);
    dout.close();
    buf.close();
    ByteArrayInputStream inputStream = new ByteArrayInputStream(buf.toByteArray());
    store.putResource(path, inputStream, System.currentTimeMillis());
    inputStream.close();
}

88. IntegerArrayIO#writeArray()

Project: kuromoji
File: IntegerArrayIO.java
public static void writeArray(OutputStream output, int[] array) throws IOException {
    DataOutputStream dataOutput = new DataOutputStream(output);
    int length = array.length;
    dataOutput.writeInt(length);
    ByteBuffer tmpBuffer = ByteBuffer.allocate(length * INT_BYTES);
    IntBuffer intBuffer = tmpBuffer.asIntBuffer();
    tmpBuffer.rewind();
    intBuffer.put(array);
    WritableByteChannel channel = Channels.newChannel(dataOutput);
    channel.write(tmpBuffer);
}
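
A matching read-side sketch: the length prefix, then length * 4 bytes of ints in the ByteBuffer's default big-endian order (which is also the order DataOutputStream itself uses). It is not kuromoji's actual reader:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

final class IntegerArrayReadSketch {

    private static final int INT_BYTES = Integer.SIZE / Byte.SIZE;

    // Reads the length prefix, then the packed big-endian int values.
    static int[] readArray(InputStream input) throws IOException {
        DataInputStream dataInput = new DataInputStream(input);
        int length = dataInput.readInt();
        byte[] raw = new byte[length * INT_BYTES];
        dataInput.readFully(raw);
        int[] array = new int[length];
        ByteBuffer.wrap(raw).asIntBuffer().get(array);
        return array;
    }
}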

89. StackTreeCoderTest#verifyVarIntEncoding()

Project: jvm-tools
File: StackTreeCoderTest.java
//    @Test
public void verifyVarIntEncoding() throws IOException {
    int n = 0;
    int j = 0;
    ByteArrayOutputStream bos;
    DataOutputStream dos;
    bos = new ByteArrayOutputStream(4 << 20);
    dos = new DataOutputStream(bos);
    for (int i = 0; i != (1 << 30) + 1000000; ++i) {
        StackTraceCodec.writeVarInt(dos, i);
        ++n;
        if (n == 1000000) {
            System.out.println(i);
            n = 0;
            byte[] buf = bos.toByteArray();
            DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buf));
            for (; j <= i; ++j) {
                int x = StackTraceCodec.readVarInt(dis);
                Assert.assertEquals(j, x);
            }
            bos = new ByteArrayOutputStream(4 << 20);
            dos = new DataOutputStream(bos);
        }
    }
}
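
For context, a common variable-length int scheme packs 7 payload bits per byte and uses the high bit as a continuation flag, which is what gives small values a one-byte encoding; StackTraceCodec's actual wire format may differ, so the sketch below is only illustrative:

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class VarIntSketch {

    // Writes 7 bits per byte, low-order groups first; the high bit signals
    // that another byte follows.
    static void writeVarInt(DataOutputStream dos, int value) throws IOException {
        while ((value & ~0x7F) != 0) {
            dos.writeByte((value & 0x7F) | 0x80);
            value >>>= 7;
        }
        dos.writeByte(value);
    }

    // Reassembles the 7-bit groups until a byte without the continuation bit.
    static int readVarInt(DataInputStream dis) throws IOException {
        int result = 0;
        int shift = 0;
        int b;
        do {
            b = dis.readUnsignedByte();
            result |= (b & 0x7F) << shift;
            shift += 7;
        } while ((b & 0x80) != 0);
        return result;
    }
}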

90. MessageBatchTest#testSize()

Project: JGroups
File: MessageBatchTest.java
public void testSize() throws Exception {
    List<Message> msgs = createMessages();
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(output);
    Util.writeMessageList(b, a, "cluster".getBytes(), msgs, out, false, UDP_ID);
    out.flush();
    byte[] buf = output.toByteArray();
    System.out.println("size=" + buf.length + " bytes, " + msgs.size() + " messages");
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf));
    // version
    in.readShort();
    // flags
    in.readByte();
    List<Message> list = Util.readMessageList(in, UDP_ID);
    assert msgs.size() == list.size();
}

91. DigestTest#testStreamable()

Project: JGroups
File: DigestTest.java
public void testStreamable() throws Exception {
    ByteArrayOutputStream outstream = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(outstream);
    d.writeTo(dos);
    dos.close();
    byte[] buf = outstream.toByteArray();
    ByteArrayInputStream instream = new ByteArrayInputStream(buf);
    DataInputStream dis = new DataInputStream(instream);
    Digest tmp = new Digest();
    tmp.readFrom(dis);
    Assert.assertEquals(d, tmp);
}

92. StreamRemoteCall#getResultStream()

Project: jdk7u-jdk
File: StreamRemoteCall.java
/**
     * Returns an output stream (may put out header information
     * relating to the success of the call).
     * @param success If true, indicates normal return, else indicates
     * exceptional return.
     * @exception StreamCorruptedException If result stream previously
     * acquired
     * @exception IOException For any other problem with I/O.
     */
public ObjectOutput getResultStream(boolean success) throws IOException {
    /* make sure result code only marshaled once. */
    if (resultStarted)
        throw new StreamCorruptedException("result already in progress");
    else
        resultStarted = true;
    // write out return header
    // return header, part 1 (read by Transport)
    DataOutputStream wr = new DataOutputStream(conn.getOutputStream());
    // transport op
    wr.writeByte(TransportConstants.Return);
    // creates a MarshalOutputStream
    getOutputStream(true);
    // return header, part 2 (read by client-side RemoteCall)
    if (success)
        out.writeByte(TransportConstants.NormalReturn);
    else
        out.writeByte(TransportConstants.ExceptionalReturn);
    // write id for gcAck
    out.writeID();
    return out;
}

93. FontConfiguration#saveBinary()

Project: jdk7u-jdk
File: FontConfiguration.java
/* Generate a binary format font configuration from internal data
     * tables.
     */
public static void saveBinary(OutputStream out) throws IOException {
    sanityCheck();
    DataOutputStream dataOut = new DataOutputStream(out);
    writeShortTable(dataOut, head);
    writeShortTable(dataOut, table_scriptIDs);
    writeShortTable(dataOut, table_scriptFonts);
    writeShortTable(dataOut, table_elcIDs);
    writeShortTable(dataOut, table_sequences);
    writeShortTable(dataOut, table_fontfileNameIDs);
    writeShortTable(dataOut, table_componentFontNameIDs);
    writeShortTable(dataOut, table_filenames);
    writeShortTable(dataOut, table_awtfontpaths);
    writeShortTable(dataOut, table_exclusions);
    writeShortTable(dataOut, table_proportionals);
    writeShortTable(dataOut, table_scriptFontsMotif);
    writeShortTable(dataOut, table_alphabeticSuffix);
    writeShortTable(dataOut, table_stringIDs);
    //stringTable
    dataOut.writeChars(new String(table_stringTable));
    out.close();
    if (verbose) {
        dump();
    }
}

94. Manifest#write()

Project: jdk7u-jdk
File: Manifest.java
/**
     * Writes the Manifest to the specified OutputStream.
     * Attributes.Name.MANIFEST_VERSION must be set in
     * MainAttributes prior to invoking this method.
     *
     * @param out the output stream
     * @exception IOException if an I/O error has occurred
     * @see #getMainAttributes
     */
public void write(OutputStream out) throws IOException {
    DataOutputStream dos = new DataOutputStream(out);
    // Write out the main attributes for the manifest
    attr.writeMain(dos);
    // Now write out the per-entry attributes
    Iterator it = entries.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry e = (Map.Entry) it.next();
        StringBuffer buffer = new StringBuffer("Name: ");
        String value = (String) e.getKey();
        if (value != null) {
            byte[] vb = value.getBytes("UTF8");
            value = new String(vb, 0, 0, vb.length);
        }
        buffer.append(value);
        buffer.append("\r\n");
        make72Safe(buffer);
        dos.writeBytes(buffer.toString());
        ((Attributes) e.getValue()).write(dos);
    }
    dos.flush();
}
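
A short usage example of the API above; as the javadoc notes, Attributes.Name.MANIFEST_VERSION must be present in the main attributes before write() is called (the Main-Class value is just an example):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

final class ManifestWriteExample {

    static void writeManifest(String path) throws IOException {
        Manifest manifest = new Manifest();
        manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
        manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, "com.example.Main");
        try (OutputStream out = new FileOutputStream(path)) {
            manifest.write(out);
        }
    }
}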

95. ApnsFeedbackServerSocket#handleSocket()

Project: java-apns
File: ApnsFeedbackServerSocket.java
@Override
void handleSocket(Socket socket) throws IOException {
    Map<byte[], Date> inactiveDevices = requestDelegate.getInactiveDevices();
    DataOutputStream dataStream = new DataOutputStream(socket.getOutputStream());
    for (Entry<byte[], Date> entry : inactiveDevices.entrySet()) {
        int time = (int) (entry.getValue().getTime() / 1000L);
        dataStream.writeInt(time);
        byte[] bytes = entry.getKey();
        dataStream.writeShort(bytes.length);
        dataStream.write(bytes);
    }
    dataStream.close();
}
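
The stream written above follows the APNs feedback service layout: a 4-byte timestamp, a 2-byte token length, then the token bytes, repeated until the server closes the connection. A minimal sketch of the client-side read loop (not java-apns' actual client code):

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;

final class FeedbackReadSketch {

    // Reads (timestamp, token) entries until the server closes the stream.
    static void readFeedback(InputStream in) throws IOException {
        DataInputStream dataIn = new DataInputStream(in);
        try {
            while (true) {
                Date lastSeen = new Date(dataIn.readInt() * 1000L);
                byte[] token = new byte[dataIn.readUnsignedShort()];
                dataIn.readFully(token);
                // handle (token, lastSeen), e.g. stop sending to this device
            }
        } catch (EOFException endOfStream) {
            // server closed the connection; all entries have been read
        }
    }
}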

96. Serializer#serialize()

Project: jackrabbit
File: Serializer.java
/**
     * Serializes the specified <code>NodeReferences</code> object to the given
     * binary <code>stream</code>.
     *
     * @param refs   object to serialize
     * @param stream the stream where the object should be serialized to
     * @throws Exception if an error occurs during the serialization
     * @see #deserialize(NodeReferences, InputStream)
     */
public static void serialize(NodeReferences refs, OutputStream stream) throws Exception {
    DataOutputStream out = new DataOutputStream(stream);
    // references
    Collection<PropertyId> c = refs.getReferences();
    // count
    out.writeInt(c.size());
    for (Iterator<PropertyId> iter = c.iterator(); iter.hasNext(); ) {
        PropertyId propId = iter.next();
        // propertyId
        out.writeUTF(propId.toString());
    }
}
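
A matching read-side sketch for the layout above: an int count followed by that many UTF-encoded property ids, returned here as plain strings rather than Jackrabbit PropertyId objects:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

final class NodeReferencesReadSketch {

    // Reads the reference count, then one UTF string per property id.
    static List<String> readReferences(InputStream stream) throws IOException {
        DataInputStream in = new DataInputStream(stream);
        int count = in.readInt();
        List<String> ids = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            ids.add(in.readUTF());
        }
        return ids;
    }
}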

97. OldDataOutputStreamTest#test_flush()

Project: j2objc
File: OldDataOutputStreamTest.java
public void test_flush() throws IOException {
    BufferedOutputStream buf = new BufferedOutputStream(bos);
    os = new DataOutputStream(buf);
    os.writeInt(9087589);
    assertTrue("Test 1: Written data should not be available.", bos.toByteArray().length == 0);
    os.flush();
    assertTrue("Test 2: Written data should be available.", bos.toByteArray().length > 0);
    os.close();
    openDataInputStream();
    int c = dis.readInt();
    assertEquals("Test 3: Failed to flush correctly;", 9087589, c);
    dis.close();
    os = new DataOutputStream(sos);
    try {
        os.flush();
        fail("Test 4: IOException expected.");
    } catch (IOException e) {
    }
}

98. PythonLanguageLevelPusher#persistAttribute()

Project: intellij-community
File: PythonLanguageLevelPusher.java
public void persistAttribute(@NotNull Project project, @NotNull VirtualFile fileOrDir, @NotNull LanguageLevel level) throws IOException {
    final DataInputStream iStream = PERSISTENCE.readAttribute(fileOrDir);
    if (iStream != null) {
        try {
            final int oldLevelOrdinal = DataInputOutputUtil.readINT(iStream);
            if (oldLevelOrdinal == level.ordinal())
                return;
        } finally {
            iStream.close();
        }
    }
    final DataOutputStream oStream = PERSISTENCE.writeAttribute(fileOrDir);
    DataInputOutputUtil.writeINT(oStream, level.ordinal());
    oStream.close();
    for (VirtualFile child : fileOrDir.getChildren()) {
        final FileType fileType = FileTypeRegistry.getInstance().getFileTypeByFileName(child.getName());
        if (!child.isDirectory() && PythonFileType.INSTANCE.equals(fileType)) {
            clearSdkPathCache(child);
            PushedFilePropertiesUpdater.getInstance(project).filePropertiesChanged(child);
        }
    }
}

99. StorageTest#testAppender()

Project: intellij-community
File: StorageTest.java
public void testAppender() throws Exception {
    final int r = myStorage.createNewRecord();
    DataOutputStream out = new DataOutputStream(myStorage.appendStream(r));
    for (int i = 0; i < 10000; i++) {
        out.writeInt(i);
        if (i % 100 == 0) {
            // Drop the appender's cache
            myStorage.readStream(r);
            out.close();
            out = new DataOutputStream(myStorage.appendStream(r));
        }
    }
    out.close();
    DataInputStream in = new DataInputStream(myStorage.readStream(r));
    for (int i = 0; i < 10000; i++) {
        assertEquals(i, in.readInt());
    }
    in.close();
}

100. JavaLanguageLevelPusher#persistAttribute()

Project: intellij-community
File: JavaLanguageLevelPusher.java
@Override
public void persistAttribute(@NotNull Project project, @NotNull VirtualFile fileOrDir, @NotNull LanguageLevel level) throws IOException {
    final DataInputStream iStream = PERSISTENCE.readAttribute(fileOrDir);
    if (iStream != null) {
        try {
            final int oldLevelOrdinal = DataInputOutputUtil.readINT(iStream);
            if (oldLevelOrdinal == level.ordinal())
                return;
        } finally {
            iStream.close();
        }
    }
    final DataOutputStream oStream = PERSISTENCE.writeAttribute(fileOrDir);
    DataInputOutputUtil.writeINT(oStream, level.ordinal());
    oStream.close();
    for (VirtualFile child : fileOrDir.getChildren()) {
        if (!child.isDirectory() && StdFileTypes.JAVA.equals(child.getFileType())) {
            PushedFilePropertiesUpdater.getInstance(project).filePropertiesChanged(child);
        }
    }
}