Here are examples of the Java API class java.io.DataInput, taken from open source projects.
1. DBNParameterVector#Deserialize()
Project: Metronome
File: DBNParameterVector.java
/**
 * Restores this parameter vector's scalar state from the given serialized form.
 * The DBN payload itself is kept as raw bytes ({@code dbn_payload}); the DBN
 * deserializes it later on its own time.
 *
 * @param bytes serialized vector, as produced by the matching serializer
 * @throws IOException if the byte stream is truncated or unreadable
 */
public void Deserialize(byte[] bytes) throws IOException {
    ByteArrayInputStream b = new ByteArrayInputStream(bytes);
    DataInput in = new DataInputStream(b);
    // Scalars must be read in exactly the order they were written.
    this.iteration = in.readInt();
    this.datasetPassComplete = in.readBoolean();
    this.preTrainPhaseComplete = in.readBoolean();
    this.masterSignalToStartFineTunePhase = in.readBoolean();
    this.masterSignalToStartNextDatasetPass = in.readBoolean();
    // Length-prefixed payload: size first, then exactly that many bytes.
    int bytesToRead = in.readInt();
    this.dbn_payload = new byte[bytesToRead];
    in.readFully(this.dbn_payload, 0, bytesToRead);
}
2. LittleEndianDataOutputStreamTest#testWriteBytes_discardHighOrderBytes()
Project: guava
File: LittleEndianDataOutputStreamTest.java
// testing a deprecated method @SuppressWarnings("deprecation") public void testWriteBytes_discardHighOrderBytes() throws IOException { /* Write out various test values in LITTLE ENDIAN FORMAT */ out.writeBytes("???"); byte[] data = baos.toByteArray(); /* Setup input streams */ DataInput in = new DataInputStream(new ByteArrayInputStream(data)); /* Read in various values NORMALLY */ byte[] b = new byte[3]; in.readFully(b); byte[] expected = { (byte) 0xAA, (byte) 0xBB, (byte) 0xCC }; assertEquals(expected, b); }
3. LittleEndianDataOutputStreamTest#testWriteBytes()
Project: guava
File: LittleEndianDataOutputStreamTest.java
// testing a deprecated method @SuppressWarnings("deprecation") public void testWriteBytes() throws IOException { /* Write out various test values in LITTLE ENDIAN FORMAT */ out.writeBytes("rÉsumÉ"); byte[] data = baos.toByteArray(); /* Setup input streams */ DataInput in = new DataInputStream(new ByteArrayInputStream(data)); /* Read in various values NORMALLY */ byte[] b = new byte[6]; in.readFully(b); assertEquals("rÉsumÉ".getBytes(Charsets.ISO_8859_1), b); }
4. LittleEndianDataInputStreamTest#testSkipBytes()
Project: guava
File: LittleEndianDataInputStreamTest.java
public void testSkipBytes() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(baos); /* Write out various test values NORMALLY */ // 10 bytes of junk to skip out.write(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }); initializeData(out); byte[] data = baos.toByteArray(); DataInput in = new LittleEndianDataInputStream(new ByteArrayInputStream(data)); int bytesSkipped = 0; while (bytesSkipped < 10) { bytesSkipped += in.skipBytes(10 - bytesSkipped); } /* Read in various values in LITTLE ENDIAN FORMAT */ byte[] b = new byte[2]; in.readFully(b); assertEquals(-100, b[0]); assertEquals(100, b[1]); assertTrue(in.readBoolean()); assertFalse(in.readBoolean()); }
5. LittleEndianDataInputStreamTest#testReadLittleEndian()
Project: guava
File: LittleEndianDataInputStreamTest.java
public void testReadLittleEndian() throws IOException {
    DataInput in = new LittleEndianDataInputStream(new ByteArrayInputStream(data));
    /* Read in various values in LITTLE ENDIAN FORMAT */
    byte[] b = new byte[2];
    in.readFully(b);
    assertEquals(-100, b[0]);
    assertEquals(100, b[1]);
    assertEquals(true, in.readBoolean());
    assertEquals(false, in.readBoolean());
    assertEquals(100, in.readByte());
    assertEquals(-100, in.readByte());
    assertEquals(200, in.readUnsignedByte());
    // Big-endian 'a' (0x00 0x61) read little-endian is 0x6100; the char literal
    // was garbled to '?' in transit — restored to the byte-swapped value.
    assertEquals('\u6100', in.readChar());
    assertEquals(-12150, in.readShort());
    assertEquals(20675, in.readUnsignedShort());
    assertEquals(0xBEBAFECA, in.readInt());
    assertEquals(0xBEBAFECAEFBEADDEL, in.readLong());
    assertEquals("Herby Derby", in.readUTF());
    assertEquals(0xBEBAFECA, Float.floatToIntBits(in.readFloat()));
    assertEquals(0xBEBAFECAEFBEADDEL, Double.doubleToLongBits(in.readDouble()));
}
6. DataInputUtilTest#testSkipFully()
Project: derby
File: DataInputUtilTest.java
public void testSkipFully() throws IOException {
    int length = 1024;
    // Skipping the whole stream leaves nothing behind: readByte() must hit EOF.
    DataInput stream = new DataInputStream(new ByteArrayInputStream(new byte[length]));
    DataInputUtil.skipFully(stream, length);
    try {
        stream.readByte();
        fail("Should have met EOF!");
    } catch (EOFException expected) {
        assertTrue(true);
    }
    // Skipping all but one byte leaves exactly one readable byte, then EOF.
    stream = new DataInputStream(new ByteArrayInputStream(new byte[length]));
    DataInputUtil.skipFully(stream, length - 1);
    stream.readByte();
    try {
        stream.readByte();
        fail("Should have met EOF!");
    } catch (EOFException expected) {
        assertTrue(true);
    }
}
7. TransactionEditTest#verifyDecodingSupportsOlderVersion()
Project: tephra
File: TransactionEditTest.java
@SuppressWarnings("deprecation") private void verifyDecodingSupportsOlderVersion(TransactionEdit edit, TransactionEditCodecs.TransactionEditCodec olderCodec) throws IOException { // encoding with older version of codec ByteArrayDataOutput out = ByteStreams.newDataOutput(); TransactionEditCodecs.encode(edit, out, olderCodec); // decoding TransactionEdit decodedEdit = new TransactionEdit(); DataInput in = ByteStreams.newDataInput(out.toByteArray()); decodedEdit.readFields(in); Assert.assertEquals(edit, decodedEdit); }
8. DataOutputTest#testSequentialWriter()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testSequentialWriter() throws IOException {
    File file = FileUtils.createTempFile("dataoutput", "test");
    // Write the canonical test data through a SequentialWriter-backed channel.
    final SequentialWriter writer = new SequentialWriter(file, 32);
    DataOutputStreamAndChannel out = new DataOutputStreamAndChannel(writer, writer);
    DataInput reference = testWrite(out);
    out.flush();
    out.close();
    // Re-read the file with a plain DataInputStream and compare against the canon.
    DataInputStream reread = new DataInputStream(new FileInputStream(file));
    testRead(reread, reference);
    reread.close();
    Assert.assertTrue(file.delete());
}
9. StorageConnectorTest#testGetAsTemporaryByteBuffer()
Project: sis
File: StorageConnectorTest.java
/**
 * Tests the {@link StorageConnector#getStorageAs(Class)} method for the {@link ByteBuffer} type when
 * the buffer is only temporary. The difference between this test and {@link #testGetAsByteBuffer()} is
 * that the buffer created in this test will not be used for the "real" reading process in the data store.
 * Consequently, it should be a smaller, only temporary, buffer.
 *
 * @throws DataStoreException if an error occurred while using the storage connector.
 * @throws IOException if an error occurred while reading the test file.
 */
@Test
@DependsOnMethod("testGetAsDataInputFromStream")
public void testGetAsTemporaryByteBuffer() throws DataStoreException, IOException {
    StorageConnector connection = create(true);
    final DataInput in = ImageIO.createImageInputStream(connection.getStorage());
    assertNotNull("ImageIO.createImageInputStream(InputStream)", in);  // Sanity check.
    // Wrap an already-created DataInput: the connector shall return it as-is.
    connection = new StorageConnector(in);
    assertSame(in, connection.getStorageAs(DataInput.class));
    final ByteBuffer buffer = connection.getStorageAs(ByteBuffer.class);
    assertNotNull("getStorageAs(ByteBuffer.class)", buffer);
    // A temporary buffer uses the minimal capacity, not the full read buffer size.
    assertEquals(StorageConnector.MINIMAL_BUFFER_SIZE, buffer.capacity());
    assertEquals(MAGIC_NUMBER, buffer.getInt());
    connection.closeAllExcept(null);
}
10. StorageConnectorTest#testGetAsChannelDataInput()
Project: sis
File: StorageConnectorTest.java
/**
 * Tests the {@link StorageConnector#getStorageAs(Class)} method for the {@link ChannelDataInput} type.
 * The initial value should not be an instance of {@link ChannelImageInputStream} in order to avoid
 * initializing the Image I/O classes. However after a call to {@code getStorageAs(ChannelImageInputStream.class)},
 * the type should have been promoted.
 *
 * @throws DataStoreException if an error occurred while using the storage connector.
 * @throws IOException if an error occurred while reading the test file.
 */
@Test
public void testGetAsChannelDataInput() throws DataStoreException, IOException {
    final StorageConnector connection = create(true);
    final ChannelDataInput input = connection.getStorageAs(ChannelDataInput.class);
    // Must not yet be an image input stream: Image I/O shall stay uninitialized.
    assertFalse(input instanceof ChannelImageInputStream);
    assertEquals(MAGIC_NUMBER, input.buffer.getInt());
    /*
     * Get as an image input stream and ensure that the cached value has been replaced.
     */
    final DataInput stream = connection.getStorageAs(DataInput.class);
    assertInstanceOf("Needs the SIS implementation", ChannelImageInputStream.class, stream);
    assertNotSame("Expected a new instance.", input, stream);
    // The promoted instance shall reuse the same channel and buffer as the original.
    assertSame("Shall share the channel.", input.channel, ((ChannelDataInput) stream).channel);
    assertSame("Shall share the buffer.", input.buffer, ((ChannelDataInput) stream).buffer);
    assertSame("Cached valud shall have been replaced.", stream, connection.getStorageAs(ChannelDataInput.class));
    connection.closeAllExcept(null);
}
11. StorageConnectorTest#testGetAsDataInput()
Project: sis
File: StorageConnectorTest.java
/**
 * Implementation of {@link #testGetAsDataInputFromURL()} and {@link #testGetAsDataInputFromStream()}.
 *
 * @param asStream whether the storage shall be opened as a stream rather than a URL.
 */
private void testGetAsDataInput(final boolean asStream) throws DataStoreException, IOException {
    final StorageConnector connection = create(asStream);
    final DataInput input = connection.getStorageAs(DataInput.class);
    assertSame("Value shall be cached.", input, connection.getStorageAs(DataInput.class));
    assertInstanceOf("Needs the SIS implementation", ChannelImageInputStream.class, input);
    assertSame("Instance shall be shared.", input, connection.getStorageAs(ChannelDataInput.class));
    /*
     * Reads a single integer for checking that the stream is at the right position, then close the stream.
     * Since the file is a compiled Java class, the integer that we read shall be the Java magic number.
     */
    final ReadableByteChannel channel = ((ChannelImageInputStream) input).channel;
    assertTrue("channel.isOpen()", channel.isOpen());
    assertEquals(MAGIC_NUMBER, input.readInt());
    // closeAllExcept(null) closes everything, including the underlying channel.
    connection.closeAllExcept(null);
    assertFalse("channel.isOpen()", channel.isOpen());
}
12. MetricTimeSeries#fromBytes()
Project: pinot
File: MetricTimeSeries.java
/**
 * Rebuilds a {@code MetricTimeSeries} from its serialized byte form.
 * Layout: window count, buffer size, then per window a long timestamp followed
 * by exactly {@code bufferSize} bytes of metric data.
 * NOTE(review): a single bufferSize is assumed to apply to every window —
 * confirm against the matching serializer.
 *
 * @param buf serialized series bytes
 * @param schema schema used to construct the resulting series
 * @throws IOException if the stream is truncated or unreadable
 */
public static MetricTimeSeries fromBytes(byte[] buf, MetricSchema schema) throws IOException {
    MetricTimeSeries series = new MetricTimeSeries(schema);
    DataInput in = new DataInputStream(new ByteArrayInputStream(buf));
    int numTimeWindows = in.readInt();
    int bufferSize = in.readInt();
    for (int i = 0; i < numTimeWindows; i++) {
        long timeWindow = in.readLong();
        byte[] bytes = new byte[bufferSize];
        in.readFully(bytes);
        series.timeseries.put(timeWindow, ByteBuffer.wrap(bytes));
    }
    return series;
}
13. DimensionKey#fromBytes()
Project: pinot
File: DimensionKey.java
/**
 * Deserializes a {@code DimensionKey} from length-prefixed UTF-8 strings.
 * Layout: dimension count, then per dimension a byte length followed by that
 * many UTF-8 bytes.
 *
 * @param bytes serialized key bytes
 * @return the reconstructed key
 * @throws IOException declared for the DataInput API; actual read failures are
 *         logged and rethrown as RuntimeException below
 */
public static DimensionKey fromBytes(byte[] bytes) throws IOException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(bytes));
    // read the number of dimensions
    int size = in.readInt();
    String[] dimensionValues = new String[size];
    // read each length-prefixed dimension value
    try {
        for (int i = 0; i < size; i++) {
            int length = in.readInt();
            byte[] b = new byte[length];
            in.readFully(b);
            dimensionValues[i] = new String(b, "UTF-8");
        }
    } catch (Exception e) {
        // Log the raw input for diagnosis before failing the caller.
        LOGGER.info(Arrays.toString(bytes), e);
        throw new RuntimeException(e);
    }
    return new DimensionKey(dimensionValues);
}
14. ParameterVector#Deserialize()
Project: Metronome
File: ParameterVector.java
/**
 * Restores this parameter vector from its serialized byte form. Fields are
 * read in the exact order the matching serializer wrote them.
 *
 * @param bytes serialized vector
 * @throws IOException if the stream is truncated or unreadable
 */
public void Deserialize(byte[] bytes) throws IOException {
    ByteArrayInputStream b = new ByteArrayInputStream(bytes);
    DataInput in = new DataInputStream(b);
    this.y_partial_sum = in.readDouble();
    this.y_avg = in.readDouble();
    this.SSyy_partial_sum = in.readDouble();
    this.SSE_partial_sum = in.readDouble();
    this.IterationComplete = in.readInt();
    this.CurrentIteration = in.readInt();
    this.batchTimeMS = in.readLong();
    this.TrainedRecords = in.readInt();
    this.AvgError = in.readFloat();
    // The parameter matrix is read last, via MatrixWritable.
    this.parameter_vector = MatrixWritable.readMatrix(in);
}
15. ParameterVector#Deserialize()
Project: Metronome
File: ParameterVector.java
/**
 * Restores this parameter vector from its serialized byte form. Fields are
 * read in the exact order the matching serializer wrote them; the source host
 * string is not part of this wire format.
 *
 * @param bytes serialized vector
 * @throws IOException if the stream is truncated or unreadable
 */
public void Deserialize(byte[] bytes) throws IOException {
    ByteArrayInputStream b = new ByteArrayInputStream(bytes);
    DataInput in = new DataInputStream(b);
    this.SrcWorkerPassCount = in.readInt();
    this.GlobalPassCount = in.readInt();
    this.IterationComplete = in.readInt();
    this.CurrentIteration = in.readInt();
    this.TrainedRecords = in.readInt();
    this.AvgLogLikelihood = in.readFloat();
    this.PercentCorrect = in.readFloat();
    // The parameter matrix is read last, via MatrixWritable.
    this.parameter_vector = MatrixWritable.readMatrix(in);
}
16. BPING#run()
Project: JGroups
File: BPING.java
/**
 * Receiver loop: reads datagram packets from the socket, unmarshals each one
 * into a {@code Message}, and passes it up the protocol stack. Terminates when
 * the socket is closed, the receiver reference is cleared, or this thread is
 * no longer the designated receiver.
 */
public void run() {
    final byte[] receive_buf = new byte[65535];
    DatagramPacket packet = new DatagramPacket(receive_buf, receive_buf.length);
    DataInput inp;
    while (sock != null && receiver != null && Thread.currentThread().equals(receiver)) {
        // Reset the packet to the full buffer before each receive.
        packet.setData(receive_buf, 0, receive_buf.length);
        try {
            sock.receive(packet);
            inp = new ByteArrayDataInputStream(packet.getData(), packet.getOffset(), packet.getLength());
            Message msg = new Message();
            msg.readFrom(inp);
            up(new Event(Event.MSG, msg));
        } catch (SocketException socketEx) {
            // Socket closed: normal shutdown path for this loop.
            break;
        } catch (Throwable ex) {
            log.error(Util.getMessage("FailedReceivingPacketFrom"), packet.getSocketAddress(), ex);
        }
    }
    if (log.isTraceEnabled())
        log.trace("receiver thread terminated");
}
17. TransactionEditTest#verifyDecodingSupportsOlderVersion()
Project: incubator-tephra
File: TransactionEditTest.java
@SuppressWarnings("deprecation") private void verifyDecodingSupportsOlderVersion(TransactionEdit edit, TransactionEditCodecs.TransactionEditCodec olderCodec) throws IOException { // encoding with older version of codec ByteArrayDataOutput out = ByteStreams.newDataOutput(); TransactionEditCodecs.encode(edit, out, olderCodec); // decoding TransactionEdit decodedEdit = new TransactionEdit(); DataInput in = ByteStreams.newDataInput(out.toByteArray()); decodedEdit.readFields(in); Assert.assertEquals(edit, decodedEdit); }
18. RowMatrixBlock#readExternal()
Project: incubator-systemml
File: RowMatrixBlock.java
/**
 * Redirects the default Java serialization via Externalizable to the project's
 * Hadoop-writable serialization for efficient deserialization.
 *
 * @param is object input to read from
 * @throws IOException if reading the header or payload fails
 */
public void readExternal(ObjectInput is) throws IOException {
    DataInput dis = is;
    if (is instanceof ObjectInputStream) {
        // fast deserialize of dense/sparse blocks
        ObjectInputStream ois = (ObjectInputStream) is;
        dis = new FastBufferedDataInputStream(ois);
    }
    readHeaderAndPayload(dis);
}
19. PartitionedBlock#readExternal()
Project: incubator-systemml
File: PartitionedBlock.java
/**
 * Redirects the default Java serialization via Externalizable to the project's
 * Hadoop-writable serialization for efficient broadcast deserialization.
 *
 * @param is object input to read from
 * @throws IOException if reading the header or payload fails
 */
public void readExternal(ObjectInput is) throws IOException {
    DataInput dis = is;
    if (is instanceof ObjectInputStream) {
        // fast deserialize of dense/sparse blocks
        ObjectInputStream ois = (ObjectInputStream) is;
        dis = new FastBufferedDataInputStream(ois);
    }
    readHeaderAndPayload(dis);
}
20. CorrMatrixBlock#readExternal()
Project: incubator-systemml
File: CorrMatrixBlock.java
/**
 * Redirects the default Java serialization via Externalizable to the project's
 * Hadoop-writable serialization for efficient deserialization.
 *
 * @param is object input to read from
 * @throws IOException if reading the header or payload fails
 */
public void readExternal(ObjectInput is) throws IOException {
    DataInput dis = is;
    if (is instanceof ObjectInputStream) {
        // fast deserialize of dense/sparse blocks
        ObjectInputStream ois = (ObjectInputStream) is;
        dis = new FastBufferedDataInputStream(ois);
    }
    readHeaderAndPayload(dis);
}
21. HadoopSerializationWrapperSelfTest#testIntJavaSerialization()
Project: ignite
File: HadoopSerializationWrapperSelfTest.java
/**
 * Tests read/write of Integer via native JavaSerialization.
 *
 * @throws Exception If fails.
 */
public void testIntJavaSerialization() throws Exception {
    HadoopSerialization ser = new HadoopSerializationWrapper(new JavaSerialization(), Integer.class);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(buffer);
    ser.write(output, 3);
    ser.write(output, -5);
    ser.close();
    // Values must come back in write order.
    DataInput input = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    assertEquals(3, ((Integer) ser.read(input, null)).intValue());
    assertEquals(-5, ((Integer) ser.read(input, null)).intValue());
}
22. HadoopSerializationWrapperSelfTest#testIntWritableSerialization()
Project: ignite
File: HadoopSerializationWrapperSelfTest.java
/**
 * Tests read/write of IntWritable via native WritableSerialization.
 *
 * @throws Exception If fails.
 */
public void testIntWritableSerialization() throws Exception {
    HadoopSerialization ser = new HadoopSerializationWrapper(new WritableSerialization(), IntWritable.class);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(buffer);
    ser.write(output, new IntWritable(3));
    ser.write(output, new IntWritable(-5));
    // Writables serialize as big-endian ints with no framing.
    assertEquals("[0, 0, 0, 3, -1, -1, -1, -5]", Arrays.toString(buffer.toByteArray()));
    DataInput input = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    assertEquals(3, ((IntWritable) ser.read(input, null)).get());
    assertEquals(-5, ((IntWritable) ser.read(input, null)).get());
}
23. TestTypedBytesWritable#testIO()
Project: hadoop-mapreduce
File: TestTypedBytesWritable.java
public void testIO() throws IOException {
    // Serialize a TypedBytesWritable holding 12345...
    TypedBytesWritable original = new TypedBytesWritable();
    original.setValue(12345);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(buffer);
    original.write(output);
    // ...then deserialize into a fresh instance and compare for equality.
    ByteArrayInputStream source = new ByteArrayInputStream(buffer.toByteArray());
    DataInput input = new DataInputStream(source);
    TypedBytesWritable decoded = new TypedBytesWritable();
    decoded.readFields(input);
    assertEquals(original, decoded);
}
24. TestTypedBytesWritable#testIO()
Project: hadoop-common
File: TestTypedBytesWritable.java
public void testIO() throws IOException {
    // Round-trip a TypedBytesWritable holding 12345 through byte streams.
    TypedBytesWritable original = new TypedBytesWritable();
    original.setValue(12345);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(buffer);
    original.write(output);
    ByteArrayInputStream source = new ByteArrayInputStream(buffer.toByteArray());
    DataInput input = new DataInputStream(source);
    TypedBytesWritable decoded = new TypedBytesWritable();
    decoded.readFields(input);
    // The deserialized copy must equal the original value.
    assertEquals(original, decoded);
}
25. TestWritableUtils#testWritesReads()
Project: gora
File: TestWritableUtils.java
@Test
public void testWritesReads() throws Exception {
    Properties props = new Properties();
    props.put("keyBlah", "valueBlah");
    props.put("keyBlah2", "valueBlah2");
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    // Declare the concrete stream type so no cast is needed for flush().
    DataOutputStream out = new DataOutputStream(bytes);
    WritableUtils.writeProperties(out, props);
    out.flush();
    // Read the properties back and verify both entries survived the round trip.
    DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    Properties propsRead = WritableUtils.readProperties(in);
    assertEquals(propsRead.get("keyBlah"), props.get("keyBlah"));
    assertEquals(propsRead.get("keyBlah2"), props.get("keyBlah2"));
}
26. ZookeeperUtils#byteArrayToWritable()
Project: goldenorb
File: ZookeeperUtils.java
/**
 * Reconstructs a Writable of the given class from its serialized bytes.
 *
 * @param byteArray serialized Writable bytes
 * @param writableClass concrete Writable type to instantiate
 * @param orbConf configuration handed to ReflectionUtils for instantiation
 * @return the deserialized Writable
 */
public static Writable byteArrayToWritable(byte[] byteArray, Class<? extends Writable> writableClass, OrbConfiguration orbConf) {
    DataInput in = new DataInputStream(new ByteArrayInputStream(byteArray));
    Writable w = (Writable) ReflectionUtils.newInstance(writableClass, orbConf);
    try {
        w.readFields(in);
    } catch (IOException e) {
        // Surface read failures as unchecked, preserving the cause.
        throw new RuntimeException(e);
    }
    return w;
}
27. DataStreamSerializationFactoryV1#read()
Project: geogig
File: DataStreamSerializationFactoryV1.java
/**
 * Reads one serialized RevObject from the raw stream: a NUL-terminated type
 * header selects the concrete decoder for the payload that follows.
 *
 * @param id the object id the payload is expected to describe
 * @param rawData stream positioned at the start of the serialized object
 * @throws IOException if the stream cannot be read
 */
@Override
public RevObject read(ObjectId id, InputStream rawData) throws IOException {
    DataInput in = new DataInputStream(rawData);
    String header = readToMarker(in, NUL);
    if ("commit".equals(header))
        return readCommit(id, in);
    else if ("tree".equals(header))
        return readTree(id, in);
    else if ("feature".equals(header))
        return readFeature(id, in);
    else if ("featuretype".equals(header))
        return readFeatureType(id, in);
    else if ("tag".equals(header))
        return readTag(id, in);
    else
        throw new IllegalArgumentException("Unrecognized object header: " + header);
}
28. UnbatchingDecorator#append()
Project: flume
File: UnbatchingDecorator.java
/**
 * If the event is not a batch event, pass it through unchanged; otherwise
 * unbatch it and append each contained event individually.
 *
 * @param e incoming event, possibly a batch
 */
@Override
public void append(Event e) throws IOException, InterruptedException {
    if (!BatchingDecorator.isBatch(e)) {
        super.append(e);
        passthrough.incrementAndGet();
        return;
    }
    // Batch events carry a count attribute and a concatenated data payload.
    int sz = ByteBuffer.wrap(e.get(BatchingDecorator.BATCH_SIZE)).getInt();
    byte[] data = e.get(BatchingDecorator.BATCH_DATA);
    DataInput in = new DataInputStream(new ByteArrayInputStream(data));
    batchCnt.incrementAndGet();
    // Deserialize and forward each of the sz contained events in order.
    for (int i = 0; i < sz; i++) {
        WriteableEvent we = new WriteableEvent();
        we.readFields(in);
        super.append(we);
        unbatchedCnt.incrementAndGet();
    }
}
29. FileMetadataSerializerTest#readsMultivaluedPropertiesAndJoinsValuesWithComma()
Project: che
File: FileMetadataSerializerTest.java
@Test
public void readsMultivaluedPropertiesAndJoinsValuesWithComma() throws Exception {
    // Stub a DataInput describing 2 properties with 3 values each.
    DataInput input = mock(DataInput.class);
    when(input.readInt()).thenReturn(2, 3, 3);
    when(input.readUTF()).thenReturn("a", "x", "y", "z", "b", "z", "y", "x");
    // Multiple values for one key are joined with commas, in read order.
    Map<String, String> expected = ImmutableMap.of("a", "x,y,z", "b", "z,y,x");
    assertEquals(expected, metadataSerializer.read(input));
}
30. FileMetadataSerializerTest#readsProperties()
Project: che
File: FileMetadataSerializerTest.java
@Test
public void readsProperties() throws Exception {
    // Stub a DataInput describing 2 properties with a single value each.
    DataInput input = mock(DataInput.class);
    when(input.readInt()).thenReturn(2, 1, 1);
    when(input.readUTF()).thenReturn("a", "x", "b", "z");
    Map<String, String> expected = ImmutableMap.of("a", "x", "b", "z");
    assertEquals(expected, metadataSerializer.read(input));
}
31. FileLockSerializerTest#readsLockObjectWithExpirationData()
Project: che
File: FileLockSerializerTest.java
@Test
public void readsLockObjectWithExpirationData() throws Exception {
    String token = Long.toString(System.currentTimeMillis());
    long expired = System.currentTimeMillis() + 10000;
    // Stub the serialized form: token string followed by expiration timestamp.
    DataInput input = mock(DataInput.class);
    when(input.readUTF()).thenReturn(token);
    when(input.readLong()).thenReturn(expired);
    FileLock actual = lockSerializer.read(input);
    assertEquals(new FileLock(token, expired), actual);
}
32. SparkBatchSinkFactory#deserialize()
Project: cdap
File: SparkBatchSinkFactory.java
/**
 * Reconstructs a SparkBatchSinkFactory from its serialized stream form:
 * output-format providers, dataset infos, then sink outputs, each encoded as a
 * serialized map.
 *
 * @param inputStream stream positioned at the start of the serialized factory
 * @throws IOException if the stream is truncated or unreadable
 */
static SparkBatchSinkFactory deserialize(InputStream inputStream) throws IOException {
    DataInput input = new DataInputStream(inputStream);
    Map<String, OutputFormatProvider> outputFormatProviders = Serializations.deserializeMap(input,
        new Serializations.ObjectReader<OutputFormatProvider>() {
            @Override
            public OutputFormatProvider read(DataInput input) throws IOException {
                // Provider = class name followed by its string-to-string configuration map.
                return new BasicOutputFormatProvider(input.readUTF(),
                    Serializations.deserializeMap(input, Serializations.createStringObjectReader()));
            }
        });
    Map<String, DatasetInfo> datasetInfos = Serializations.deserializeMap(input,
        new Serializations.ObjectReader<DatasetInfo>() {
            @Override
            public DatasetInfo read(DataInput input) throws IOException {
                return DatasetInfo.deserialize(input);
            }
        });
    Map<String, Set<String>> sinkOutputs = Serializations.deserializeMap(input, Serializations.createStringSetObjectReader());
    return new SparkBatchSinkFactory(outputFormatProviders, datasetInfos, sinkOutputs);
}
33. DataOutputTest#testSequentialWriter()
Project: cassandra
File: DataOutputTest.java
@Test
public void testSequentialWriter() throws IOException {
    File file = FileUtils.createTempFile("dataoutput", "test");
    SequentialWriterOption option = SequentialWriterOption.newBuilder().bufferSize(32).finishOnClose(true).build();
    // Write the canonical data through the sequential writer...
    final SequentialWriter writer = new SequentialWriter(file, option);
    DataOutputStreamPlus out = new WrappedDataOutputStreamPlus(writer);
    DataInput reference = testWrite(out);
    out.flush();
    out.close();
    // ...then re-read the file and verify it against the canon.
    DataInputStream reread = new DataInputStream(new FileInputStream(file));
    testRead(reread, reference);
    reread.close();
    Assert.assertTrue(file.delete());
}
34. NamedColorProfileParser#readNamedColors()
Project: xml-graphics-commons
File: NamedColorProfileParser.java
/**
 * Parses the named-color-2 ('ncl2') tag of an ICC profile into an array of
 * named color spaces, converting each color's PCS value into the matching
 * Java color space.
 *
 * @param profile the ICC profile containing the named-color tag
 * @param profileName human-readable profile name attached to each result
 * @param profileURI profile URI attached to each result
 * @return one NamedColorSpace per named color in the tag
 * @throws IOException if the tag data cannot be read
 */
private NamedColorSpace[] readNamedColors(ICC_Profile profile, String profileName, String profileURI) throws IOException {
    byte[] tag = profile.getData(ICC_Profile.icSigNamedColor2Tag);
    DataInput din = new DataInputStream(new ByteArrayInputStream(tag));
    // The tag must start with the 'ncl2' structure signature.
    int sig = din.readInt();
    if (sig != NCL2) {
        throw new UnsupportedOperationException("Unsupported structure type: " + toSignatureString(sig) + ". Expected " + toSignatureString(NCL2));
    }
    // Skip reserved bytes and the vendor-specific flags field.
    din.skipBytes(8);
    int numColors = din.readInt();
    NamedColorSpace[] result = new NamedColorSpace[numColors];
    int numDeviceCoord = din.readInt();
    // Common prefix/suffix wrapped around every color name, 32 bytes each.
    String prefix = readAscii(din, 32);
    String suffix = readAscii(din, 32);
    for (int i = 0; i < numColors; i++) {
        String name = prefix + readAscii(din, 32) + suffix;
        // PCS coordinates are unsigned 16-bit values scaled by 0x8000.
        int[] pcs = readUInt16Array(din, 3);
        float[] colorvalue = new float[3];
        for (int j = 0; j < pcs.length; j++) {
            colorvalue[j] = ((float) pcs[j]) / 0x8000;
        }
        //device coordinates are ignored for now
        /*int[] deviceCoord =*/
        readUInt16Array(din, numDeviceCoord);
        switch (profile.getPCSType()) {
            case ColorSpace.TYPE_XYZ:
                result[i] = new NamedColorSpace(name, colorvalue, profileName, profileURI);
                break;
            case ColorSpace.TYPE_Lab:
                //Not sure if this always D50 here,
                //but the illuminant in the header is fixed to D50.
                CIELabColorSpace labCS = ColorSpaces.getCIELabColorSpaceD50();
                result[i] = new NamedColorSpace(name, labCS.toColor(colorvalue, 1.0f), profileName, profileURI);
                break;
            default:
                throw new UnsupportedOperationException("PCS type is not supported: " + profile.getPCSType());
        }
    }
    return result;
}
35. DataOutputTest#testSafeMemoryWriter()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testSafeMemoryWriter() throws IOException {
    SafeMemoryWriter out = new SafeMemoryWriter(10);
    DataInput reference = testWrite(out);
    // Copy the written region out of the writer's buffer and re-read it.
    byte[] copy = new byte[345];
    out.currentBuffer().getBytes(0, copy, 0, 345);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(copy));
    testRead(reread, reference);
}
36. DataOutputTest#testDataOutputHeapByteBuffer()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testDataOutputHeapByteBuffer() throws IOException {
    ByteBuffer buf = wrap(new byte[345], false);
    // Write through a duplicate so the source buffer's position is untouched.
    DataOutputByteBuffer out = new DataOutputByteBuffer(buf.duplicate());
    DataInput reference = testWrite(out);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(ByteBufferUtil.getArray(buf)));
    testRead(reread, reference);
}
37. DataOutputTest#testDataOutputDirectByteBuffer()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testDataOutputDirectByteBuffer() throws IOException {
    // Same as the heap-buffer test, but backed by a direct (off-heap) buffer.
    ByteBuffer buf = wrap(new byte[345], true);
    DataOutputByteBuffer out = new DataOutputByteBuffer(buf.duplicate());
    DataInput reference = testWrite(out);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(ByteBufferUtil.getArray(buf)));
    testRead(reread, reference);
}
38. DataOutputTest#testDataOutputBuffer()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testDataOutputBuffer() throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInput reference = testWrite(out);
    // Re-read everything that was written and check it against the canon.
    DataInput reread = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
    testRead(reread, reference);
}
39. DataOutputTest#testDataOutputChannelAndChannel()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testDataOutputChannelAndChannel() throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    // Write through a WritableByteChannel wrapped over the byte sink.
    DataOutputStreamPlus out = new DataOutputStreamAndChannel(Channels.newChannel(sink));
    DataInput reference = testWrite(out);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(sink.toByteArray()));
    testRead(reread, reference);
}
40. DataOutputTest#testDataOutputStreamPlus()
Project: stratio-cassandra
File: DataOutputTest.java
@Test
public void testDataOutputStreamPlus() throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    DataOutputStreamPlus out = new DataOutputStreamPlus(sink);
    DataInput reference = testWrite(out);
    // Re-read the captured bytes and verify them against the canon.
    DataInput reread = new DataInputStream(new ByteArrayInputStream(sink.toByteArray()));
    testRead(reread, reference);
}
41. LittleEndianDataOutputStreamTest#testWriteChars()
Project: guava
File: LittleEndianDataOutputStreamTest.java
public void testWriteChars() throws IOException {
    // writeChars() emits each char as two bytes, low byte first (little endian).
    out.writeChars("rÉsumÉ");
    byte[] written = baos.toByteArray();
    DataInput input = new DataInputStream(new ByteArrayInputStream(written));
    byte[] actual = new byte[12];
    input.readFully(actual);
    // Each pair is (low byte, high byte); 'É' is U+00C9, so its low byte is 0xC9.
    assertEquals('r', actual[0]);
    assertEquals(0, actual[1]);
    assertEquals((byte) 0xC9, actual[2]);
    assertEquals(0, actual[3]);
    assertEquals('s', actual[4]);
    assertEquals(0, actual[5]);
    assertEquals('u', actual[6]);
    assertEquals(0, actual[7]);
    assertEquals('m', actual[8]);
    assertEquals(0, actual[9]);
    assertEquals((byte) 0xC9, actual[10]);
    assertEquals(0, actual[11]);
}
42. LittleEndianDataOutputStreamTest#testWriteLittleEndian()
Project: guava
File: LittleEndianDataOutputStreamTest.java
// Exercises every primitive writer on the little-endian stream, then re-reads
// the output with a BIG-endian DataInputStream: each multi-byte expected
// value is therefore the byte-swapped form of what was written (e.g.
// writeInt(0xCAFEBABE) reads back as 0xBEBAFECA). Single bytes, booleans and
// the UTF string round-trip unchanged.
public void testWriteLittleEndian() throws IOException { /* Write out various test values in LITTLE ENDIAN FORMAT */ out.write(new byte[] { -100, 100 }); out.writeBoolean(true); out.writeBoolean(false); out.writeByte(100); out.writeByte(-100); out.writeByte((byte) 200); out.writeChar('a'); out.writeShort((short) -30000); out.writeShort((short) 50000); out.writeInt(0xCAFEBABE); out.writeLong(0xDEADBEEFCAFEBABEL); out.writeUTF("Herby Derby"); out.writeFloat(Float.intBitsToFloat(0xCAFEBABE)); out.writeDouble(Double.longBitsToDouble(0xDEADBEEFCAFEBABEL)); byte[] data = baos.toByteArray(); /* Setup input streams */ DataInput in = new DataInputStream(new ByteArrayInputStream(data)); /* Read in various values NORMALLY */ byte[] b = new byte[2]; in.readFully(b); assertEquals(-100, b[0]); assertEquals(100, b[1]); assertEquals(true, in.readBoolean()); assertEquals(false, in.readBoolean()); assertEquals(100, in.readByte()); assertEquals(-100, in.readByte()); assertEquals(200, in.readUnsignedByte()); assertEquals('?', in.readChar()); assertEquals(-12150, in.readShort()); assertEquals(20675, in.readUnsignedShort()); assertEquals(0xBEBAFECA, in.readInt()); assertEquals(0xBEBAFECAEFBEADDEL, in.readLong()); assertEquals("Herby Derby", in.readUTF()); assertEquals(0xBEBAFECA, Float.floatToIntBits(in.readFloat())); assertEquals(0xBEBAFECAEFBEADDEL, Double.doubleToLongBits(in.readDouble())); }
43. LittleEndianDataInputStreamTest#testReadFully()
Project: guava
File: LittleEndianDataInputStreamTest.java
/** Verifies that readFully() returns every byte of the fixture array in order. */
public void testReadFully() throws IOException {
    DataInput input = new LittleEndianDataInputStream(new ByteArrayInputStream(data));
    byte[] copy = new byte[data.length];
    input.readFully(copy);
    assertEquals(Bytes.asList(data), Bytes.asList(copy));
}
44. DataOutputTest#testDataOutputHeapByteBuffer()
Project: cassandra
File: DataOutputTest.java
/** Round trip through a BufferedDataOutputStreamPlus backed by a heap ByteBuffer. */
@Test
public void testDataOutputHeapByteBuffer() throws IOException
{
    ByteBuffer backing = wrap(new byte[345], false);
    BufferedDataOutputStreamPlus writer = new BufferedDataOutputStreamPlus(null, backing.duplicate());
    DataInput reference = testWrite(writer);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(ByteBufferUtil.getArray(backing)));
    testRead(reread, reference);
}
45. DataOutputTest#testDataOutputDirectByteBuffer()
Project: cassandra
File: DataOutputTest.java
/** Round trip through a BufferedDataOutputStreamPlus backed by a direct ByteBuffer. */
@Test
public void testDataOutputDirectByteBuffer() throws IOException
{
    ByteBuffer backing = wrap(new byte[345], true);
    BufferedDataOutputStreamPlus writer = new BufferedDataOutputStreamPlus(null, backing.duplicate());
    DataInput reference = testWrite(writer);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(ByteBufferUtil.getArray(backing)));
    testRead(reread, reference);
}
46. DataOutputTest#testDataOutputBuffer()
Project: cassandra
File: DataOutputTest.java
/** Round trip through a DataOutputBuffer, comparing against the canonical input. */
@Test
public void testDataOutputBuffer() throws IOException
{
    DataOutputBuffer sink = new DataOutputBuffer();
    DataInput reference = testWrite(sink);
    byte[] written = sink.toByteArray();
    DataInput reread = new DataInputStream(new ByteArrayInputStream(written));
    testRead(reread, reference);
}
47. DataOutputTest#testBufferedDataOutputStreamPlusAndChannel()
Project: cassandra
File: DataOutputTest.java
/**
 * Round trip through a channel-backed BufferedDataOutputStreamPlus; the
 * writer is closed before reading back.
 */
@Test
public void testBufferedDataOutputStreamPlusAndChannel() throws IOException
{
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    DataOutputStreamPlus writer = new BufferedDataOutputStreamPlus(Channels.newChannel(sink));
    DataInput reference = testWrite(writer);
    writer.close();
    DataInput reread = new DataInputStream(new ByteArrayInputStream(sink.toByteArray()));
    testRead(reread, reference);
}
48. DataOutputTest#testWrappedDataOutputChannelAndChannel()
Project: cassandra
File: DataOutputTest.java
/** Round trip through a WrappedDataOutputStreamPlus over a byte-array sink. */
@Test
public void testWrappedDataOutputChannelAndChannel() throws IOException
{
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    DataOutputStreamPlus writer = new WrappedDataOutputStreamPlus(sink);
    DataInput reference = testWrite(writer);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(sink.toByteArray()));
    testRead(reread, reference);
}
49. DataOutputTest#testWrappedDataOutputStreamPlus()
Project: cassandra
File: DataOutputTest.java
/** Same wrapped-stream round trip as above, under the stream-plus test name. */
@Test
public void testWrappedDataOutputStreamPlus() throws IOException
{
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    DataOutputStreamPlus writer = new WrappedDataOutputStreamPlus(sink);
    DataInput reference = testWrite(writer);
    DataInput reread = new DataInputStream(new ByteArrayInputStream(sink.toByteArray()));
    testRead(reread, reference);
}
50. NamedColorProfileParser#readSimpleString()
Project: xml-graphics-commons
File: NamedColorProfileParser.java
private String readSimpleString(byte[] tag) throws IOException { DataInput din = new DataInputStream(new ByteArrayInputStream(tag)); int sig = din.readInt(); if (sig == MLUC) { return readMLUC(din); } else { //Unsupported tag structure type return null; } }
51. BinaryCasSerDes4#deserialize()
Project: uima-uimaj
File: BinaryCasSerDes4.java
/**
 * Deserializes a CAS from {@code deserIn}, reusing the stream directly when
 * it is already a DataInputStream and wrapping it otherwise.
 *
 * @param cas the CAS to populate
 * @param deserIn the serialized form
 * @param isDelta whether the stream holds a delta rather than a full CAS
 * @throws IOException on read failure
 */
public void deserialize(CASImpl cas, InputStream deserIn, boolean isDelta) throws IOException {
    final DataInput in =
        (deserIn instanceof DataInputStream) ? (DataInputStream) deserIn : new DataInputStream(deserIn);
    new Deserializer(cas, in, isDelta).deserialize();
}
52. TestTaskSpec#testSerDe()
Project: tez
File: TestTaskSpec.java
// Builds a fully populated TaskSpec (processor descriptor, one input, one
// output, a task Configuration, and a synthetic attempt id), serializes it
// with write(DataOutput), deserializes a fresh instance via
// readFields(DataInput), and asserts field-by-field that the round trip is
// faithful — including that the null group-input list stays null and the
// "foo" conf key survives.
@Test(timeout = 5000) public void testSerDe() throws IOException { ByteBuffer payload = null; ProcessorDescriptor procDesc = ProcessorDescriptor.create("proc").setUserPayload(UserPayload.create(payload)).setHistoryText("historyText"); List<InputSpec> inputSpecs = new ArrayList<>(); InputSpec inputSpec = new InputSpec("src1", InputDescriptor.create("inputClass"), 10); inputSpecs.add(inputSpec); List<OutputSpec> outputSpecs = new ArrayList<>(); OutputSpec outputSpec = new OutputSpec("dest1", OutputDescriptor.create("outputClass"), 999); outputSpecs.add(outputSpec); List<GroupInputSpec> groupInputSpecs = null; Configuration taskConf = new Configuration(false); taskConf.set("foo", "bar"); TezTaskAttemptID taId = TezTaskAttemptID.getInstance(TezTaskID.getInstance(TezVertexID.getInstance(TezDAGID.getInstance("1234", 1, 1), 1), 1), 1); TaskSpec taskSpec = new TaskSpec(taId, "dagName", "vName", -1, procDesc, inputSpecs, outputSpecs, groupInputSpecs, taskConf); ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutput out = new DataOutputStream(bos); taskSpec.write(out); TaskSpec deSerTaskSpec = new TaskSpec(); ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); DataInput in = new DataInputStream(bis); deSerTaskSpec.readFields(in); Assert.assertEquals(taskSpec.getDAGName(), deSerTaskSpec.getDAGName()); Assert.assertEquals(taskSpec.getVertexName(), deSerTaskSpec.getVertexName()); Assert.assertEquals(taskSpec.getVertexParallelism(), deSerTaskSpec.getVertexParallelism()); Assert.assertEquals(taskSpec.getInputs().size(), deSerTaskSpec.getInputs().size()); Assert.assertEquals(taskSpec.getOutputs().size(), deSerTaskSpec.getOutputs().size()); Assert.assertNull(deSerTaskSpec.getGroupInputs()); Assert.assertEquals(taskSpec.getInputs().get(0).getSourceVertexName(), deSerTaskSpec.getInputs().get(0).getSourceVertexName()); Assert.assertEquals(taskSpec.getOutputs().get(0).getDestinationVertexName(), 
deSerTaskSpec.getOutputs().get(0).getDestinationVertexName()); Assert.assertEquals(taskConf.get("foo"), deSerTaskSpec.getTaskConf().get("foo")); }
53. TransactionEditTest#assertSerializedEdit()
Project: tephra
File: TransactionEditTest.java
/** Serializes the edit, reads it back via readFields(), and asserts round-trip equality. */
private void assertSerializedEdit(TransactionEdit originalEdit) throws IOException {
    ByteArrayDataOutput serialized = ByteStreams.newDataOutput();
    originalEdit.write(serialized);
    DataInput reader = ByteStreams.newDataInput(serialized.toByteArray());
    TransactionEdit roundTripped = new TransactionEdit();
    roundTripped.readFields(reader);
    Assert.assertEquals(originalEdit, roundTripped);
}
54. StorageConnectorTest#testCloseAllExcept()
Project: sis
File: StorageConnectorTest.java
/**
 * Tests the {@link StorageConnector#closeAllExcept(Object)} method.
 *
 * @throws DataStoreException if an error occurred while using the storage connector.
 * @throws IOException if an error occurred while reading the test file.
 */
@Test
@DependsOnMethod("testGetAsDataInputFromStream")
public void testCloseAllExcept() throws DataStoreException, IOException {
    final StorageConnector connector = create(true);
    final DataInput view = connector.getStorageAs(DataInput.class);
    final ReadableByteChannel channel = ((ChannelImageInputStream) view).channel;
    assertTrue("channel.isOpen()", channel.isOpen());
    // Excluding the DataInput view must leave its underlying channel open.
    connector.closeAllExcept(view);
    assertTrue("channel.isOpen()", channel.isOpen());
    channel.close();
}
55. ChannelDataInputTest#transferRandomData()
Project: sis
File: ChannelDataInputTest.java
// NOTE(review): every case reads the same quantity of data from both the
// reference DataInput 'r' and the tested ChannelDataInput 't' and asserts
// equality, so the two streams stay in lock-step across invocations. Cases
// 9-15 compare whole primitive arrays whose random length is bounded by
// ARRAY_MAX_LENGTH scaled down by the element size in bytes.
/** * Reads a random unit of data using a method selected randomly. * This method is invoked (indirectly) by {@link #testAllReadMethods()}. */ @Override final void transferRandomData(final int operation) throws IOException { final ChannelDataInput t = testedStream; final DataInput r = referenceStream; switch(operation) { default: throw new AssertionError(operation); case 0: assertEquals("readByte()", r.readByte(), t.readByte()); break; case 1: assertEquals("readShort()", r.readShort(), t.readShort()); break; case 2: assertEquals("readUnsignedShort()", r.readUnsignedShort(), t.readUnsignedShort()); break; case 3: assertEquals("readChar()", r.readChar(), t.readChar()); break; case 4: assertEquals("readInt()", r.readInt(), t.readInt()); break; case 5: assertEquals("readUnsignedInt()", r.readInt() & 0xFFFFFFFFL, t.readUnsignedInt()); break; case 6: assertEquals("readLong()", r.readLong(), t.readLong()); break; case 7: assertEquals("readFloat()", r.readFloat(), t.readFloat(), 0f); break; case 8: assertEquals("readDouble()", r.readDouble(), t.readDouble(), 0d); break; case 9: { final int n = random.nextInt(ARRAY_MAX_LENGTH); final byte[] tmp = new byte[n]; r.readFully(tmp); assertArrayEquals("readBytes(int)", tmp, t.readBytes(n)); break; } case 10: { final int n = random.nextInt(ARRAY_MAX_LENGTH / (Character.SIZE / Byte.SIZE)); final char[] tmp = new char[n]; for (int i = 0; i < n; i++) tmp[i] = r.readChar(); assertArrayEquals("readChars(int)", tmp, t.readChars(n)); break; } case 11: { final int n = random.nextInt(ARRAY_MAX_LENGTH / (Short.SIZE / Byte.SIZE)); final short[] tmp = new short[n]; for (int i = 0; i < n; i++) tmp[i] = r.readShort(); assertArrayEquals("readShorts(int)", tmp, t.readShorts(n)); break; } case 12: { final int n = random.nextInt(ARRAY_MAX_LENGTH / (Integer.SIZE / Byte.SIZE)); final int[] tmp = new int[n]; for (int i = 0; i < n; i++) tmp[i] = r.readInt(); assertArrayEquals("readInts(int)", tmp, t.readInts(n)); break; } case 13: { final int n = 
random.nextInt(ARRAY_MAX_LENGTH / (Long.SIZE / Byte.SIZE)); final long[] tmp = new long[n]; for (int i = 0; i < n; i++) tmp[i] = r.readLong(); assertArrayEquals("readLongs(int)", tmp, t.readLongs(n)); break; } case 14: { final int n = random.nextInt(ARRAY_MAX_LENGTH / (Float.SIZE / Byte.SIZE)); final float[] tmp = new float[n]; for (int i = 0; i < n; i++) tmp[i] = r.readFloat(); assertArrayEquals("readFloats(int)", tmp, t.readFloats(n), 0); break; } case 15: { final int n = random.nextInt(ARRAY_MAX_LENGTH / (Double.SIZE / Byte.SIZE)); final double[] tmp = new double[n]; for (int i = 0; i < n; i++) tmp[i] = r.readDouble(); assertArrayEquals("readDoubles(int)", tmp, t.readDoubles(n), 0); break; } } }
56. StorageConnector#createDataInput()
Project: sis
File: StorageConnector.java
// NOTE(review): resolution order is (1) the storage object already IS a
// DataInput, (2) a ChannelImageInputStream built around the cached
// ChannelDataInput view, (3) ImageIO.createImageInputStream as last resort.
// When the channel view gets wrapped, the new wrapper replaces the previous
// ChannelDataInput entry in 'views'; a non-matching previous value signals a
// concurrent mutation and raises ConcurrentModificationException.
/** * Creates a view for the input as a {@link DataInput} if possible. This method performs the choice * documented in the {@link #getStorageAs(Class)} method for the {@code DataInput} case. Opening the * data input may imply creating a {@link ByteBuffer}, in which case the buffer will be stored under * the {@code ByteBuffer.class} key together with the {@code DataInput.class} case. * * @throws IOException if an error occurred while opening a stream for the input. */ private void createDataInput() throws IOException { final DataInput asDataInput; if (storage instanceof DataInput) { asDataInput = (DataInput) storage; } else { /* * Creates a ChannelImageInputStream instance. We really need that specific type because some * SIS data stores will want to access directly the channel and the buffer. We will fallback * on the ImageIO.createImageInputStream(Object) method only in last resort. */ if (!views.containsKey(ChannelDataInput.class)) { createChannelDataInput(true); } final ChannelDataInput c = getView(ChannelDataInput.class); if (c == null) { asDataInput = ImageIO.createImageInputStream(storage); addViewToClose(asDataInput, storage); } else if (c instanceof DataInput) { asDataInput = (DataInput) c; // No call to 'addViewToClose' because the instance already exists. } else { asDataInput = new ChannelImageInputStream(c); if (views.put(ChannelDataInput.class, asDataInput) != c) { // Replace the previous instance. throw new ConcurrentModificationException(); } addViewToClose(asDataInput, c.channel); } } addView(DataInput.class, asDataInput); }
57. TestDataModel#testReadWriteInternal()
Project: pig
File: TestDataModel.java
@Test public void testReadWriteInternal() throws Exception { // Create a tuple with every internal data type in it, and then read and // write it, both via DataReaderWriter and Tuple.readFields TupleFactory tf = TupleFactory.getInstance(); Tuple t1 = tf.newTuple(1); InternalMap map = new InternalMap(2); map.put(new Integer(1), new String("world")); map.put(new Long(3L), new String("all")); t1.set(0, map); File file = File.createTempFile("Tuple", "put"); FileOutputStream fos = new FileOutputStream(file); DataOutput out = new DataOutputStream(fos); t1.write(out); fos.close(); FileInputStream fis = new FileInputStream(file); DataInput in = new DataInputStream(fis); Tuple after = tf.newTuple(); after.readFields(in); Object o = after.get(0); assertTrue("isa InternalMap", o instanceof InternalMap); InternalMap m = (InternalMap) o; assertEquals("world", (String) m.get(new Integer(1))); assertEquals("all", (String) m.get(new Long(3L))); assertNull(m.get("fred")); file.delete(); }
58. TestDataModel#testReadWrite()
Project: pig
File: TestDataModel.java
// Writes a tuple containing one of every Pig data type to a temp file TWICE,
// then reads both copies back with readFields() and asserts each element —
// nested tuple, bag of two tuples, string map, Integer, Long, Float, Double,
// Boolean, DataByteArray, and String — deserializes to an equal value.
@Test public void testReadWrite() throws Exception { // Create a tuple with every data type in it, and then read and // write it, both via DataReaderWriter and Tuple.readFields TupleFactory tf = TupleFactory.getInstance(); Tuple t1 = giveMeOneOfEach(); File file = File.createTempFile("Tuple", "put"); FileOutputStream fos = new FileOutputStream(file); DataOutput out = new DataOutputStream(fos); t1.write(out); // twice in a row on purpose t1.write(out); fos.close(); FileInputStream fis = new FileInputStream(file); DataInput in = new DataInputStream(fis); for (int i = 0; i < 2; i++) { Tuple after = tf.newTuple(); after.readFields(in); Object o = after.get(0); assertTrue("isa Tuple", o instanceof Tuple); Tuple t3 = (Tuple) o; o = t3.get(0); assertTrue("isa Integer", o instanceof Integer); assertEquals(new Integer(3), (Integer) o); o = t3.get(1); assertTrue("isa Float", o instanceof Float); assertEquals(new Float(3.0), (Float) o); o = after.get(1); assertTrue("isa Bag", o instanceof DataBag); DataBag b = (DataBag) o; Iterator<Tuple> j = b.iterator(); Tuple[] ts = new Tuple[2]; assertTrue("first tuple in bag", j.hasNext()); ts[0] = j.next(); assertTrue("second tuple in bag", j.hasNext()); ts[1] = j.next(); o = ts[0].get(0); assertTrue("isa Integer", o instanceof Integer); assertEquals(new Integer(4), (Integer) o); o = ts[1].get(0); assertTrue("isa String", o instanceof String); assertEquals("mary had a little lamb", (String) o); o = after.get(2); assertTrue("isa Map", o instanceof Map); Map<String, Object> m = (Map<String, Object>) o; assertEquals("world", (String) m.get("hello")); assertEquals("all", (String) m.get("goodbye")); assertNull(m.get("fred")); o = after.get(3); assertTrue("isa Integer", o instanceof Integer); Integer ii = (Integer) o; assertEquals(new Integer(42), ii); o = after.get(4); assertTrue("isa Long", o instanceof Long); Long l = (Long) o; assertEquals(new Long(5000000000L), l); o = after.get(5); assertTrue("isa Float", o instanceof Float); Float f = 
(Float) o; assertEquals(new Float(3.141592654), f); o = after.get(6); assertTrue("isa Double", o instanceof Double); Double d = (Double) o; assertEquals(new Double(2.99792458e8), d); o = after.get(7); assertTrue("isa Boolean", o instanceof Boolean); Boolean bool = (Boolean) o; assertTrue(bool); o = after.get(8); assertTrue("isa DataByteArray", o instanceof DataByteArray); DataByteArray ba = (DataByteArray) o; assertEquals(new DataByteArray("hello"), ba); o = after.get(9); assertTrue("isa String", o instanceof String); String s = (String) o; assertEquals("goodbye", s); } file.delete(); }
59. TestSchemaTuple#testSerDe()
Project: pig
File: TestSchemaTuple.java
/**
 * Serializes 4096 randomly filled SchemaTuples to a temp file and verifies
 * that each one deserializes equal to what was written, in order.
 */
public void testSerDe(SchemaTupleFactory tf) throws IOException {
    final int count = 4096;
    List<Tuple> written = new ArrayList<Tuple>(count);
    File temp = File.createTempFile("tmp", "tmp");
    temp.deleteOnExit();

    FileOutputStream fos = new FileOutputStream(temp);
    DataOutput dos = new DataOutputStream(fos);
    for (int i = 0; i < count; i++) {
        SchemaTuple<?> tuple = (SchemaTuple<?>) tf.newTuple();
        fillWithData(tuple);
        bis.writeDatum(dos, tuple);
        written.add(tuple);
    }
    fos.close();
    assertEquals(count, written.size());

    FileInputStream fis = new FileInputStream(temp);
    DataInput din = new DataInputStream(fis);
    for (int i = 0; i < count; i++) {
        SchemaTuple<?> readBack = (SchemaTuple<?>) bis.readDatum(din);
        assertEquals(written.get(i), readBack);
    }
    fis.close();
}
60. HttpRequestImpl#readMessage()
Project: openejb
File: HttpRequestImpl.java
/*------------------------------------------------------------*/
/* Methods for reading in and parsing a request               */
/*------------------------------------------------------------*/
/**
 * Parses the incoming HTTP request in its three parts: the request line,
 * the headers, and the body.
 *
 * @param input the data input for this page
 * @throws IOException if an exception is thrown
 */
protected void readMessage(InputStream input) throws IOException {
    final DataInput in = new DataInputStream(input);
    readRequestLine(in);
    readHeaders(in);
    readBody(in);
}
61. HttpRequestImpl#readMessage()
Project: openejb
File: HttpRequestImpl.java
/*------------------------------------------------------------*/
/* Methods for reading in and parsing a request               */
/*------------------------------------------------------------*/
/**
 * Parses the incoming HTTP request in its three parts — request line,
 * headers, and body — then builds the combined parameter map.
 *
 * @param input the data input for this page
 * @throws java.io.IOException if an exception is thrown
 */
protected void readMessage(InputStream input) throws IOException {
    final DataInput in = new DataInputStream(input);
    readRequestLine(in);
    readHeaders(in);
    readBody(in);
    // Merge form and query parameters into one lookup map.
    parameters = new HashMap();
    parameters.putAll(getFormParameters());
    parameters.putAll(getQueryParameters());
}
62. NeuralNetworkWeightsDelta#Deserialize()
Project: Metronome
File: NeuralNetworkWeightsDelta.java
// Restores the scalar training-progress fields (pass/iteration counters,
// trained-record count, percent correct, RMSE) from the front of the byte
// array, then hands the REMAINDER of the same ByteArrayInputStream to an
// ObjectInputStream to rebuild the NeuralNetwork object — field order must
// match the corresponding Serialize/write method exactly.
// NOTE(review): network deserialization errors are only printed, so
// 'network' may be left null; callers must tolerate that.
public void Deserialize(byte[] bytes) throws IOException { // DataInput in) throws IOException { ByteArrayInputStream b = new ByteArrayInputStream(bytes); DataInput in = new DataInputStream(b); // this.src_host = in.readUTF(); //this.SrcWorkerPassCount = in.readInt(); this.GlobalPassCount = in.readInt(); this.IterationComplete = in.readInt(); this.CurrentIteration = in.readInt(); // d.writeInt(this.TrainedRecords); this.TrainedRecords = in.readInt(); //this.AvgLogLikelihood = in.readFloat(); // d.writeFloat(this.AvgLogLikelihood); // d.writeFloat(this.PercentCorrect); this.PercentCorrect = in.readFloat(); this.RMSE = in.readDouble(); ObjectInputStream oistream = null; try { oistream = new ObjectInputStream(b); this.network = (NeuralNetwork) oistream.readObject(); } catch (IOException ioe) { ioe.printStackTrace(); } catch (ClassNotFoundException cnfe) { cnfe.printStackTrace(); } finally { if (oistream != null) { try { oistream.close(); } catch (IOException ioe) { } } } }
63. ValueSerializerTest#testValue()
Project: kairosdb
File: ValueSerializerTest.java
/** Packs {@code value}, unpacks the produced bytes, and asserts a lossless round trip. */
private void testValue(long value) throws IOException {
    KDataOutput packed = new KDataOutput();
    packLong(value, packed);
    DataInput reader = KDataInput.createInput(packed.getBytes());
    long unpacked = unpackLong(reader);
    assertThat(unpacked, equalTo(value));
}
64. GMS#_readViewAndDigest()
Project: JGroups
File: GMS.java
// Decodes an optional (View, Digest) pair from the buffer; a null buffer
// yields null. A leading short bit-field announces what follows:
// VIEW_PRESENT selects the concrete view class (MergeView / DeltaView /
// plain View); DIGEST_PRESENT selects between a self-contained digest
// (READ_ADDRS set) and one keyed off the view's raw members — so a digest
// without embedded addresses relies on the view having been read first.
public static Tuple<View, Digest> _readViewAndDigest(byte[] buffer, int offset, int length) throws Exception { if (buffer == null) return null; DataInput in = new ByteArrayDataInputStream(buffer, offset, length); View tmp_view = null; Digest digest = null; short flags = in.readShort(); if ((flags & VIEW_PRESENT) == VIEW_PRESENT) { tmp_view = (flags & MERGE_VIEW) == MERGE_VIEW ? new MergeView() : (flags & DELTA_VIEW) == DELTA_VIEW ? new DeltaView() : new View(); tmp_view.readFrom(in); } if ((flags & DIGEST_PRESENT) == DIGEST_PRESENT) { if ((flags & READ_ADDRS) == READ_ADDRS) { digest = new Digest(); digest.readFrom(in); } else { digest = new Digest(tmp_view.getMembersRaw()); digest.readFrom(in, false); } } return new Tuple<>(tmp_view, digest); }
65. TransactionEditTest#assertSerializedEdit()
Project: incubator-tephra
File: TransactionEditTest.java
/**
 * Round-trip helper: writes the edit to an in-memory buffer, reads a fresh
 * instance back with readFields(), and asserts the two compare equal.
 */
private void assertSerializedEdit(TransactionEdit originalEdit) throws IOException {
    ByteArrayDataOutput buffer = ByteStreams.newDataOutput();
    originalEdit.write(buffer);
    TransactionEdit decoded = new TransactionEdit();
    DataInput source = ByteStreams.newDataInput(buffer.toByteArray());
    decoded.readFields(source);
    Assert.assertEquals(originalEdit, decoded);
}
66. HadoopV2JobSelfTest#testCustomSerializationApplying()
Project: ignite
File: HadoopV2JobSelfTest.java
// NOTE(review): both the key and the value serialization of the MAP task
// context must resolve to HadoopSerializationWrapper when a custom
// serialization class is configured, and CustomSerialization's read() yields
// TEST_SERIALIZED_VALUE even from the empty input stream used here.
/** * Tests that {@link HadoopJob} provides wrapped serializer if it's set in configuration. * * @throws IgniteCheckedException If fails. */ public void testCustomSerializationApplying() throws IgniteCheckedException { JobConf cfg = new JobConf(); cfg.setMapOutputKeyClass(IntWritable.class); cfg.setMapOutputValueClass(Text.class); cfg.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, CustomSerialization.class.getName()); HadoopDefaultJobInfo info = createJobInfo(cfg); final UUID uuid = UUID.randomUUID(); HadoopJobId id = new HadoopJobId(uuid, 1); HadoopJob job = info.createJob(HadoopV2Job.class, id, log, null); HadoopTaskContext taskCtx = job.getTaskContext(new HadoopTaskInfo(HadoopTaskType.MAP, null, 0, 0, null)); HadoopSerialization ser = taskCtx.keySerialization(); assertEquals(HadoopSerializationWrapper.class.getName(), ser.getClass().getName()); DataInput in = new DataInputStream(new ByteArrayInputStream(new byte[0])); assertEquals(TEST_SERIALIZED_VALUE, ser.read(in, null).toString()); ser = taskCtx.valueSerialization(); assertEquals(HadoopSerializationWrapper.class.getName(), ser.getClass().getName()); assertEquals(TEST_SERIALIZED_VALUE, ser.read(in, null).toString()); }
67. TestGet#testDynamicFilter()
Project: hindex
File: TestGet.java
// First verifies that deserializing a Get referencing test.MockFilter fails
// while the filter class is absent (RuntimeException naming the missing
// class), then drops MockFilter.jar into the hbase.local.dir jar directory
// and confirms the very same bytes now deserialize with the dynamically
// loaded filter class.
// NOTE(review): 'fos' is not closed in a finally block, so the jar file
// handle leaks if write() throws — tolerable in a test, but worth fixing.
@Test public void testDynamicFilter() throws Exception { DataInput dis = ByteStreams.newDataInput(Base64.decode(WRITABLE_GET)); Get get = new Get(); try { get.readFields(dis); fail("Should not be able to load the filter class"); } catch (RuntimeException re) { String msg = re.getMessage(); Assert.assertTrue(msg != null && msg.contains("Can't find class test.MockFilter")); } Configuration conf = HBaseConfiguration.create(); String localPath = conf.get("hbase.local.dir") + File.separator + "jars" + File.separator; File jarFile = new File(localPath, "MockFilter.jar"); jarFile.deleteOnExit(); FileOutputStream fos = new FileOutputStream(jarFile); fos.write(Base64.decode(MOCK_FILTER_JAR)); fos.close(); dis = ByteStreams.newDataInput(Base64.decode(WRITABLE_GET)); get.readFields(dis); Assert.assertEquals("test.MockFilter", get.getFilter().getClass().getName()); }
68. HFilePrettyPrinter#printMeta()
Project: hindex
File: HFilePrettyPrinter.java
// Dumps human-readable HFile metadata to stdout: block-index size, reader
// summary, trailer, each file-info entry (with special decoding for
// MAX_SEQ_ID_KEY as a long, TIMERANGE via TimeRangeTracker, and the average
// key/value length counters as ints), the mid-key, and finally the general
// and delete-family bloom filter details when their metadata blocks exist.
private void printMeta(HFile.Reader reader, Map<byte[], byte[]> fileInfo) throws IOException { System.out.println("Block index size as per heapsize: " + reader.indexSize()); System.out.println(asSeparateLines(reader.toString())); System.out.println("Trailer:\n " + asSeparateLines(reader.getTrailer().toString())); System.out.println("Fileinfo:"); for (Map.Entry<byte[], byte[]> e : fileInfo.entrySet()) { System.out.print(FOUR_SPACES + Bytes.toString(e.getKey()) + " = "); if (Bytes.compareTo(e.getKey(), Bytes.toBytes("MAX_SEQ_ID_KEY")) == 0) { long seqid = Bytes.toLong(e.getValue()); System.out.println(seqid); } else if (Bytes.compareTo(e.getKey(), Bytes.toBytes("TIMERANGE")) == 0) { TimeRangeTracker timeRangeTracker = new TimeRangeTracker(); Writables.copyWritable(e.getValue(), timeRangeTracker); System.out.println(timeRangeTracker.getMinimumTimestamp() + "...." + timeRangeTracker.getMaximumTimestamp()); } else if (Bytes.compareTo(e.getKey(), FileInfo.AVG_KEY_LEN) == 0 || Bytes.compareTo(e.getKey(), FileInfo.AVG_VALUE_LEN) == 0) { System.out.println(Bytes.toInt(e.getValue())); } else { System.out.println(Bytes.toStringBinary(e.getValue())); } } System.out.println("Mid-key: " + Bytes.toStringBinary(reader.midkey())); // Printing general bloom information DataInput bloomMeta = reader.getGeneralBloomFilterMetadata(); BloomFilter bloomFilter = null; if (bloomMeta != null) bloomFilter = BloomFilterFactory.createFromMeta(bloomMeta, reader); System.out.println("Bloom filter:"); if (bloomFilter != null) { System.out.println(FOUR_SPACES + bloomFilter.toString().replaceAll(ByteBloomFilter.STATS_RECORD_SEP, "\n" + FOUR_SPACES)); } else { System.out.println(FOUR_SPACES + "Not present"); } // Printing delete bloom information bloomMeta = reader.getDeleteBloomFilterMetadata(); bloomFilter = null; if (bloomMeta != null) bloomFilter = BloomFilterFactory.createFromMeta(bloomMeta, reader); System.out.println("Delete Family Bloom filter:"); if (bloomFilter != null) { 
System.out.println(FOUR_SPACES + bloomFilter.toString().replaceAll(ByteBloomFilter.STATS_RECORD_SEP, "\n" + FOUR_SPACES)); } else { System.out.println(FOUR_SPACES + "Not present"); } }
69. TestDefaultHCatRecord#testRYW()
Project: hcatalog
File: TestDefaultHCatRecord.java
/**
 * "Read your writes": serializes every sample HCatRecord to a file, reads
 * them back, and asserts each record compares equal and that the whole file
 * was consumed.
 */
public void testRYW() throws IOException {
    File f = new File("binary.dat");
    f.delete();
    f.createNewFile();
    f.deleteOnExit();

    HCatRecord[] recs = getHCatRecords();

    // try-with-resources guarantees the streams are closed even when an
    // assertion fails mid-test; the original leaked both handles on failure.
    try (OutputStream fileOutStream = new FileOutputStream(f)) {
        DataOutput outStream = new DataOutputStream(fileOutStream);
        for (int i = 0; i < recs.length; i++) {
            recs[i].write(outStream);
        }
        fileOutStream.flush();
    }

    try (InputStream fInStream = new FileInputStream(f)) {
        DataInput inpStream = new DataInputStream(fInStream);
        for (int i = 0; i < recs.length; i++) {
            HCatRecord rec = new DefaultHCatRecord();
            rec.readFields(inpStream);
            Assert.assertEquals(recs[i], rec);
        }
        // Every written byte must have been consumed by readFields().
        Assert.assertEquals(fInStream.available(), 0);
    }
}
70. LittleEndianDataInputStreamTest#testReadLine()
Project: guava
File: LittleEndianDataInputStreamTest.java
/** readLine() is unsupported by LittleEndianDataInputStream and must throw. */
public void testReadLine() throws IOException {
    DataInput input = new LittleEndianDataInputStream(new ByteArrayInputStream(data));
    try {
        input.readLine();
        fail();
    } catch (UnsupportedOperationException expected) {
        assertThat(expected).hasMessage("readLine is not supported");
    }
}
71. LittleEndianDataInputStreamTest#testReadUnsignedShort_eof()
Project: guava
File: LittleEndianDataInputStreamTest.java
/** Reading an unsigned short from a single-byte stream must raise EOFException. */
public void testReadUnsignedShort_eof() throws IOException {
    byte[] single = { 23 };
    DataInput input = new LittleEndianDataInputStream(new ByteArrayInputStream(single));
    try {
        input.readUnsignedShort();
        fail();
    } catch (EOFException expected) {
        // not enough bytes for a 16-bit value
    }
}
72. LittleEndianDataInputStreamTest#testReadUnsignedByte_eof()
Project: guava
File: LittleEndianDataInputStreamTest.java
/** Reading an unsigned byte from an empty stream must raise EOFException. */
public void testReadUnsignedByte_eof() throws IOException {
    DataInput input = new LittleEndianDataInputStream(new ByteArrayInputStream(new byte[0]));
    try {
        input.readUnsignedByte();
        fail();
    } catch (EOFException expected) {
        // empty stream: nothing to read
    }
}
73. PasscodeGenerator#hashToInt()
Project: google-authenticator-android
File: PasscodeGenerator.java
/**
 * Assembles the four bytes starting at {@code start} into a big-endian int.
 *
 * @param bytes the array of bytes
 * @param start the index of the first byte to consume
 * @return the int built from {@code bytes[start..start+3]}
 */
private int hashToInt(byte[] bytes, int start) {
    DataInput reader = new DataInputStream(
            new ByteArrayInputStream(bytes, start, bytes.length - start));
    try {
        return reader.readInt();
    } catch (IOException e) {
        // A ByteArrayInputStream only fails here when fewer than four bytes
        // remain; surface that as a programming error rather than checked I/O.
        throw new IllegalStateException(e);
    }
}
74. TestPartitionRequest#testPartitionRequest()
Project: goldenorb
File: TestPartitionRequest.java
// @Before fixture: populates every field of a PartitionRequest, serializes it
// through write(DataOutput), and deserializes the bytes into
// 'partitionRequestOut' so the generated per-field tests can compare the two
// instances. The "Start/End of user code" markers are preserved for the code
// generator and must not be removed.
/* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ @Before public void testPartitionRequest() throws IOException { /* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ partitionRequest = new PartitionRequest(); partitionRequest.setReservedPartitions(INT_RESERVEDPARTITIONS_VALUE); partitionRequest.setActivePartitions(INT_ACTIVEPARTITIONS_VALUE); partitionRequest.setJobID(STRING_JOBID_VALUE); partitionRequest.setBasePartitionID(INT_BASEPARTITIONID_VALUE); partitionRequest.setJobConf(new OrbConfiguration(true)); ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput out = new DataOutputStream(baos); partitionRequest.write(out); DataInput in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray())); partitionRequestOut = new PartitionRequest(); partitionRequestOut.readFields(in); /* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ }
75. TestOrbTrackerMember#testOrbTrackerMember()
Project: goldenorb
File: TestOrbTrackerMember.java
// @Before fixture: populates every field of an OrbTrackerMember, serializes
// it through write(DataOutput), and deserializes the bytes into
// 'orbTrackerMemberOut' so the generated per-field tests can compare the two
// instances. The "Start/End of user code" markers are preserved for the code
// generator and must not be removed.
/* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ @Before public void testOrbTrackerMember() throws IOException { /* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ orbTrackerMember = new OrbTrackerMember(); orbTrackerMember.setPartitionCapacity(INT_PARTITIONCAPACITY_VALUE); orbTrackerMember.setAvailablePartitions(INT_AVAILABLEPARTITIONS_VALUE); orbTrackerMember.setReservedPartitions(INT_RESERVEDPARTITIONS_VALUE); orbTrackerMember.setInUsePartitions(INT_INUSEPARTITIONS_VALUE); orbTrackerMember.setHostname(STRING_HOSTNAME_VALUE); orbTrackerMember.setLeader(BOOLEAN_LEADER_VALUE); orbTrackerMember.setPort(INT_PORT_VALUE); ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput out = new DataOutputStream(baos); orbTrackerMember.write(out); DataInput in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray())); orbTrackerMemberOut = new OrbTrackerMember(); orbTrackerMemberOut.readFields(in); /* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ }
76. TestOrbPartitionMember#testOrbPartitionMember()
Project: goldenorb
File: TestOrbPartitionMember.java
// @Before fixture: populates every field of an OrbPartitionMember, serializes
// it through write(DataOutput), and deserializes the bytes into
// 'orbPartitionMemberOut' so the generated per-field tests can compare the
// two instances. The "Start/End of user code" markers are preserved for the
// code generator and must not be removed.
/* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ @Before public void testOrbPartitionMember() throws IOException { /* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ orbPartitionMember = new OrbPartitionMember(); orbPartitionMember.setPartitionID(INT_PARTITIONID_VALUE); orbPartitionMember.setNumberOfVertices(INT_NUMBEROFVERTICES_VALUE); orbPartitionMember.setSuperStep(INT_SUPERSTEP_VALUE); orbPartitionMember.setMessagesSent(INT_MESSAGESSENT_VALUE); orbPartitionMember.setPercentComplete(FLOAT_PERCENTCOMPLETE_VALUE); orbPartitionMember.setHostname(STRING_HOSTNAME_VALUE); orbPartitionMember.setLeader(BOOLEAN_LEADER_VALUE); orbPartitionMember.setPort(INT_PORT_VALUE); ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput out = new DataOutputStream(baos); orbPartitionMember.write(out); DataInput in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray())); orbPartitionMemberOut = new OrbPartitionMember(); orbPartitionMemberOut.readFields(in); /* * Start of user / non-generated code -- any code written outside of this block will be * removed in subsequent code generations. */ /* End of user / non-generated code */ }
77. ZookeeperUtils#byteArrayToWritable()
Project: goldenorb
File: ZookeeperUtils.java
/**
 * Populates a Writable from its serialized byte form.
 *
 * @param byteArray bytes previously produced by {@code w.write(...)}
 * @param w the Writable instance to fill
 * @return the same instance {@code w}, after {@code readFields} has run
 * @throws IOException if deserialization fails
 */
public static Writable byteArrayToWritable(byte[] byteArray, Writable w) throws IOException {
    w.readFields(new DataInputStream(new ByteArrayInputStream(byteArray)));
    return w;
}
78. SendWorkerAggregatorsRequest#doRequest()
Project: giraph
File: SendWorkerAggregatorsRequest.java
@Override public void doRequest(ServerData serverData) { DataInput input = getDataInput(); OwnerAggregatorServerData aggregatorData = serverData.getOwnerAggregatorData(); try { int numAggregators = input.readInt(); for (int i = 0; i < numAggregators; i++) { String aggregatorName = input.readUTF(); if (aggregatorName.equals(AggregatorUtils.SPECIAL_COUNT_AGGREGATOR)) { LongWritable count = new LongWritable(0); count.readFields(input); aggregatorData.receivedRequestCountFromWorker(count.get(), getSenderTaskId()); } else { Writable aggregatedValue = aggregatorData.createAggregatorInitialValue(aggregatorName); aggregatedValue.readFields(input); aggregatorData.aggregate(aggregatorName, aggregatedValue); } } } catch (IOException e) { throw new IllegalStateException("doRequest: " + "IOException occurred while processing request", e); } aggregatorData.receivedRequestFromWorker(); }
79. SendAggregatorsToWorkerRequest#doRequest()
Project: giraph
File: SendAggregatorsToWorkerRequest.java
@Override public void doRequest(ServerData serverData) { DataInput input = getDataInput(); AllAggregatorServerData aggregatorData = serverData.getAllAggregatorData(); try { int numAggregators = input.readInt(); for (int i = 0; i < numAggregators; i++) { String aggregatorName = input.readUTF(); String aggregatorClassName = input.readUTF(); if (aggregatorName.equals(AggregatorUtils.SPECIAL_COUNT_AGGREGATOR)) { LongWritable count = new LongWritable(0); count.readFields(input); aggregatorData.receivedRequestCountFromWorker(count.get(), getSenderTaskId()); } else { Class<Aggregator<Writable>> aggregatorClass = AggregatorUtils.getAggregatorClass(aggregatorClassName); aggregatorData.registerAggregatorClass(aggregatorName, aggregatorClass); Writable aggregatorValue = aggregatorData.createAggregatorInitialValue(aggregatorName); aggregatorValue.readFields(input); aggregatorData.setAggregatorValue(aggregatorName, aggregatorValue); } } } catch (IOException e) { throw new IllegalStateException("doRequest: " + "IOException occurred while processing request", e); } aggregatorData.receivedRequestFromWorker(); }
80. SendAggregatorsToOwnerRequest#doRequest()
Project: giraph
File: SendAggregatorsToOwnerRequest.java
@Override public void doRequest(ServerData serverData) { DataInput input = getDataInput(); AllAggregatorServerData aggregatorData = serverData.getAllAggregatorData(); try { int numAggregators = input.readInt(); for (int i = 0; i < numAggregators; i++) { String aggregatorName = input.readUTF(); String aggregatorClassName = input.readUTF(); if (aggregatorName.equals(AggregatorUtils.SPECIAL_COUNT_AGGREGATOR)) { LongWritable count = new LongWritable(0); count.readFields(input); aggregatorData.receivedRequestCountFromMaster(count.get(), getSenderTaskId()); } else { Class<Aggregator<Writable>> aggregatorClass = AggregatorUtils.getAggregatorClass(aggregatorClassName); aggregatorData.registerAggregatorClass(aggregatorName, aggregatorClass); Writable aggregatorValue = aggregatorData.createAggregatorInitialValue(aggregatorName); aggregatorValue.readFields(input); aggregatorData.setAggregatorValue(aggregatorName, aggregatorValue); serverData.getOwnerAggregatorData().registerAggregator(aggregatorName, aggregatorClass); } } } catch (IOException e) { throw new IllegalStateException("doRequest: " + "IOException occurred while processing request", e); } aggregatorData.receivedRequestFromMaster(getData()); }
81. DataStreamSerializationFactoryV2#read()
Project: geogig
File: DataStreamSerializationFactoryV2.java
@Override public RevObject read(ObjectId id, InputStream rawData) throws IOException { DataInput in = new DataInputStream(rawData); final TYPE type = readHeader(in); Serializer<RevObject> serializer = DataStreamSerializationFactoryV2.serializer(type); RevObject object = serializer.readBody(id, in); return object; }
82. WriteableEvent#createWriteableEvent()
Project: flume
File: WriteableEvent.java
public static WriteableEvent createWriteableEvent(byte[] bytes) throws IOException { WriteableEvent we = new WriteableEvent(); DataInput in = new DataInputStream(new ByteArrayInputStream(bytes)); we.readFields(in); return we; }
83. WriteableEvent#create()
Project: flume
File: WriteableEvent.java
public static WriteableEvent create(byte[] raw) throws IOException { WriteableEvent e = new WriteableEvent(); DataInput in = new DataInputStream(new ByteArrayInputStream(raw)); e.readFields(in); return e; }
84. VertexToFaunusBinaryTest#testConversion()
Project: faunus
File: VertexToFaunusBinaryTest.java
// Round-trips two TinkerGraph vertices (plus one connecting edge) through the
// Faunus binary format and verifies that properties, property counts, edge
// direction, edge properties, and endpoint ids all survive serialization.
public void testConversion() throws IOException {
    // Build a tiny in-memory graph: marko --knows--> stephen.
    Graph graph = new TinkerGraph();
    Vertex marko = graph.addVertex(1);
    marko.setProperty("name", "marko");
    marko.setProperty("age", 32);
    Vertex stephen = graph.addVertex(3);
    stephen.setProperty("name", "stephen");
    stephen.setProperty("weight", 160.42);
    stephen.setProperty("male", true);
    Edge e = graph.addEdge(null, marko, stephen, "knows");
    e.setProperty("weight", 0.2);
    e.setProperty("type", "coworker");
    // Serialize both vertices, in order, into one buffer.
    ByteArrayOutputStream bytes1 = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(bytes1);
    VertexToFaunusBinary.write(marko, out);
    VertexToFaunusBinary.write(stephen, out);
    DataInput in = new DataInputStream(new ByteArrayInputStream(bytes1.toByteArray()));
    // First vertex read back must be marko, with only an OUT edge.
    FaunusVertex markoFaunus = new FaunusVertex(in);
    assertEquals(markoFaunus.getProperty("name"), "marko");
    assertEquals(markoFaunus.getProperty("age"), 32);
    assertEquals(markoFaunus.getPropertyKeys().size(), 2);
    assertEquals(asList(markoFaunus.getEdges(Direction.OUT)).size(), 1);
    assertFalse(markoFaunus.getEdges(Direction.IN).iterator().hasNext());
    assertTrue(markoFaunus.getEdges(Direction.OUT, "knows").iterator().hasNext());
    assertFalse(markoFaunus.getEdges(Direction.OUT, "blah").iterator().hasNext());
    // Edge properties and endpoint ids must match the source graph.
    FaunusEdge edge = (FaunusEdge) markoFaunus.getEdges(Direction.OUT).iterator().next();
    assertEquals(edge.getLabel(), "knows");
    assertEquals(edge.getProperty("weight"), 0.2);
    assertEquals(edge.getProperty("type"), "coworker");
    assertEquals(edge.getPropertyKeys().size(), 2);
    assertEquals(edge.getVertex(Direction.IN).getId(), 3l);
    assertEquals(edge.getVertex(Direction.OUT).getId(), 1l);
    // Second vertex read back must be stephen, with only an IN edge.
    FaunusVertex stephenFaunus = new FaunusVertex(in);
    assertEquals(stephenFaunus.getProperty("name"), "stephen");
    assertEquals(stephenFaunus.getProperty("weight"), 160.42);
    assertTrue((Boolean) stephenFaunus.getProperty("male"));
    assertEquals(stephenFaunus.getPropertyKeys().size(), 3);
    assertEquals(asList(stephenFaunus.getEdges(Direction.IN)).size(), 1);
    assertFalse(stephenFaunus.getEdges(Direction.OUT).iterator().hasNext());
    assertTrue(stephenFaunus.getEdges(Direction.IN, "knows").iterator().hasNext());
    assertFalse(stephenFaunus.getEdges(Direction.IN, "blah").iterator().hasNext());
    // The same edge, observed from the IN side, must be identical.
    edge = (FaunusEdge) stephenFaunus.getEdges(Direction.IN).iterator().next();
    assertEquals(edge.getLabel(), "knows");
    assertEquals(edge.getProperty("weight"), 0.2);
    assertEquals(edge.getProperty("type"), "coworker");
    assertEquals(edge.getPropertyKeys().size(), 2);
    assertEquals(edge.getVertex(Direction.IN).getId(), 3l);
    assertEquals(edge.getVertex(Direction.OUT).getId(), 1l);
}
85. DatasourceInputSplitTest#testSerde()
Project: druid
File: DatasourceInputSplitTest.java
@Test public void testSerde() throws Exception { Interval interval = Interval.parse("2000/3000"); DatasourceInputSplit expected = new DatasourceInputSplit(Lists.newArrayList(new WindowedDataSegment(new DataSegment("test", Interval.parse("2000/3000"), "ver", ImmutableMap.<String, Object>of("type", "local", "path", "/tmp/index.zip"), ImmutableList.of("host"), ImmutableList.of("visited_sum", "unique_hosts"), NoneShardSpec.instance(), 9, 12334), interval)), new String[] { "server1", "server2", "server3" }); ByteArrayDataOutput out = ByteStreams.newDataOutput(); expected.write(out); DataInput in = ByteStreams.newDataInput(out.toByteArray()); DatasourceInputSplit actual = new DatasourceInputSplit(); actual.readFields(in); Assert.assertEquals(expected.getSegments(), actual.getSegments()); Assert.assertArrayEquals(expected.getLocations(), actual.getLocations()); Assert.assertEquals(12334, actual.getLength()); }
86. RubixFile#getKeyData()
Project: Cubert
File: RubixFile.java
@SuppressWarnings("unchecked") public List<KeyData<K>> getKeyData() throws IOException, ClassNotFoundException { if (keyData != null) return keyData; final FileSystem fs = FileSystem.get(conf); keyData = new ArrayList<KeyData<K>>(); final long filesize = fs.getFileStatus(path).getLen(); FSDataInputStream in = fs.open(path); /* The last long in the file is the start position of the trailer section */ in.seek(filesize - 8); long metaDataStartPos = in.readLong(); in.seek(metaDataStartPos); ObjectMapper mapper = new ObjectMapper(); metadataJson = mapper.readValue(in.readUTF(), JsonNode.class); int keySectionSize = in.readInt(); // load the key section byte[] keySection = new byte[keySectionSize]; in.seek(filesize - keySectionSize - 8); in.read(keySection, 0, keySectionSize); in.close(); ByteArrayInputStream bis = new ByteArrayInputStream(keySection); DataInput dataInput = new DataInputStream(bis); int numberOfBlocks = metadataJson.get("numberOfBlocks").getIntValue(); // load the key section keyClass = (Class<K>) ClassCache.forName(JsonUtils.getText(metadataJson, "keyClass")); valueClass = (Class<V>) ClassCache.forName(JsonUtils.getText(metadataJson, "valueClass")); SerializationFactory serializationFactory = new SerializationFactory(conf); Deserializer<K> deserializer = serializationFactory.getDeserializer(keyClass); deserializer.open(bis); while (bis.available() > 0 && numberOfBlocks > 0) { K key = deserializer.deserialize(null); long offset = dataInput.readLong(); long blockId = dataInput.readLong(); long numRecords = dataInput.readLong(); keyData.add(new KeyData<K>(key, offset, 0, numRecords, blockId)); numberOfBlocks--; } // Assign length to each keydata entry int numEntries = keyData.size(); for (int i = 1; i < numEntries; i++) { KeyData<K> prev = keyData.get(i - 1); KeyData<K> current = keyData.get(i); prev.setLength(current.getOffset() - prev.getOffset()); } if (numEntries > 0) { KeyData<K> last = keyData.get(numEntries - 1); last.setLength(metaDataStartPos - 
last.offset); } return keyData; }
87. CommitTransactionPayload#getCandidate()
Project: controller
File: CommitTransactionPayload.java
@Override public Entry<Optional<TransactionIdentifier>, DataTreeCandidate> getCandidate() throws IOException { final DataInput in = ByteStreams.newDataInput(serialized); return new SimpleImmutableEntry<>(Optional.of(TransactionIdentifier.readFrom(in)), DataTreeCandidateInputOutput.readDataTreeCandidate(in)); }
88. Stat#readFromDataInput()
Project: commons
File: Stat.java
/** * Read the data from the InputStream so it can be used to populate * the current objects state. * * @param in java.io.InputStream to write to. * @throws IOException */ public void readFromDataInput(InputStream in) throws IOException { DataInput di = new DataInputStream(in); readFromDataInput(di); return; }
89. FileLockSerializerTest#readsLockObjectWithoutExpirationData()
Project: che
File: FileLockSerializerTest.java
@Test public void readsLockObjectWithoutExpirationData() throws Exception { String token = Long.toString(System.currentTimeMillis()); DataInput data = mock(DataInput.class); when(data.readUTF()).thenReturn(token); when(data.readLong()).thenThrow(new EOFException()); FileLock lock = lockSerializer.read(data); assertEquals(new FileLock(token, Long.MAX_VALUE), lock); }
90. SparkBatchSourceFactory#deserialize()
Project: cdap
File: SparkBatchSourceFactory.java
static SparkBatchSourceFactory deserialize(InputStream inputStream) throws IOException { DataInput input = new DataInputStream(inputStream); Map<String, StreamBatchReadable> streamBatchReadables = Serializations.deserializeMap(input, new Serializations.ObjectReader<StreamBatchReadable>() { @Override public StreamBatchReadable read(DataInput input) throws IOException { return new StreamBatchReadable(URI.create(input.readUTF())); } }); Map<String, InputFormatProvider> inputFormatProviders = Serializations.deserializeMap(input, new Serializations.ObjectReader<InputFormatProvider>() { @Override public InputFormatProvider read(DataInput input) throws IOException { return new BasicInputFormatProvider(input.readUTF(), Serializations.deserializeMap(input, Serializations.createStringObjectReader())); } }); Map<String, DatasetInfo> datasetInfos = Serializations.deserializeMap(input, new Serializations.ObjectReader<DatasetInfo>() { @Override public DatasetInfo read(DataInput input) throws IOException { return DatasetInfo.deserialize(input); } }); Map<String, Set<String>> sourceInputs = Serializations.deserializeMap(input, Serializations.createStringSetObjectReader()); return new SparkBatchSourceFactory(streamBatchReadables, inputFormatProviders, datasetInfos, sourceInputs); }
91. HintsDescriptorTest#deserializeDescriptor()
Project: cassandra
File: HintsDescriptorTest.java
private static HintsDescriptor deserializeDescriptor(byte[] bytes) throws IOException { DataInput in = ByteStreams.newDataInput(bytes); return HintsDescriptor.deserialize(in); }
92. OpenWireFormat#unmarshal()
Project: activemq-openwire
File: OpenWireFormat.java
public Object unmarshal(DataInput dis) throws IOException { DataInput dataIn = dis; if (!sizePrefixDisabled) { int size = dis.readInt(); if (size > maxFrameSize) { throw new IOException("Frame size of " + (size / (1024 * 1024)) + " MB larger than max allowed " + (maxFrameSize / (1024 * 1024)) + " MB"); } } return doUnmarshal(dataIn); }