org.apache.avro.io.BinaryEncoder

Here are examples of the Java API org.apache.avro.io.BinaryEncoder, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

54 Examples
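
For orientation, here is a minimal, self-contained sketch of the write path that recurs throughout the examples below: obtain a BinaryEncoder from EncoderFactory, write a datum through a DatumWriter, and flush before reading the bytes. The User schema and field values here are made up purely for illustration.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class BinaryEncoderSketch {

    public static void main(String[] args) throws IOException {
        // hypothetical schema, just for illustration
        Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
        GenericRecord user = new GenericData.Record(schema);
        user.put("name", "jane");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // passing null for reuse creates a fresh (buffered) encoder
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(user, encoder);
        // buffered encoders hold bytes internally; flush before reading the stream
        encoder.flush();
        byte[] bytes = out.toByteArray();
    }
}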

19 Source : KafkaJsonSchema.java
with Apache License 2.0
from streamnative

/**
 * A schema for encoding Kafka JSON values.
 */
@Slf4j
public class KafkaJsonSchema implements Schema<byte[]> {

    private AvroData avroData = null;

    private final JsonDeserializer jsonDeserializer = new JsonDeserializer();

    private Converter valueConverter = null;

    private SchemaInfo schemaInfo = null;

    private org.apache.avro.Schema avroSchema = null;

    private final Method convertToConnectMethod;

    public KafkaJsonSchema() {
        try {
            this.convertToConnectMethod = JsonConverter.class.getDeclaredMethod("convertToConnect", org.apache.kafka.connect.data.Schema.class, JsonNode.class);
            this.convertToConnectMethod.setAccessible(true);
        } catch (NoSuchMethodException e) {
            throw new RuntimeException("Failed to locate `convertToConnect` method for JsonConverter", e);
        }
    }

    public void setAvroSchema(boolean isKey, AvroData avroData, org.apache.avro.Schema schema, Converter converter) {
        this.valueConverter = converter;
        this.avroData = avroData;
        this.avroSchema = schema;
        this.schemaInfo = SchemaInfo.builder().name(converter instanceof JsonConverter ? "KafkaJson" : "KafkaAvro").type(converter instanceof JsonConverter ? SchemaType.JSON : SchemaType.AVRO).properties(Collections.emptyMap()).schema(schema.toString().getBytes(UTF_8)).build();
        if (converter instanceof AvroConverter) {
            initializeAvroWriter(schema);
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public byte[] encode(byte[] data) {
        if (null == valueConverter || valueConverter instanceof JsonConverter) {
            return data;
        }
        org.apache.kafka.connect.data.Schema connectSchema = avroData.toConnectSchema(avroSchema);
        JsonNode jsonNode = jsonDeserializer.deserialize("", data);
        Object connectValue;
        try {
            connectValue = convertToConnectMethod.invoke(null, connectSchema, jsonNode);
        } catch (IllegalAccessException e) {
            throw new SchemaSerializationException("Can not call JsonConverter#convertToConnect");
        } catch (InvocationTargetException e) {
            throw new SchemaSerializationException(e.getCause());
        }
        Object avroValue = avroData.fromConnectData(connectSchema, connectValue);
        return writeAvroRecord((GenericRecord) avroValue);
    }

    private GenericDatumWriter<GenericRecord> writer;

    private BinaryEncoder encoder;

    private ByteArrayOutputStream byteArrayOutputStream;

    synchronized void initializeAvroWriter(org.apache.avro.Schema schema) {
        this.writer = new GenericDatumWriter<>(schema);
        this.byteArrayOutputStream = new ByteArrayOutputStream();
        this.encoder = EncoderFactory.get().binaryEncoder(this.byteArrayOutputStream, this.encoder);
    }

    synchronized byte[] writeAvroRecord(GenericRecord record) {
        try {
            this.writer.write(record, this.encoder);
            this.encoder.flush();
            return this.byteArrayOutputStream.toByteArray();
        } catch (Exception e) {
            throw new SchemaSerializationException(e);
        } finally {
            this.byteArrayOutputStream.reset();
        }
    }

    @Override
    public SchemaInfo getSchemaInfo() {
        return schemaInfo;
    }
}
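
Note the reuse pattern in this class: the writer, output stream, and encoder are created once in initializeAvroWriter, writeAvroRecord is synchronized so concurrent encode() calls cannot interleave writes, and the stream is reset in a finally block so bytes from a failed write do not leak into the next record.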

19 Source : JdbcAvroRecordConverter.java
with Apache License 2.0
from spotify

public class JdbcAvroRecordConverter {

    private final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings;

    private final int columnCount;

    private final ResultSet resultSet;

    private final EncoderFactory encoderFactory = EncoderFactory.get();

    public JdbcAvroRecordConverter(final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings, final int columnCount, final ResultSet resultSet) {
        this.mappings = mappings;
        this.columnCount = columnCount;
        this.resultSet = resultSet;
    }

    public static JdbcAvroRecordConverter create(final ResultSet resultSet) throws SQLException {
        return new JdbcAvroRecordConverter(computeAllMappings(resultSet), resultSet.getMetaData().getColumnCount(), resultSet);
    }

    @SuppressWarnings("unchecked")
    static JdbcAvroRecord.SqlFunction<ResultSet, Object>[] computeAllMappings(final ResultSet resultSet) throws SQLException {
        final ResultSetMetaData meta = resultSet.getMetaData();
        final int columnCount = meta.getColumnCount();
        final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings = new JdbcAvroRecord.SqlFunction[columnCount + 1];
        for (int i = 1; i <= columnCount; i++) {
            mappings[i] = JdbcAvroRecord.computeMapping(meta, i);
        }
        return mappings;
    }

    private BinaryEncoder binaryEncoder = null;

    public static class MyByteArrayOutputStream extends ByteArrayOutputStream {

        MyByteArrayOutputStream(int size) {
            super(size);
        }

        // provide access to internal buffer, avoiding copy
        byte[] getBuffer() {
            return buf;
        }
    }

    /**
     * Reads data from a single row of the result set and encodes it into an Avro record as a byte array.
     * Reading and encoding directly reduces the need to copy bytes between objects.
     *
     * @return a ByteBuffer with binary encoded Avro record
     * @throws SQLException in case reading row from JDBC fails
     * @throws IOException in case binary encoding fails
     */
    public ByteBuffer convertResultSetIntoAvroBytes() throws SQLException, IOException {
        final MyByteArrayOutputStream out = new MyByteArrayOutputStream(columnCount * 64);
        binaryEncoder = encoderFactory.directBinaryEncoder(out, binaryEncoder);
        for (int i = 1; i <= columnCount; i++) {
            final Object value = mappings[i].apply(resultSet);
            if (value == null || resultSet.wasNull()) {
                binaryEncoder.writeIndex(0);
                binaryEncoder.writeNull();
            } else {
                binaryEncoder.writeIndex(1);
                if (value instanceof String) {
                    binaryEncoder.writeString((String) value);
                } else if (value instanceof Long) {
                    binaryEncoder.writeLong((Long) value);
                } else if (value instanceof Integer) {
                    binaryEncoder.writeInt((Integer) value);
                } else if (value instanceof Boolean) {
                    binaryEncoder.writeBoolean((Boolean) value);
                } else if (value instanceof ByteBuffer) {
                    binaryEncoder.writeBytes((ByteBuffer) value);
                } else if (value instanceof Double) {
                    binaryEncoder.writeDouble((Double) value);
                } else if (value instanceof Float) {
                    binaryEncoder.writeFloat((Float) value);
                }
            }
        }
        binaryEncoder.flush();
        return ByteBuffer.wrap(out.getBuffer(), 0, out.size());
    }
}
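
The writeIndex calls above hand-encode each column as an Avro union of the form ["null", <type>], with the null branch first. As a hedged sketch (rowSchema and converter are hypothetical names), the resulting buffer can be read back with a matching record schema:

    ByteBuffer buffer = converter.convertResultSetIntoAvroBytes();
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(buffer.array(), 0, buffer.limit(), null);
    GenericRecord row = new GenericDatumReader<GenericRecord>(rowSchema).read(null, decoder);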

19 Source : DoctorKActionReporter.java
with Apache License 2.0
from pinterest

public synchronized void sendMessage(String clusterName, String message) {
    int numRetries = 0;
    while (numRetries < MAX_RETRIES) {
        try {
            long timestamp = System.currentTimeMillis();
            OperatorAction operatorAction = new OperatorAction(timestamp, clusterName, message);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);
            avroWriter.write(operatorAction, binaryEncoder);
            binaryEncoder.flush();
            IOUtils.closeQuietly(stream);
            String key = Long.toString(System.currentTimeMillis());
            ProducerRecord<byte[], byte[]> producerRecord = new ProducerRecord<>(topic, key.getBytes(), stream.toByteArray());
            Future<RecordMetadata> future = kafkaProducer.send(producerRecord);
            future.get();
            LOG.info("Send an message {} to action report : ", message);
            break;
        } catch (Exception e) {
            LOG.error("Failed to publish report message {}: {}", clusterName, message, e);
            numRetries++;
        }
    }
}

19 Source : KafkaAvroWithoutRegistrySerializer.java
with Mozilla Public License 2.0
from opfab

@Override
public byte[] serialize(String topic, T record) {
    if (record == null) {
        return new byte[0];
    } else {
        try {
            Schema schema = record.getSchema();
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            // write a five-byte header of zeros (magic byte plus schema id placeholder)
            out.write(ByteBuffer.allocate(5).putInt(0).array());
            BinaryEncoder encoder = this.encoderFactory.directBinaryEncoder(out, null);
            DatumWriter<T> writer = new SpecificDatumWriter<>(schema);
            writer.write(record, encoder);
            encoder.flush();
            byte[] bytes = out.toByteArray();
            out.close();
            return bytes;
        } catch (RuntimeException | IOException ioEx) {
            throw new SerializationException("Error serializing Avro message", ioEx);
        }
    }
}

19 Source : AvroSerializer.java
with MIT License
from mincloud1501

@Override
public byte[] serialize(String topic, T data) {
    try {
        byte[] result = null;
        if (data != null) {
            LOGGER.debug("data='{}'", data);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
            DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(data.getSchema());
            datumWriter.write(data, binaryEncoder);
            binaryEncoder.flush();
            byteArrayOutputStream.close();
            result = byteArrayOutputStream.toByteArray();
            LOGGER.debug("@@@@@serialized data='{}'", DatatypeConverter.printHexBinary(result));
        }
        return result;
    } catch (IOException ex) {
        throw new SerializationException("Can't serialize data='" + data + "' for topic='" + topic + "'", ex);
    }
}

19 Source : AvroRowSerializer.java
with Apache License 2.0
from microsoft

public class AvroRowSerializer {

    private final static Logger logger = Logger.getLogger(AvroRowSerializer.class);

    // avro writer infra
    private ByteArrayOutputStream binaryBuffer = new ByteArrayOutputStream();

    private DatumWriter<IndexedRecord> writer;

    private BinaryEncoder encoder;

    private AvroFastRecord finalRecord;

    private int[] sourceIndices;

    public AvroRowSerializer(Schema schema) {
        List<Field> fieldList = schema.getFields().stream().filter(f -> Boolean.parseBoolean(f.getProp(AvroSchemaBuilder.PROPERTY_OUTPUT))).map(f -> new Schema.Field(f.name(), f.schema(), f.doc(), f.defaultVal())).collect(Collectors.toList());
        // did the filter prune any fields from the schema?
        if (fieldList.size() != schema.getFields().size()) {
            Schema prunedSchema = Schema.createRecord(fieldList);
            this.finalRecord = new AvroFastRecord(prunedSchema);
            // initialize source to target mapping
            this.sourceIndices = new int[fieldList.size()];
            for (Field field : prunedSchema.getFields()) {
                logger.info("Pruned field: " + field.name());
                this.sourceIndices[field.pos()] = schema.getField(field.name()).pos();
            }
            schema = prunedSchema;
        }
        this.writer = new SpecificDatumWriter<>(schema);
        this.encoder = EncoderFactory.get().binaryEncoder(binaryBuffer, null);
    }

    public byte[] serialize(IndexedRecord record) throws IOException {
        // make sure we're at the beginning again
        this.binaryBuffer.reset();
        // copying to final output schema
        if (this.sourceIndices != null) {
            for (int i = 0; i < this.sourceIndices.length; i++) this.finalRecord.put(i, record.get(this.sourceIndices[i]));
            record = this.finalRecord;
        }
        // serialize the record
        this.writer.write(record, encoder);
        this.encoder.flush();
        this.binaryBuffer.flush();
        return this.binaryBuffer.toByteArray();
    }
}

19 Source : Avro18GenericData.java
with BSD 2-Clause "Simplified" License
from linkedin

/**
 * Gets the default value of the given field, if any.
 * @param field the field whose default value should be retrieved.
 * @return the default value associated with the given field,
 * or null if none is specified in the schema.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static Object getDefaultValue(Schema.Field field) {
    JsonNode json = field.defaultValue();
    if (json == null)
        throw new AvroRuntimeException("Field " + field + " not set and has no default value");
    if (json.isNull() && (field.schema().getType() == Schema.Type.NULL || (field.schema().getType() == Schema.Type.UNION && field.schema().getTypes().get(0).getType() == Schema.Type.NULL))) {
        return null;
    }
    // Check the cache
    Object defaultValue = CACHED_DEFAULTS.get(field);
    // If not cached, get the default Java value by encoding the default JSON
    // value and then decoding it:
    if (defaultValue == null)
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            BinaryEncoder encoder = new BinaryEncoder(baos);
            Avro14ResolvingGrammarGeneratorAccessUtil.encode(encoder, field.schema(), json);
            encoder.flush();
            BinaryDecoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(baos.toByteArray(), null);
            // TODO - difference between specific and generic here
            defaultValue = new GenericDatumReader<>(field.schema()).read(null, decoder);
            CACHED_DEFAULTS.put(field, defaultValue);
        } catch (IOException e) {
            throw new AvroRuntimeException(e);
        }
    return defaultValue;
}

19 Source : AvroSerializer.java
with Mozilla Public License 2.0
from corunet

@Override
public byte[] serialize(String topic, T data) {
    try {
        byte[] result = null;
        if (data != null) {
            log.debug("data='{}'", data);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            byteArrayOutputStream.write(MAGIC_BYTE);
            byteArrayOutputStream.write(ByteBuffer.allocate(idSize).putInt(data.getSchemaMetadata().getId()).array());
            BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
            DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(((GenericRecord) data.getGenericRecord()).getSchema());
            datumWriter.write((GenericRecord) data.getGenericRecord(), binaryEncoder);
            binaryEncoder.flush();
            byteArrayOutputStream.close();
            result = byteArrayOutputStream.toByteArray();
            log.debug("serialized data='{}'", DatatypeConverter.printHexBinary(result));
        }
        return result;
    } catch (IOException ex) {
        throw new SerializationException("Can't serialize data='" + data + "' for topic='" + topic + "'", ex);
    }
}

19 Source : AvroSerializer.java
with MIT License
from code-not-found

@Override
public byte[] serialize(String topic, T data) {
    try {
        byte[] result = null;
        if (data != null) {
            LOGGER.debug("data='{}'", data);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
            DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(data.getSchema());
            datumWriter.write(data, binaryEncoder);
            binaryEncoder.flush();
            byteArrayOutputStream.close();
            result = byteArrayOutputStream.toByteArray();
            LOGGER.debug("serialized data='{}'", DatatypeConverter.printHexBinary(result));
        }
        return result;
    } catch (IOException ex) {
        throw new SerializationException("Can't serialize data='" + data + "' for topic='" + topic + "'", ex);
    }
}

19 Source : AvroObjectOutput.java
with Apache License 2.0
from apache

public class AvroObjectOutput implements ObjectOutput {

    private static EncoderFactory encoderFactory = EncoderFactory.get();

    private BinaryEncoder encoder;

    public AvroObjectOutput(OutputStream out) {
        encoder = encoderFactory.binaryEncoder(out, null);
    }

    @Override
    public void writeBool(boolean v) throws IOException {
        encoder.writeBoolean(v);
    }

    @Override
    public void writeByte(byte v) throws IOException {
        encoder.writeFixed(new byte[] { v });
    }

    @Override
    public void writeShort(short v) throws IOException {
        encoder.writeInt(v);
    }

    @Override
    public void writeInt(int v) throws IOException {
        encoder.writeInt(v);
    }

    @Override
    public void writeLong(long v) throws IOException {
        encoder.writeLong(v);
    }

    @Override
    public void writeFloat(float v) throws IOException {
        encoder.writeFloat(v);
    }

    @Override
    public void writeDouble(double v) throws IOException {
        encoder.writeDouble(v);
    }

    @Override
    public void writeUTF(String v) throws IOException {
        encoder.writeString(new Utf8(v));
    }

    @Override
    public void writeBytes(byte[] v) throws IOException {
        encoder.writeString(new String(v, StandardCharsets.UTF_8));
    }

    @Override
    public void writeBytes(byte[] v, int off, int len) throws IOException {
        byte[] v2 = Arrays.copyOfRange(v, off, off + len);
        encoder.writeString(new String(v2, StandardCharsets.UTF_8));
    }

    @Override
    public void flushBuffer() throws IOException {
        encoder.flush();
    }

    @Override
    @SuppressWarnings(value = { "rawtypes", "unchecked" })
    public void writeObject(Object obj) throws IOException {
        if (obj == null) {
            encoder.writeNull();
            return;
        }
        ReflectDatumWriter dd = new ReflectDatumWriter<>(obj.getClass());
        dd.write(obj, encoder);
    }
}

18 Source : AvroUtils.java
with Apache License 2.0
from wangrenlei

/**
 * Writes provided {@link GenericRecord} into the provided
 * {@link OutputStream}.
 */
public static void write(GenericRecord record, OutputStream out) {
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema());
    try {
        writer.write(record, encoder);
        encoder.flush();
    } catch (Exception e) {
        throw new IllegalStateException("Failed to write AVRO record", e);
    }
}

18 Source : PulsarSink.java
with Apache License 2.0
from streamnative

public class PulsarSink extends AbstractSink implements Configurable, BatchSizeSupported {

    private static final Logger log = LoggerFactory.getLogger(PulsarSink.class);

    private long batchSize;

    private boolean useAvroEventFormat;

    private SinkCounter counter = null;

    private Producer<byte[]> producer;

    private PulsarClient client;

    private Optional<ByteArrayOutputStream> tempOutStream = Optional.absent();

    private Optional<SpecificDatumWriter<AvroFlumeEvent>> writer = Optional.absent();

    private ProducerBuilder producerBuilder;

    private ClientBuilder clientBuilder;

    private String authPluginClassName;

    private String authParamsString;

    private String tlsCertFile;

    private String tlsKeyFile;

    private Boolean useTLS;

    private Integer operationTimeout;

    private Integer numIoThreads;

    private Integer connectionsPerBroker;

    private Map<String, Object> config = new HashMap<>();

    private String serviceUrl;

    private String topicName;

    private String producerName;

    private Integer sendTimeout;

    private Boolean blockIfQueueFull;

    private Boolean enableBatching;

    private Integer batchMessagesMaxMessagesPerBatch;

    private Long batchDelay;

    private Integer messageRoutingMode;

    private Integer hashingScheme;

    private Integer compressionType;

    private Boolean enableTcpNoDelay;

    private String tlsTrustCertsFilePath;

    private Boolean allowTlsInsecureConnection;

    private Boolean enableTlsHostnameVerification;

    private Integer statsInterval;

    private Integer maxConcurrentLookupRequests;

    private Integer maxLookupRequests;

    private Integer maxNumberOfRejectedRequestPerConnection;

    private Integer keepAliveIntervalSeconds;

    private Integer connectionTimeout;

    private Integer numListenerThreads;

    private Boolean syncMode;

    // Fine to use null for initial value, Avro will create new ones if this
    // is null
    private BinaryEncoder encoder = null;

    @Override
    public void configure(Context context) {
        batchSize = context.getInteger(BATCH_SIZE, DEFAULT_BATCH_SIZE);
        useAvroEventFormat = context.getBoolean("useAvroEventFormat", false);
        // client options
        serviceUrl = context.getString("serviceUrl", "localhost:6650");
        authPluginClassName = context.getString("authPluginClassName", "");
        authParamsString = context.getString("authParamsString", "");
        tlsCertFile = context.getString("tlsCertFile", "");
        tlsKeyFile = context.getString("tlsKeyFile", "");
        useTLS = context.getBoolean("useTLS", false);
        operationTimeout = context.getInteger("operationTimeout", 0);
        numIoThreads = context.getInteger("numIoThreads", 0);
        connectionsPerBroker = context.getInteger("connectionsPerBroker", 0);
        enableTcpNoDelay = context.getBoolean("enableTcpNoDelay", false);
        tlsTrustCertsFilePath = context.getString("tlsTrustCertsFilePath", "");
        allowTlsInsecureConnection = context.getBoolean("allowTlsInsecureConnection", false);
        enableTlsHostnameVerification = context.getBoolean("enableTlsHostnameVerification", false);
        statsInterval = context.getInteger("statsInterval", 0);
        maxConcurrentLookupRequests = context.getInteger("maxConcurrentLookupRequests", 0);
        maxLookupRequests = context.getInteger("maxLookupRequests", 0);
        maxNumberOfRejectedRequestPerConnection = context.getInteger("maxNumberOfRejectedRequestPerConnection", 0);
        keepAliveIntervalSeconds = context.getInteger("keepAliveIntervalSeconds", 0);
        connectionTimeout = context.getInteger("connectionTimeout", 0);
        numListenerThreads = context.getInteger("numListenerThreads", 0);
        // producer options
        topicName = context.getString("topicName", "");
        producerName = context.getString("producerName", "");
        sendTimeout = context.getInteger("sendTimeout", 10);
        blockIfQueueFull = context.getBoolean("blockIfQueueFull", false);
        enableBatching = context.getBoolean("enableBatching", false);
        batchMessagesMaxMessagesPerBatch = context.getInteger("batchMessagesMaxMessagesPerBatch", 1000);
        batchDelay = context.getLong("batchDelay", 0L);
        messageRoutingMode = context.getInteger("messageRoutingMode", -1);
        hashingScheme = context.getInteger("hashingSchema", -1);
        compressionType = context.getInteger("compressionType", -1);
        // message options
        syncMode = context.getBoolean("syncMode", true);
    }

    @Override
    public long getBatchSize() {
        return batchSize;
    }

    @Override
    public Status process() throws EventDeliveryException {
        Status result = Status.READY;
        Channel channel = getChannel();
        Transaction transaction = null;
        Event event = null;
        if (null == producer || null == client) {
            initPulsarClient();
            initPulsarProducer();
        }
        try {
            transaction = channel.getTransaction();
            transaction.begin();
            long processedEvents = 0;
            for (; processedEvents < batchSize; processedEvents += 1) {
                event = channel.take();
                if (event == null) {
                    // no events available in the channel
                    if (processedEvents == 0) {
                        result = Status.BACKOFF;
                        counter.incrementBatchEmptyCount();
                    } else if (processedEvents < batchSize) {
                        counter.incrementBatchUnderflowCount();
                    } else {
                        counter.incrementBatchCompleteCount();
                    }
                    break;
                }
                TypedMessageBuilder<byte[]> newMessage = producer.newMessage();
                if (event.getHeaders() != null) {
                    if (event.getHeaders().get("key") != null) {
                        newMessage = newMessage.key(event.getHeaders().get("key"));
                    }
                    newMessage.value(serializeEvent(event, useAvroEventFormat)).properties(event.getHeaders());
                } else {
                    newMessage.value(serializeEvent(event, useAvroEventFormat));
                }
                if (syncMode) {
                    newMessage.send();
                } else {
                    newMessage.sendAsync();
                }
            }
            if (!syncMode) {
                producer.flush();
            }
            transaction.commit();
        } catch (Exception ex) {
            log.error("Failed to publish events", ex);
            counter.incrementEventWriteOrChannelFail(ex);
            result = Status.BACKOFF;
            if (transaction != null) {
                try {
                    // If the transaction wasn't committed before we got the exception, we
                    // need to rollback.
                    transaction.rollback();
                } catch (RuntimeException e) {
                    log.error("Transaction rollback failed: " + e.getLocalizedMessage());
                    log.debug("Exception follows.", e);
                } finally {
                    transaction.close();
                    transaction = null;
                }
            }
        } finally {
            if (transaction != null) {
                transaction.close();
            }
        }
        return result;
    }

    @Override
    public synchronized void start() {
        try {
            log.info("start pulsar producer");
            initPulsarClient();
            initPulsarProducer();
            this.counter = new SinkCounter("flume-sink");
            super.start();
        } catch (Exception e) {
            log.error("init pulsar failed:{}", e.getMessage());
        }
    }

    @Override
    public synchronized void stop() {
        try {
            log.info("stop pulsar producer");
            producer.close();
            client.close();
        } catch (Exception e) {
            log.error("stop pulsar failed");
        }
        super.stop();
    }

    private void initPulsarClient() {
        try {
            clientBuilder = PulsarClient.builder();
            if (authPluginClassName.length() > 0 && authParamsString.length() > 0) {
                clientBuilder.authentication(authPluginClassName, authParamsString);
            }
            if (useTLS) {
                clientBuilder.serviceUrl("pulsar://+ssl" + serviceUrl);
            } else {
                clientBuilder.serviceUrl("pulsar://" + serviceUrl);
            }
            if (tlsCertFile.length() > 0 && tlsKeyFile.length() > 0) {
                Map<String, String> authParams = new HashMap<>();
                authParams.put("tlsCertFile", tlsCertFile);
                authParams.put("tlsKeyFile", tlsKeyFile);
                Authentication tlsAuth = AuthenticationFactory.create(AuthenticationTls.class.getName(), authParams);
                clientBuilder.authentication(tlsAuth);
            }
            if (operationTimeout > 0) {
                clientBuilder.operationTimeout(operationTimeout, TimeUnit.SECONDS);
            }
            if (numIoThreads > 0) {
                clientBuilder.ioThreads(numIoThreads);
            }
            if (numListenerThreads > 0) {
                clientBuilder.listenerThreads(numListenerThreads);
            }
            if (connectionsPerBroker > 0) {
                clientBuilder.connectionsPerBroker(connectionsPerBroker);
            }
            if (enableTcpNoDelay) {
                clientBuilder.enableTcpNoDelay(enableTcpNoDelay);
            }
            if (tlsTrustCertsFilePath.length() > 0) {
                clientBuilder.tlsTrustCertsFilePath(tlsTrustCertsFilePath);
            }
            if (allowTlsInsecureConnection) {
                clientBuilder.allowTlsInsecureConnection(allowTlsInsecureConnection);
            }
            if (enableTlsHostnameVerification) {
                clientBuilder.enableTlsHostnameVerification(enableTlsHostnameVerification);
            }
            if (statsInterval > 0) {
                clientBuilder.statsInterval(statsInterval, TimeUnit.SECONDS);
            }
            if (maxConcurrentLookupRequests > 0) {
                clientBuilder.maxConcurrentLookupRequests(maxConcurrentLookupRequests);
            }
            if (maxLookupRequests > 0) {
                clientBuilder.maxLookupRequests(maxLookupRequests);
            }
            if (maxNumberOfRejectedRequestPerConnection > 0) {
                clientBuilder.maxNumberOfRejectedRequestPerConnection(maxNumberOfRejectedRequestPerConnection);
            }
            if (keepAliveIntervalSeconds > 0) {
                clientBuilder.keepAliveInterval(keepAliveIntervalSeconds, TimeUnit.SECONDS);
            }
            if (connectionTimeout > 0) {
                clientBuilder.connectionTimeout(connectionTimeout, TimeUnit.SECONDS);
            }
            client = clientBuilder.build();
        } catch (Exception e) {
            log.error("init pulsar client failed:{}", e.getMessage());
        }
    }

    private void initPulsarProducer() {
        try {
            producerBuilder = client.newProducer();
            if (topicName.length() > 0) {
                producerBuilder = producerBuilder.topic(topicName);
            }
            if (producerName.length() > 0) {
                producerBuilder = producerBuilder.producerName(producerName);
            }
            if (sendTimeout > 0) {
                producerBuilder.sendTimeout(sendTimeout, TimeUnit.SECONDS);
            } else {
                producerBuilder.sendTimeout(10, TimeUnit.SECONDS);
            }
            if (blockIfQueueFull) {
                producerBuilder.blockIfQueueFull(blockIfQueueFull);
            }
            if (enableBatching) {
                producerBuilder.enableBatching(enableBatching);
            }
            if (batchMessagesMaxMessagesPerBatch > 0) {
                producerBuilder.batchingMaxMessages(batchMessagesMaxMessagesPerBatch);
            }
            if (batchDelay > 0) {
                producerBuilder.batchingMaxPublishDelay(batchDelay, TimeUnit.MILLISECONDS);
            }
            if (MessageRoutingMode.SinglePartition.equals(messageRoutingMode)) {
                producerBuilder.messageRoutingMode(MessageRoutingMode.SinglePartition);
            } else if (MessageRoutingMode.CustomPartition.equals(messageRoutingMode)) {
                producerBuilder.messageRoutingMode(MessageRoutingMode.CustomPartition);
            } else if (MessageRoutingMode.RoundRobinPartition.equals(messageRoutingMode)) {
                producerBuilder.messageRoutingMode(MessageRoutingMode.RoundRobinPartition);
            }
            if (HashingScheme.JavaStringHash.equals(hashingScheme)) {
                producerBuilder.hashingScheme(HashingScheme.JavaStringHash);
            } else if (HashingScheme.Murmur3_32Hash.equals(hashingScheme)) {
                producerBuilder.hashingScheme(HashingScheme.Murmur3_32Hash);
            }
            if (CompressionType.LZ4.equals(compressionType)) {
                producerBuilder.compressionType(CompressionType.LZ4);
            } else if (CompressionType.ZLIB.equals(compressionType)) {
                producerBuilder.compressionType(CompressionType.ZLIB);
            } else if (CompressionType.ZSTD.equals(compressionType)) {
                producerBuilder.compressionType(CompressionType.ZSTD);
            } else if (CompressionType.NONE.equals(compressionType)) {
                producerBuilder.compressionType(CompressionType.NONE);
            }
            producer = producerBuilder.create();
        } catch (Exception e) {
            log.error("init pulsar producer failed:{}", e.getMessage());
        }
    }

    private byte[] serializeEvent(Event event, boolean useAvroEventFormat) throws IOException {
        byte[] bytes;
        if (useAvroEventFormat) {
            if (!tempOutStream.isPresent()) {
                tempOutStream = Optional.of(new ByteArrayOutputStream());
            }
            if (!writer.isPresent()) {
                writer = Optional.of(new SpecificDatumWriter<AvroFlumeEvent>(AvroFlumeEvent.class));
            }
            tempOutStream.get().reset();
            AvroFlumeEvent e = new AvroFlumeEvent(toCharSeqMap(event.getHeaders()), ByteBuffer.wrap(event.getBody()));
            encoder = EncoderFactory.get().directBinaryEncoder(tempOutStream.get(), encoder);
            writer.get().write(e, encoder);
            encoder.flush();
            bytes = tempOutStream.get().toByteArray();
        } else {
            bytes = event.getBody();
        }
        return bytes;
    }

    private static Map<CharSequence, CharSequence> toCharSeqMap(Map<String, String> stringMap) {
        Map<CharSequence, CharSequence> charSeqMap = new HashMap<CharSequence, CharSequence>();
        for (Map.Entry<String, String> entry : stringMap.entrySet()) {
            charSeqMap.put(entry.getKey(), entry.getValue());
        }
        return charSeqMap;
    }
}

18 Source : Generate14TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by14Root = outputRoot.resolve("by14");
    by14.RecordWithUnion outer = new by14.RecordWithUnion();
    outer.f = new by14.InnerUnionRecord();
    outer.f.f = 14;
    try {
        SpecificDatumWriter<by14.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by14Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = new BinaryEncoder(Files.newOutputStream(binaryRecordWithUnion));
        Path jsonRecordWithUnion = TestUtil.getNewFile(by14Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = new JsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

18 Source : AvroCompatibilityHelper.java
with BSD 2-Clause "Simplified" License
from linkedin

// encoders/decoders
/**
 * constructs a {@link BinaryEncoder} on top of the given output stream
 * @param out an output stream
 * @param buffered true for a buffered encoder (when supported by the runtime version of Avro); these perform better
 * @param reuse a given encoder to reuse, if supported by the runtime avro
 * @return a {@link BinaryEncoder}
 */
public static BinaryEncoder newBinaryEncoder(OutputStream out, boolean buffered, BinaryEncoder reuse) {
    assertAvroAvailable();
    return ADAPTER.newBinaryEncoder(out, buffered, reuse);
}
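
A hedged usage sketch of this helper (schema and record are assumed to exist in scope):

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // buffered = true for throughput; a previous encoder may be passed for reuse
    BinaryEncoder encoder = AvroCompatibilityHelper.newBinaryEncoder(out, true, null);
    new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
    encoder.flush(); // required with a buffered encoder before out.toByteArray()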

18 Source : IcebergEncoder.java
with Apache License 2.0
from apache

@Override
public void encode(D datum, OutputStream stream) throws IOException {
    BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(stream, ENCODER.get());
    ENCODER.set(encoder);
    writer.write(datum, encoder);
    encoder.flush();
}
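
The ENCODER and writer references here are fields of the enclosing class that the excerpt does not show; presumably something along these lines (a sketch, not the actual Iceberg source):

    private static final ThreadLocal<BinaryEncoder> ENCODER = new ThreadLocal<>();
    private final DatumWriter<D> writer = ...; // built from the class's Avro schema

Keeping the encoder in a ThreadLocal lets each thread reuse its encoder buffer across calls without synchronization; HoodieAvroUtils below applies the same trick with its reuseEncoder field.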

17 Source : KafkaKeySerializer.java
with Apache License 2.0
from rayokota

@Override
public byte[] serialize(String topic, Comparable[] object) {
    if (object == null) {
        return null;
    }
    try {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = encoderFactory.directBinaryEncoder(out, null);
        writer.write(toRecord(object), encoder);
        encoder.flush();
        byte[] bytes = out.toByteArray();
        out.close();
        return bytes;
    } catch (IOException | RuntimeException e) {
        // avro serialization can throw AvroRuntimeException, NullPointerException,
        // ClassCastException, etc
        LOG.error("Error serializing Avro key " + e.getMessage());
        throw new SerializationException("Error serializing Avro key", e);
    }
}

17 Source : Avro14Adapter.java
with BSD 2-Clause "Simplified" License
from linkedin

@Override
public BinaryEncoder newBinaryEncoder(OutputStream out, boolean buffered, BinaryEncoder reuse) {
    if (reuse != null && reuse instanceof Avro18BufferedBinaryEncoder) {
        try {
            reuse.flush();
            reuse.init(out);
        } catch (IOException e) {
            throw new AvroRuntimeException("Failure flushing old output", e);
        }
        return reuse;
    }
    return buffered ? new Avro18BufferedBinaryEncoder(out) : new BinaryEncoder(out);
}

17 Source : HoodieAvroUtils.java
with Apache License 2.0
from apache

public static <T extends IndexedRecord> byte[] indexedRecordToBytes(T record) {
    GenericDatumWriter<T> writer = new GenericDatumWriter<>(record.getSchema());
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, reuseEncoder.get());
        reuseEncoder.set(encoder);
        writer.write(record, encoder);
        encoder.flush();
        return out.toByteArray();
    } catch (IOException e) {
        throw new HoodieIOException("Cannot convert GenericRecord to bytes", e);
    }
}

16 Source : TransformTest.java
with Apache License 2.0
from OneCricketeer

private ByteArrayOutputStream encodeAvroObject(org.apache.avro.Schema schema, int sourceId, Object datum) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(MAGIC_BYTE);
    out.write(ByteBuffer.allocate(ID_SIZE).putInt(sourceId).array());
    EncoderFactory encoderFactory = EncoderFactory.get();
    BinaryEncoder encoder = encoderFactory.directBinaryEncoder(out, null);
    Object value = datum instanceof NonRecordContainer ? ((NonRecordContainer) datum).getValue() : datum;
    DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
    writer.write(value, encoder);
    encoder.flush();
    return out;
}

16 Source : Avro19Adapter.java
with BSD 2-Clause "Simplified" License
from linkedin

@Override
public BinaryEncoder newBinaryEncoder(OutputStream out, boolean buffered, BinaryEncoder reuse) {
    if (buffered) {
        return EncoderFactory.get().binaryEncoder(out, reuse);
    } else {
        return EncoderFactory.get().directBinaryEncoder(out, reuse);
    }
}
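
Note the distinction these adapters preserve: binaryEncoder returns a buffered encoder that holds bytes in an internal buffer until flush() is called, while directBinaryEncoder writes straight through to the stream. This is why every buffered example in this list calls encoder.flush() before reading the output stream.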

16 Source : DefaultAvroSerDesHandler.java
with Apache License 2.0
from hortonworks

@Override
public void handlePayloadSerialization(OutputStream outputStream, Object input) {
    try {
        Schema schema = AvroUtils.computeSchema(input);
        Schema.Type schemaType = schema.getType();
        if (Schema.Type.BYTES.equals(schemaType)) {
            // in case of byte arrays, there is no need to go through Avro: there is not much to
            // optimize, and Avro expects the payload to be a ByteBuffer rather than a byte array
            outputStream.write((byte[]) input);
        } else if (Schema.Type.STRING.equals(schemaType)) {
            // get UTF-8 bytes and directly send those over instead of using avro.
            outputStream.write(input.toString().getBytes("UTF-8"));
        } else {
            BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
            DatumWriter<Object> writer;
            boolean isSpecificRecord = input instanceof SpecificRecord;
            if (isSpecificRecord) {
                writer = new SpecificDatumWriter<>(schema);
            } else {
                writer = new GenericDatumWriter<>(schema);
            }
            writer.write(input, encoder);
            encoder.flush();
        }
    } catch (IOException e) {
        throw new AvroRetryableException(e);
    } catch (RuntimeException e) {
        throw new AvroException(e);
    }
}

16 Source : ConfluentAvroSerDesHandler.java
with Apache License 2.0
from hortonworks

@Override
public void handlePayloadSerialization(OutputStream outputStream, Object input) {
    try {
        Schema schema = AvroUtils.computeSchema(input);
        if (input instanceof byte[]) {
            outputStream.write((byte[]) input);
        } else {
            BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
            DatumWriter<Object> writer;
            if (input instanceof SpecificRecord) {
                writer = new SpecificDatumWriter<>(schema);
            } else {
                writer = new GenericDatumWriter<>(schema);
            }
            writer.write(input, encoder);
            encoder.flush();
        }
    } catch (IOException e) {
        throw new AvroRetryableException("Error serializing Avro message", e);
    } catch (RuntimeException e) {
        // avro serialization can throw AvroRuntimeException, NullPointerException,
        // ClassCastException, etc
        throw new AvroException("Error serializing Avro message", e);
    }
}

15 Source : Generate19TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by19Root = outputRoot.resolve("by19");
    by19.RecordWithUnion outer = new by19.RecordWithUnion();
    outer.setF(new by19.InnerUnionRecord());
    outer.getF().setF(19);
    try {
        SpecificDatumWriter<by19.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by19Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by19Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

15 Source : Generate18TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by18Root = outputRoot.resolve("by18");
    by18.RecordWithUnion outer = new by18.RecordWithUnion();
    outer.setF(new by18.InnerUnionRecord());
    outer.getF().setF(18);
    try {
        SpecificDatumWriter<by18.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by18Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by18Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

15 Source : Generate17TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by17Root = outputRoot.resolve("by17");
    by17.RecordWithUnion outer = new by17.RecordWithUnion();
    outer.setF(new by17.InnerUnionRecord());
    outer.getF().setF(17);
    try {
        SpecificDatumWriter<by17.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by17Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by17Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

15 Source : Generate16TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by16Root = outputRoot.resolve("by16");
    by16.RecordWithUnion outer = new by16.RecordWithUnion();
    outer.setF(new by16.InnerUnionRecord());
    outer.getF().setF(16);
    try {
        SpecificDatumWriter<by16.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by16Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by16Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

15 Source : Generate15TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by15Root = outputRoot.resolve("by15");
    by15.RecordWithUnion outer = new by15.RecordWithUnion();
    outer.f = new by15.InnerUnionRecord();
    outer.f.f = 15;
    try {
        SpecificDatumWriter<by15.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by15Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by15Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

15 Source : Generate110TestResources.java
with BSD 2-Clause "Simplified" License
from linkedin

public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by110Root = outputRoot.resolve("by110");
    by110.RecordWithUnion outer = new by110.RecordWithUnion();
    outer.setF(new by110.InnerUnionRecord());
    outer.getF().setF(110);
    try {
        SpecificDatumWriter<by110.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by110Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by110Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

15 Source : Avro17DefaultValuesCache.java
with BSD 2-Clause "Simplified" License
from linkedin

/**
 * Gets the default value of the given field, if any.
 * @param field the field whose default value should be retrieved.
 * @return the default value associated with the given field,
 * or null if none is specified in the schema.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static Object getDefaultValue(Schema.Field field, boolean specific) {
    JsonNode json = field.defaultValue();
    if (json == null) {
        throw new AvroRuntimeException("Field " + field + " not set and has no default value");
    }
    if (json.isNull() && (field.schema().getType() == Schema.Type.NULL || (field.schema().getType() == Schema.Type.UNION && field.schema().getTypes().get(0).getType() == Schema.Type.NULL))) {
        return null;
    }
    Map<Schema.Field, Object> cache = specific ? SPECIFIC_CACHED_DEFAULTS : GENERIC_CACHED_DEFAULTS;
    // Check the cache
    Object defaultValue = cache.get(field);
    // If not cached, get the default Java value by encoding the default JSON
    // value and then decoding it:
    if (defaultValue == null)
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(baos, null);
            ResolvingGrammarGenerator.encode(encoder, field.schema(), json);
            encoder.flush();
            ByteArrayInputStream is = new ByteArrayInputStream(baos.toByteArray());
            BinaryDecoder decoder = DecoderFactory.get().directBinaryDecoder(is, null);
            DatumReader reader;
            if (specific) {
                reader = new SpecificDatumReader(field.schema());
            } else {
                reader = new GenericDatumReader(field.schema());
            }
            defaultValue = reader.read(null, decoder);
            cache.put(field, defaultValue);
        } catch (IOException e) {
            throw new AvroRuntimeException(e);
        }
    return defaultValue;
}

15 Source : Avro15DefaultValuesCache.java
with BSD 2-Clause "Simplified" License
from linkedin

/**
 * Gets the default value of the given field, if any.
 * @param field the field whose default value should be retrieved.
 * @return the default value associated with the given field,
 * or null if none is specified in the schema.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static Object getDefaultValue(Schema.Field field, boolean specific) {
    JsonNode json = field.defaultValue();
    if (json == null) {
        throw new AvroRuntimeException("Field " + field + " not set and has no default value");
    }
    if (json.isNull() && (field.schema().getType() == Schema.Type.NULL || (field.schema().getType() == Schema.Type.UNION && field.schema().getTypes().get(0).getType() == Schema.Type.NULL))) {
        return null;
    }
    Map<Schema.Field, Object> cache = specific ? SPECIFIC_CACHED_DEFAULTS : GENERIC_CACHED_DEFAULTS;
    // Check the cache
    Object defaultValue = cache.get(field);
    // If not cached, get the default Java value by encoding the default JSON
    // value and then decoding it:
    if (defaultValue == null)
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(baos, null);
            Avro15ResolvingGrammarGeneratorAccessUtil.encode(encoder, field.schema(), json);
            encoder.flush();
            ByteArrayInputStream is = new ByteArrayInputStream(baos.toByteArray());
            BinaryDecoder decoder = DecoderFactory.get().directBinaryDecoder(is, null);
            DatumReader reader;
            if (specific) {
                reader = new SpecificDatumReader(field.schema());
            } else {
                reader = new GenericDatumReader(field.schema());
            }
            defaultValue = reader.read(null, decoder);
            cache.put(field, defaultValue);
        } catch (IOException e) {
            throw new AvroRuntimeException(e);
        }
    return defaultValue;
}

15 Source : AvroUtils.java
with Apache License 2.0
from jveverka

public static byte[] serializeEmployee(Employee employee) throws IOException {
    DatumWriter<Employee> employeeDatumWriter = new SpecificDatumWriter<>(Employee.class);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(os, null);
    employeeDatumWriter.setSchema(Employee.getClassSchema());
    employeeDatumWriter.write(employee, binaryEncoder);
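    // directBinaryEncoder is unbuffered, so flushing the stream rather than the encoder suffices here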
    os.flush();
    return os.toByteArray();
}

14 Source : FastStringableTest.java
with Apache License 2.0
from RTBHOUSE

private <T> Decoder serializeSpecificFast(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);
    try {
        FastSpecificSerializerGenerator<T> fastSpecificSerializerGenerator = new FastSpecificSerializerGenerator<>(schema, tempDir, classLoader, null);
        FastSerializer<T> fastSerializer = fastSpecificSerializerGenerator.generateSerializer();
        fastSerializer.serialize(data, binaryEncoder);
        binaryEncoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}

14 Source : FastGenericSerializerGeneratorTest.java
with Apache License 2.0
from RTBHOUSE

private <T> Decoder serializeGenericFast(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);
    try {
        FastGenericSerializerGenerator<T> fastGenericSerializerGenerator = new FastGenericSerializerGenerator<>(schema, tempDir, classLoader, null);
        FastSerializer<T> fastSerializer = fastGenericSerializerGenerator.generateSerializer();
        fastSerializer.serialize(data, binaryEncoder);
        binaryEncoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}

14 Source : KafkaValueSerializer.java
with Apache License 2.0
from rayokota

@Override
public byte[] serialize(String topic, NavigableMap<Long, VersionedValue> object) {
    if (object == null) {
        return null;
    }
    try {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(MAGIC_BYTE);
        out.write(ByteBuffer.allocate(VERSION_SIZE).putInt(version).array());
        BinaryEncoder encoder = encoderFactory.directBinaryEncoder(out, null);
        writer.write(toArray(object), encoder);
        encoder.flush();
        byte[] bytes = out.toByteArray();
        out.close();
        return bytes;
    } catch (IOException | RuntimeException e) {
        // avro serialization can throw AvroRuntimeException, NullPointerException,
        // ClassCastException, etc.
        LOG.error("Error serializing Avro value " + e.getMessage());
        throw new SerializationException("Error serializing Avro value", e);
    }
}
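
Reading such a value back means stripping the same framing first. A sketch, assuming MAGIC_BYTE and VERSION_SIZE match the serializer's constants and that reader is a DatumReader built from the writer's schema; converting the decoded datum back to a NavigableMap (the inverse of toArray) is omitted:

ByteBuffer buffer = ByteBuffer.wrap(payload);
if (buffer.get() != MAGIC_BYTE) {
    throw new SerializationException("Unknown magic byte");
}
int version = buffer.getInt(); // consumes the VERSION_SIZE (4-byte) header
BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(payload, buffer.position(), buffer.remaining(), null);
Object values = reader.read(null, decoder);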

14 Source : KafkaAvroPublisher.java
with Apache License 2.0
from pinterest

public void publish(BrokerStats brokerStats) throws IOException {
    try {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);
        avroEventWriter.write(brokerStats, binaryEncoder);
        binaryEncoder.flush();
        IOUtils.closeQuietly(stream);
        String key = brokerStats.getName() + "_" + System.currentTimeMillis();
        int numPartitions = kafkaProducer.partitionsFor(destTopic).size();
        int partition = brokerStats.getId() % numPartitions;
        Future<RecordMetadata> future = kafkaProducer.send(new ProducerRecord<>(destTopic, partition, key.getBytes(), stream.toByteArray()));
        future.get();
        OpenTsdbMetricConverter.incr("kafka.stats.collector.success", 1, "host=" + HOSTNAME);
    } catch (Exception e) {
        LOG.error("Failure in publish stats", e);
        OpenTsdbMetricConverter.incr("kafka.stats.collector.failure", 1, "host=" + HOSTNAME);
        throw new RuntimeException("Avro serialization failure", e);
    }
}

14 Source : AvroTestUtils.java
with Apache License 2.0
from ljygz

/**
 * Writes given record using specified schema.
 * @param record record to serialize
 * @param schema schema to use for serialization
 * @return serialized record
 */
public static byte[] writeRecord(GenericRecord record, Schema schema) throws IOException {
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(stream, null);
    new GenericDatumWriter<>(schema).write(record, encoder);
    encoder.flush();
    return stream.toByteArray();
}

14 Source : AvroEncoderUtil.java
with Apache License 2.0
from apache

public static <T> byte[] encode(T datum, Schema avroSchema) throws IOException {
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        DataOutputStream dataOut = new DataOutputStream(out);
        // Write the magic bytes
        dataOut.write(MAGIC_BYTES);
        // Write avro schema
        dataOut.writeUTF(avroSchema.toString());
        // Encode the datum with avro schema.
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        DatumWriter<T> writer = new GenericAvroWriter<>(avroSchema);
        writer.write(datum, encoder);
        encoder.flush();
        return out.toByteArray();
    }
}
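
A decode counterpart would unwind the framing in the same order. A sketch using a plain GenericDatumReader in place of the project's reader, with MAGIC_BYTES assumed to be the same constant the encoder writes:

public static <T> T decode(byte[] bytes) throws IOException {
    DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(bytes));
    byte[] magic = new byte[MAGIC_BYTES.length];
    dataIn.readFully(magic); // a real implementation would validate these bytes
    Schema avroSchema = new Schema.Parser().parse(dataIn.readUTF());
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(dataIn, null);
    DatumReader<T> reader = new GenericDatumReader<>(avroSchema);
    return reader.read(null, decoder);
}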

14 Source : AbstractMergeHelper.java
with Apache License 2.0
from apache

protected GenericRecord transformRecordBasedOnNewSchema(GenericDatumReader<GenericRecord> gReader, GenericDatumWriter<GenericRecord> gWriter, ThreadLocal<BinaryEncoder> encoderCache, ThreadLocal<BinaryDecoder> decoderCache, GenericRecord gRec) {
    ByteArrayOutputStream inStream = null;
    try {
        inStream = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(inStream, encoderCache.get());
        encoderCache.set(encoder);
        gWriter.write(gRec, encoder);
        encoder.flush();
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inStream.toByteArray(), decoderCache.get());
        decoderCache.set(decoder);
        GenericRecord transformedRec = gReader.read(null, decoder);
        return transformedRec;
    } catch (IOException e) {
        throw new HoodieException(e);
    } finally {
        try {
            inStream.close();
        } catch (IOException ioe) {
            throw new HoodieException(ioe.getMessage(), ioe);
        }
    }
}

13 Source : RecordToBigtableConverter.java
with MIT License
from mercari

private static byte[] toBytes(final GenericRecord record) {
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(record.getSchema());
    try (final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
        final BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
        datumWriter.write(record, binaryEncoder);
        binaryEncoder.flush();
        return byteArrayOutputStream.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

13 Source : Avro14DefaultValuesCache.java
with BSD 2-Clause "Simplified" License
from linkedin

/**
 * Gets the default value of the given field, if any.
 * @param field the field whose default value should be retrieved.
 * @param specific whether to materialize the default in the specific
 * (generated-class) representation rather than the generic one.
 * @return the default value associated with the given field,
 * or null if none is specified in the schema.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static Object getDefaultValue(Schema.Field field, boolean specific) {
    JsonNode json = field.defaultValue();
    if (json == null) {
        // avro 1.4 Field has no decent toString()
        String fieldStr = field.name() + " type:" + field.schema().getType() + " pos:" + field.pos();
        throw new AvroRuntimeException("Field " + fieldStr + " not set and has no default value");
    }
    if (json.isNull() && (field.schema().getType() == Schema.Type.NULL || (field.schema().getType() == Schema.Type.UNION && field.schema().getTypes().get(0).getType() == Schema.Type.NULL))) {
        return null;
    }
    Map<Schema.Field, Object> cache = specific ? SPECIFIC_CACHED_DEFAULTS : GENERIC_CACHED_DEFAULTS;
    // Check the cache
    Object defaultValue = cache.get(field);
    // If not cached, get the default Java value by encoding the default JSON
    // value and then decoding it:
    if (defaultValue == null)
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            BinaryEncoder encoder = new BinaryEncoder(baos);
            Avro14ResolvingGrammarGeneratorAccessUtil.encode(encoder, field.schema(), json);
            encoder.flush();
            BinaryDecoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(baos.toByteArray(), null);
            DatumReader reader;
            if (specific) {
                reader = new SpecificDatumReader(field.schema());
            } else {
                reader = new GenericDatumReader(field.schema());
            }
            defaultValue = reader.read(null, decoder);
            cache.put(field, defaultValue);
        } catch (IOException e) {
            throw new AvroRuntimeException(e);
        }
    return defaultValue;
}

12 Source : TestAvroConfluentRowDecoder.java
with Apache License 2.0
from trinodb

private static byte[] serializeRecord(Object record, Schema schema, int schemaId) {
    try {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        outputStream.write(0);
        outputStream.write(ByteBuffer.allocate(4).putInt(schemaId).array());
        BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(outputStream, null);
        GenericDatumWriter<Object> avroRecordWriter = new GenericDatumWriter<>(schema);
        avroRecordWriter.write(record, encoder);
        encoder.flush();
        byte[] serializedRecord = outputStream.toByteArray();
        outputStream.close();
        return serializedRecord;
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
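
Decoding the test payload requires skipping the 5-byte Confluent-style header (one magic byte plus a 4-byte schema id) before handing the rest to Avro; a sketch:

ByteBuffer buf = ByteBuffer.wrap(serializedRecord);
byte magic = buf.get();      // always 0 in this test
int schemaId = buf.getInt(); // big-endian, as written by ByteBuffer above
BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(serializedRecord, 5, serializedRecord.length - 5, null);
Object decoded = new GenericDatumReader<>(schema).read(null, decoder);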

12 Source : FastSerdeTestsSupport.java
with Apache License 2.0
from RTBHOUSE

public static <T> Decoder serializeGeneric(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);
    try {
        GenericDatumWriter<T> writer = new GenericDatumWriter<>(schema);
        writer.write(data, binaryEncoder);
        binaryEncoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}

12 Source : FastSerdeTestsSupport.java
with Apache License 2.0
from RTBHOUSE

public static <T> Decoder serializeSpecific(T record, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);
    try {
        SpecificDatumWriter<T> writer = new SpecificDatumWriter<>(schema);
        writer.write(record, binaryEncoder);
        binaryEncoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}

12 Source : AvroRender.java
with Apache License 2.0
from noear

@Override
public void render(Object obj, Context ctx) throws Throwable {
    // Handle cases that bypass serialization
    //
    if (obj == null) {
        return;
    }
    if (obj instanceof Throwable) {
        throw (Throwable) obj;
    }
    if (obj instanceof String) {
        // Cannot be output as JSON
        ctx.output((String) obj);
        return;
    }
    DatumWriter datumWriter = new SpecificDatumWriter(obj.getClass());
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    datumWriter.write(obj, encoder);
    // binaryEncoder buffers internally, so flush before reading the stream back
    encoder.flush();
    String txt = out.toString();
    if (XPluginImp.output_meta) {
        ctx.headerSet("solon.serialization", "AvroRender");
    }
    ctx.attrSet("output", txt);
    ctx.outputAsJson(txt);
}

12 Source : AvroCompatibilityHelperGeneratedFixedClassesTest.java
with BSD 2-Clause "Simplified" License
from linkedin

private void roundtrip(Object thingie) throws Exception {
    Schema schema = SpecificData.get().getSchema(thingie.getClass());
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = AvroCompatibilityHelper.newBinaryEncoder(os, false, null);
    SpecificDatumWriter<Object> writer = new SpecificDatumWriter<>(schema);
    writer.write(thingie, binaryEncoder);
    binaryEncoder.flush();
    byte[] serialized = os.toByteArray();
    ByteArrayInputStream is = new ByteArrayInputStream(serialized);
    BinaryDecoder binaryDecoder = AvroCompatibilityHelper.newBinaryDecoder(is, false, null);
    SpecificDatumReader<Object> reader = new SpecificDatumReader<>(schema);
    Object deserialize = reader.read(null, binaryDecoder);
    Assert.assertEquals(deserialize, thingie);
}

12 Source : AvroSchemaRegistrySqsAsyncClient.java
with MIT License
from JaidenAshmore

private <T> String serializeObject(final T payload, final Schema schema) {
    final Class<?> clazz = payload.getClass();
    final DatumWriter<Object> writer = avroSchemaServiceManager.getDatumWriter(clazz, schema);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
    try {
        writer.write(payload, encoder);
        encoder.flush();
    } catch (IOException ioException) {
        throw new RuntimeException("Error serializing payload", ioException);
    }
    return Base64.getEncoder().encodeToString(baos.toByteArray());
}
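
The consuming side reverses both layers, Base64 first and then Avro. A sketch, assuming base64Payload is the string produced above and the same schema is resolvable there:

final byte[] avroBytes = Base64.getDecoder().decode(base64Payload);
final DatumReader<Object> reader = new GenericDatumReader<>(schema);
final BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(avroBytes, null);
final Object payload = reader.read(null, decoder);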

10 Source : BinaryAvroDecoderTest.java
with Apache License 2.0
from data-integrations

@Test
public void testBasicFunctionality() throws Exception {
    // Parse schema and validate fields.
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(SCHEMA);
    Assert.assertTrue(schema != null);
    List<Schema.Field> fields = schema.getFields();
    Assert.assertEquals(3, fields.size());
    // Create generic rows.
    GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Root");
    user1.put("favorite_number", 8);
    GenericRecord user2 = new GenericData.Record(schema);
    user2.put("name", "Ben");
    user2.put("favorite_number", 7);
    user2.put("favorite_color", "red");
    // Write rows to byte array stream.
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    datumWriter.write(user1, encoder);
    datumWriter.write(user2, encoder);
    encoder.flush();
    out.close();
    byte[] bytes = out.toByteArray();
    BinaryAvroDecoder decoder = new BinaryAvroDecoder(schema);
    List<Row> rows = decoder.decode(bytes);
    Assert.assertEquals(2, rows.size());
    Assert.assertEquals("Root", rows.get(0).getValue("name"));
    Assert.assertEquals("Ben", rows.get(1).getValue("name"));
}
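
Since the two records are concatenated with no framing, the decoder presumably reads datums until the stream is exhausted. A plain-Avro sketch of that loop (BinaryAvroDecoder's actual internals may differ; run inside a method that throws IOException):

BinaryDecoder avroDecoder = DecoderFactory.get().binaryDecoder(bytes, null);
GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
List<GenericRecord> records = new ArrayList<>();
while (!avroDecoder.isEnd()) {
    records.add(reader.read(null, avroDecoder));
}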

7 Source : SimpleAvroTest.java
with Apache License 2.0
from datafibers-community

public static void main(String[] args) throws InterruptedException {
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(USER_SCHEMA);
    GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("symbol", "CHINA");
    user1.put("exchangecode", "TEST");
    try {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
        BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
        writer.write(user1, encoder);
        encoder.flush();
        GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        GenericRecord gr = reader.read(null, decoder);
        System.out.println(gr.toString());
        System.out.println("******Get Fields Names");
        List<String> stringList = new ArrayList<String>();
        if (RECORD.equals(schema.getType()) && schema.getFields() != null && !schema.getFields().isEmpty()) {
            for (org.apache.avro.Schema.Field field : schema.getFields()) {
                stringList.add(field.name());
            }
        }
        String[] fieldNames = stringList.toArray(new String[] {});
        for (String element : fieldNames) {
            System.out.println(element);
        }
        System.out.println("******Get Fields Types");
        int fieldsLen = schema.getFields().size();
        Class<?>[] fieldTypes = new Class[fieldsLen];
        int index = 0;
        String typeName;
        try {
            if (RECORD.equals(schema.getType()) && schema.getFields() != null && !schema.getFields().isEmpty()) {
                for (org.apache.avro.Schema.Field field : schema.getFields()) {
                    typeName = field.schema().getType().getName().toLowerCase();
                    // Mapping Avro type to Java type - TODO: complex types are not supported yet
                    switch(typeName) {
                        case "boolean":
                        case "string":
                        case "long":
                        case "float":
                            fieldTypes[index] = Class.forName("java.lang." + StringUtils.capitalize(typeName));
                            break;
                        case "bytes":
                            // Byte lives in java.lang, not java.util
                            fieldTypes[index] = Class.forName("java.lang.Byte");
                            break;
                        case "int":
                            fieldTypes[index] = Class.forName("java.lang.Integer");
                            break;
                        default:
                            fieldTypes[index] = Class.forName("java.lang." + StringUtils.capitalize(typeName));
                    }
                    index++;
                }
            }
        } catch (ClassNotFoundException cnf) {
            cnf.printStackTrace();
        }
        for (Class<?> element : fieldTypes) {
            System.out.println(element);
        }
        System.out.println("TestCase_Test Schema Register Client");
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:9092");
        properties.setProperty("group.id", "consumer_test");
        properties.setProperty("schema.subject", "test-value");
        properties.setProperty("schema.registry", "localhost:8081");
        try {
            Schema schema1 = SchemaRegistryClient.getLatestSchemaFromProperty(properties);
            System.out.println("raw schema1 for name is " + schema1.getField("name"));
            String USER_SCHEMA = "{" + "\"type\":\"record\"," + "\"name\":\"test\"," + "\"fields\":[" + "  { \"name\":\"name\", \"type\":\"string\" }," + "  { \"name\":\"symbol\", \"type\":\"string\" }," + "  { \"name\":\"exchangecode\", \"type\":\"string\" }" + "]}";
            Schema.Parser parser2 = new Schema.Parser();
            Schema schema2 = parser2.parse(USER_SCHEMA);
            System.out.println("raw schema2 for name is " + schema.getField("name"));
        } catch (Exception e) {
            e.printStackTrace();
        }
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }
}

6 Source : TestConvertAvroToJSON.java
with Apache License 2.0
from wangrenlei

@Test
public void testSingleSchemalessAvroMessage() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ConvertAvroToJSON());
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/user.avsc"));
    String stringSchema = schema.toString();
    runner.setProperty(ConvertAvroToJSON.SCHEMA, stringSchema);
    final GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);
    final ByteArrayOutputStream out1 = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out1, null);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(user1, encoder);
    encoder.flush();
    out1.flush();
    byte[] test = out1.toByteArray();
    runner.enqueue(test);
    runner.run();
    runner.assertAllFlowFilesTransferred(ConvertAvroToJSON.REL_SUCCESS, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertAvroToJSON.REL_SUCCESS).get(0);
    out.assertContentEquals("{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null}");
}

5 Source : TestConvertAvroToJSON.java
with Apache License 2.0
from wangrenlei

@Test
public void testSingleSchemalessAvroMessage_noContainer() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ConvertAvroToJSON());
    runner.setProperty(ConvertAvroToJSON.CONTAINER_OPTIONS, ConvertAvroToJSON.CONTAINER_NONE);
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/user.avsc"));
    String stringSchema = schema.toString();
    runner.setProperty(ConvertAvroToJSON.SCHEMA, stringSchema);
    final GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);
    final ByteArrayOutputStream out1 = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out1, null);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(user1, encoder);
    encoder.flush();
    out1.flush();
    byte[] test = out1.toByteArray();
    runner.enqueue(test);
    runner.run();
    runner.assertAllFlowFilesTransferred(ConvertAvroToJSON.REL_SUCCESS, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertAvroToJSON.REL_SUCCESS).get(0);
    out.assertContentEquals("{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null}");
}
