org.apache.arrow.memory.BufferAllocator

Here are examples of the Java API org.apache.arrow.memory.BufferAllocator, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

459 Examples
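
Before the project examples, here is a minimal, self-contained sketch of the typical BufferAllocator lifecycle (assuming a recent Apache Arrow release; the class and allocator names below are illustrative and not taken from the examples): a RootAllocator owns the direct memory, child allocators carve out accountable limits, and every ArrowBuf must be closed before its allocator.

import org.apache.arrow.memory.ArrowBuf;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;

public class AllocatorLifecycleExample {
    public static void main(String[] args) {
        // Root allocator owns all direct memory for the process; close it last.
        try (BufferAllocator root = new RootAllocator(Long.MAX_VALUE);
            // Child allocators track and limit memory for a single component.
            BufferAllocator child = root.newChildAllocator("example-child", 0, 1024 * 1024);
            // Buffers are reference counted and must be released before their allocator is closed.
            ArrowBuf buf = child.buffer(256)) {
            buf.setInt(0, 42);
            System.out.println(buf.getInt(0));
        }
    }
}

Try-with-resources closes the buffer, then the child, then the root, the reverse of the allocation order; closing an allocator that still holds outstanding buffers fails with an allocation-leak error.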

19 Source : TestArrowFileReader.java
with Apache License 2.0
from dremio

/**
 * Helper method which creates an empty list vector
 */
private static ListVector testEmptyListVector(BufferAllocator allocator) {
    final ListVector vector = new ListVector("emptyListVector", allocator, FieldType.nullable(ArrowType.Null.INSTANCE), null);
    vector.allocateNew();
    return vector;
}

19 Source : TestArrowFileReader.java
with Apache License 2.0
from dremio

/**
 * Helper method which creates a test varchar vector
 */
private static VarCharVector testVarCharVector(BufferAllocator allocator) {
    VarCharVector colVarCharV = new VarCharVector("colVarChar", allocator);
    colVarCharV.allocateNew(500, 5);
    for (int i = 0; i < TEST_VARCHAR_VALUES.size(); i++) {
        if (TEST_VARCHAR_VALUES.get(i) == null) {
            colVarCharV.setNull(i);
        } else {
            colVarCharV.set(i, TEST_VARCHAR_VALUES.get(i).getBytes());
        }
    }
    return colVarCharV;
}

19 Source : TestArrowFileReader.java
with Apache License 2.0
from dremio

public List<RecordBatchHolder> getRecords(ArrowFileReader reader, long start, long limit, BufferAllocator allocator) throws Exception {
    return reader.read(start, limit);
}

19 Source : TestArrowFileReader.java
with Apache License 2.0
from dremio

/**
 * Helper method which creates a test bit vector
 */
private static BitVector testBitVector(BufferAllocator allocator) {
    BitVector colBitV = new BitVector("colBit", allocator);
    colBitV.allocateNew(5);
    for (int i = 0; i < TEST_BIT_VALUES.size(); i++) {
        if (TEST_BIT_VALUES.get(i) == null) {
            colBitV.setNull(i);
        } else {
            colBitV.set(i, TEST_BIT_VALUES.get(i) ? 1 : 0);
        }
    }
    return colBitV;
}

19 Source : BasicClientWithConnection.java
with Apache License 2.0
from dremio

public abstract class BasicClientWithConnection<T extends EnumLite, HANDSHAKE_SEND extends MessageLite, HANDSHAKE_RESPONSE extends MessageLite> extends BasicClient<T, ServerConnection, HANDSHAKE_SEND, HANDSHAKE_RESPONSE> {

    static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BasicClientWithConnection.class);

    private BufferAllocator alloc;

    private final String connectionName;

    public BasicClientWithConnection(RpcConfig rpcMapping, BufferAllocator alloc, EventLoopGroup eventLoopGroup, T handshakeType, Class<HANDSHAKE_RESPONSE> responseClass, Parser<HANDSHAKE_RESPONSE> handshakeParser, String connectionName, Optional<SSLEngineFactory> engineFactory) throws RpcException {
        super(rpcMapping, new ArrowByteBufAllocator(alloc), eventLoopGroup, handshakeType, responseClass, handshakeParser, engineFactory);
        this.alloc = alloc;
        this.connectionName = connectionName;
    }

    @Override
    protected Response handle(ServerConnection connection, int rpcType, byte[] pBody, ByteBuf dBody) throws RpcException {
        return handleReponse((ConnectionThrottle) connection, rpcType, pBody, dBody);
    }

    protected abstract Response handleReponse(ConnectionThrottle throttle, int rpcType, byte[] pBody, ByteBuf dBody) throws RpcException;

    @Override
    public ServerConnection initRemoteConnection(SocketChannel channel) {
        return new ServerConnection(connectionName, channel, alloc);
    }

    public static class ServerConnection extends RemoteConnection {

        private final BufferAllocator alloc;

        public ServerConnection(String name, SocketChannel channel, BufferAllocator alloc) {
            super(channel, name);
            this.alloc = alloc;
        }

        @Override
        public BufferAllocator getAllocator() {
            return alloc;
        }
    }
}

19 Source : TpchGenerator.java
with Apache License 2.0
from dremio

/**
 * Create a monolithic partition generator.
 * @param table
 * @param target
 * @param scale
 * @return
 */
public static TpchGenerator singleGenerator(GenerationDefinition.TpchTable table, double scale, BufferAllocator allocator, String... includedColumns) {
    GenerationDefinition def = new GenerationDefinition(scale, Long.MAX_VALUE);
    switch(table) {
        case CUSTOMER:
            return new CustomerGenerator(allocator, def, 1, TpchTable.CUSTOMER, includedColumns);
        case CUSTOMER_LIMITED:
            return new CustomerGenerator(allocator, def, 1, TpchTable.CUSTOMER_LIMITED, includedColumns);
        case REGION:
            return new RegionGenerator(allocator, def, includedColumns);
        case NATION:
            return new NationGenerator(allocator, def, includedColumns);
        default:
            throw new UnsupportedOperationException();
    }
}

19 Source : TestQueriesClerkBase.java
with Apache License 2.0
from dremio

/**
 * Base class for TestQueriesClerk
 *
 * Owns the test allocator, the root test allocator, rules for creation/destruction of the root test allocator, etc.
 */
public class TestQueriesClerkBase {

    // a mock-up of a buffer allocator, useful only for counting children
    protected BufferAllocator mockedRootAlloc;

    @Before
    public void setup() {
        assertEquals(0, getNumAllocators());
        mockedRootAlloc = new TestAllocator(null, "root", Long.MAX_VALUE);
    }

    @After
    public void teardown() throws Exception {
        AutoCloseables.close(mockedRootAlloc);
        assertEquals(0, getNumAllocators());
    }

    // Bare-bones plan fragment: only contains the major and minor fragment IDs
    protected PlanFragmentFull getDummyPlan(UserBitShared.QueryId queryId, int majorFragmentId, int minorFragmentId) {
        ExecProtos.FragmentHandle handle = ExecProtos.FragmentHandle.newBuilder().setQueryId(queryId).setMajorFragmentId(majorFragmentId).build();
        CoordExecRPC.FragmentPriority priority = CoordExecRPC.FragmentPriority.newBuilder().setWorkloadClass(UserBitShared.WorkloadClass.GENERAL).build();
        return new PlanFragmentFull(CoordExecRPC.PlanFragmentMajor.newBuilder().setHandle(handle).setPriority(priority).build(), CoordExecRPC.PlanFragmentMinor.newBuilder().setMinorFragmentId(minorFragmentId).build());
    }

    protected CoordExecRPC.SchedulingInfo getDummySchedulingInfo() {
        return CoordExecRPC.SchedulingInfo.newBuilder().setQueueId("unused").setWorkloadClass(UserBitShared.WorkloadClass.GENERAL).build();
    }

    private static int numAllocators = 0;

    protected static int getNumAllocators() {
        return numAllocators;
    }

    /**
     * Count the number of major fragments running
     * (Note: phases are easy to count. Fragments, on the other hand, are not -- fragments are not tracked within the
     * phase ticket, so counting them is impossible)
     */
    protected void assertLivePhasesCount(QueriesClerk clerk, int expectedValue) {
        List<QueryTicket> queryTickets = new ArrayList<>();
        for (WorkloadTicket workloadTicket : clerk.getWorkloadTickets()) {
            queryTickets.addAll(workloadTicket.getActiveQueryTickets());
        }
        int numPhases = 0;
        for (QueryTicket c : queryTickets) {
            replacedertTrue(String.format("Fewer refcounts (%d) than active phase tickets (%d)", c.getNumChildren(), c.getActivePhaseTickets().size()), c.getNumChildren() >= c.getActivePhaseTickets().size());
            numPhases += c.getActivePhaseTickets().size();
        }
        replacedertEquals(expectedValue, numPhases);
    }

    protected QueriesClerk makeClerk(WorkloadTicketDepot ticketDepot) {
        ExecToCoordTunnelCreator mockTunnelCreator = mock(ExecToCoordTunnelCreator.class);
        ExecToCoordTunnel mockTunnel = mock(ExecToCoordTunnel.class);
        return new QueriesClerk(ticketDepot);
    }

    public final class TestAllocator implements BufferAllocator {

        private BufferAllocator parent;

        private String name;

        private long limit;

        private TestAllocator(BufferAllocator parent, String name, long limit) {
            this.parent = parent;
            this.name = name;
            this.limit = limit;
            numAllocators++;
        }

        public BufferAllocator newChildAllocator(String childName, long dummy1, long limit) {
            return newChildAllocator(childName, null, dummy1, limit);
        }

        public BufferAllocator newChildAllocator(String childName, AllocationListener listener, long dummy1, long limit) {
            return new TestAllocator(this, childName, limit);
        }

        public void close() {
            numAllocators--;
        }

        // NB: None of the functions below are implemented
        public ArrowBuf buffer(long var1) {
            throw new UnsupportedOperationException();
        }

        public ArrowBuf buffer(long var1, BufferManager var2) {
            throw new UnsupportedOperationException();
        }

        public long getAllocatedMemory() {
            throw new UnsupportedOperationException();
        }

        public long getLimit() {
            return limit;
        }

        public long getInitReservation() {
            throw new UnsupportedOperationException();
        }

        public void setLimit(long var1) {
            throw new UnsupportedOperationException();
        }

        public long getPeakMemoryAllocation() {
            return 0;
        }

        public long getHeadroom() {
            throw new UnsupportedOperationException();
        }

        @Override
        public BufferAllocator getParentAllocator() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Collection<BufferAllocator> getChildAllocators() {
            throw new UnsupportedOperationException();
        }

        public AllocationReservation newReservation() {
            throw new UnsupportedOperationException();
        }

        public ArrowBuf getEmpty() {
            throw new UnsupportedOperationException();
        }

        public String getName() {
            return name;
        }

        public boolean isOverLimit() {
            throw new UnsupportedOperationException();
        }

        public String toVerboseString() {
            throw new UnsupportedOperationException();
        }

        public void assertOpen() {
            throw new UnsupportedOperationException();
        }

        @Override
        public AllocationListener getListener() {
            throw new UnsupportedOperationException();
        }

        @Override
        public void releaseBytes(long size) {
            throw new UnsupportedOperationException();
        }

        @Override
        public boolean forceAllocate(long size) {
            throw new UnsupportedOperationException();
        }

        @Override
        public BufferAllocator getRoot() {
            return this;
        }
    }

    /**
     * Used as a callback to the clerk's "build and start query" -- whose only job in this test is to actually
     * get the query ticket
     */
    public final class QueryTicketGetter implements QueryStarter, AutoCloseable {

        QueryTicket obtainedTicket = null;

        @Override
        public void buildAndStartQuery(QueryTicket ticket) {
            obtainedTicket = ticket;
        }

        @Override
        public void unableToBuildQuery(Exception e) {
            fail(String.format("Unable to build query. Received exception: %s", e.toString()));
        }

        @Override
        public void close() {
            if (obtainedTicket != null) {
                obtainedTicket.release();
            }
        }

        public QueryTicket getObtainedTicket() {
            return obtainedTicket;
        }
    }
}

19 Source : CustomHashAggDataGenerator.java
with Apache License 2.0
from dremio

private void InternalInit(int numRows, BufferAllocator allocator, final boolean largeVarChars) {
    this.numRows = numRows;
    this.batches = numRows / BATCH_SIZE;
    this.largeVarChars = largeVarChars;
    createBigSchemaAndInputContainer(allocator);
    buildInputTableDataAndResultset();
}

19 Source : TestSpillingHashAgg.java
with Apache License 2.0
from dremio

@Test
public void testCloseWithoutSetup() throws Exception {
    final HashAggregate agg = getHashAggregate(1_000_000, 12_000_000);
    SabotContext context = mock(SabotContext.class);
    try (BufferAllocator allocator = allocatorRule.newAllocator("test-spilling-hashagg", 0, Long.MAX_VALUE)) {
        when(context.getAllocator()).thenReturn(allocator);
        OptionManager optionManager = mock(OptionManager.class);
        when(optionManager.getOptionValidatorListing()).thenReturn(mock(OptionValidatorListing.class));
        try (BufferAllocator alloc = context.getAllocator().newChildAllocator("sample-alloc", 0, Long.MAX_VALUE);
            OperatorContextImpl operatorContext = new OperatorContextImpl(context.getConfig(), alloc, optionManager, 1000);
            final VectorizedHashAggOperator op = new VectorizedHashAggOperator(agg, operatorContext)) {
        }
    }
}

19 Source : RuntimeFilterTestUtils.java
with Apache License 2.0
from dremio

/**
 * Test utilities
 */
public class RuntimeFilterTestUtils {

    private BufferAllocator testAllocator;

    public RuntimeFilterTestUtils(BufferAllocator testAllocator) {
        this.testAllocator = testAllocator;
    }

    public OutOfBandMessage newOOB(int sendingMinorFragment, List<String> partitionCols, ArrowBuf bloomFilterBuf, ValueListFilter... nonPartitionColFilters) {
        List<Integer> allFragments = Lists.newArrayList(1, 2, 3, 4);
        allFragments.removeIf(val -> val == sendingMinorFragment);
        ExecProtos.RuntimeFilter.Builder runtimeFilter = ExecProtos.RuntimeFilter.newBuilder().setProbeScanOperatorId(101).setProbeScanMajorFragmentId(1);
        List<ArrowBuf> bufsToMerge = new ArrayList<>(nonPartitionColFilters.length + 1);
        if (!partitionCols.isEmpty()) {
            ExecProtos.CompositeColumnFilter partitionColFilter = ExecProtos.CompositeColumnFilter.newBuilder().setFilterType(ExecProtos.RuntimeFilterType.BLOOM_FILTER).setSizeBytes(64).setValueCount(0).addAllColumns(partitionCols).build();
            runtimeFilter.setPartitionColumnFilter(partitionColFilter);
            bloomFilterBuf.readerIndex(0);
            bloomFilterBuf.writerIndex(64);
            bufsToMerge.add(bloomFilterBuf);
        }
        if (nonPartitionColFilters.length > 0) {
            for (ValueListFilter vlf : nonPartitionColFilters) {
                ExecProtos.CompositeColumnFilter nonPartitionColFilter = ExecProtos.CompositeColumnFilter.newBuilder().setFilterType(ExecProtos.RuntimeFilterType.VALUE_LIST).setSizeBytes(vlf.getSizeInBytes()).setValueCount(vlf.getValueCount()).addColumns(vlf.getFieldName()).build();
                runtimeFilter.addNonPartitionColumnFilter(nonPartitionColFilter);
                bufsToMerge.add(vlf.buf());
            }
        }
        ArrowBuf mergedBuf = getMergedBuf(bufsToMerge);
        OutOfBandMessage msg = new OutOfBandMessage(null, 1, allFragments, 101, 1, sendingMinorFragment, 101, new OutOfBandMessage.Payload(runtimeFilter.build()), new ArrowBuf[] { mergedBuf }, true);
        // Compensate for retain in this constructor
        msg.getBuffers()[0].close();
        return msg;
    }

    public ValueListFilter prepareNewValueListFilter(String fieldName, boolean insertNull, int... values) throws Exception {
        try (ValueListFilterBuilder valueListFilterBuilder = new ValueListFilterBuilder(testAllocator, 1024, (byte) 4, false)) {
            valueListFilterBuilder.setup();
            valueListFilterBuilder.setFieldType(Types.MinorType.INT);
            valueListFilterBuilder.setName(fieldName);
            valueListFilterBuilder.setFieldName(fieldName);
            try (ArrowBuf keyBuf = testAllocator.buffer(4)) {
                for (int val : values) {
                    keyBuf.setInt(0, val);
                    valueListFilterBuilder.insert(keyBuf);
                }
            }
            if (insertNull) {
                valueListFilterBuilder.insertNull();
            }
            return valueListFilterBuilder.build();
        }
    }

    public ValueListFilter prepareNewValueListBooleanFilter(String fieldName, boolean insertNull, boolean insertFalse, boolean insertTrue) throws Exception {
        try (ValueListFilterBuilder valueListFilterBuilder = new ValueListFilterBuilder(testAllocator, 31, (byte) 0, true)) {
            valueListFilterBuilder.setup();
            valueListFilterBuilder.setFieldType(Types.MinorType.BIT);
            valueListFilterBuilder.setName(fieldName);
            valueListFilterBuilder.setFieldName(fieldName);
            if (insertNull) {
                valueListFilterBuilder.insertNull();
            }
            if (insertFalse) {
                valueListFilterBuilder.insertBooleanVal(false);
            }
            if (insertTrue) {
                valueListFilterBuilder.insertBooleanVal(true);
            }
            return valueListFilterBuilder.build();
        }
    }

    public ArrowBuf getMergedBuf(List<ArrowBuf> bufs) {
        long neededSize = bufs.stream().map(ArrowBuf::writerIndex).reduce(0L, Long::sum);
        ArrowBuf mergedBuf = testAllocator.buffer(neededSize);
        long runningIdx = 0;
        for (ArrowBuf buf : bufs) {
            int len = (int) buf.writerIndex();
            Copier.copy(buf.memoryAddress(), mergedBuf.memoryAddress() + runningIdx, len);
            runningIdx += len;
            buf.close();
        }
        return mergedBuf;
    }

    public List<Integer> getValues(ValueListFilter valueListFilter) {
        List<Integer> vals = new ArrayList<>(valueListFilter.getValueCount());
        ArrowBuf valueBuf = valueListFilter.valOnlyBuf();
        for (int i = 0; i < valueListFilter.getValueCount(); i++) {
            vals.add(valueBuf.getInt(i * valueListFilter.getBlockSize()));
        }
        return vals;
    }
}

19 Source : RuntimeFilterManagerTest.java
with Apache License 2.0
from dremio

/**
 * Tests for {@link RuntimeFilterManager}
 */
public class RuntimeFilterManagerTest {

    private static final int MAX_VALS = 1000;

    private BufferAllocator allocator = new RootAllocator();

    private final int majorFragment1 = 1;

    private final int opId1 = 101;

    private final int majorFragment2 = 2;

    private final int opId2 = 202;

    @Test
    public void testPartitionColMergeSingleProbe() {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter frMinor1Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        BloomFilter bf = mockedBloom();
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(frMinor1Target1, Optional.of(bf), EMPTY_LIST, 1);
        assertFalse(entry1.isComplete());
        assertEquals(Sets.newHashSet(2, 3), entry1.getRemainingMinorFragments());
        RuntimeFilter frMinor2Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(frMinor2Target1, Optional.of(mockedBloom()), EMPTY_LIST, 2);
        assertFalse(entry2.isComplete());
        assertEquals(entry1, entry2);
        assertEquals(Sets.newHashSet(3), entry2.getRemainingMinorFragments());
        RuntimeFilter frMinor3Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry3 = filterManager.coalesce(frMinor3Target1, Optional.of(mockedBloom()), EMPTY_LIST, 3);
        assertTrue(entry1.isComplete());
        assertFalse(entry1.isDropped());
        assertEquals(entry1, entry3);
        verify(bf, times(2)).merge(any(BloomFilter.class));
        verify(bf, times(2)).isCrossingMaxFPP();
    }

    @Test
    public void testNonPartitionColMergeSingleProbe() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter frMinor1Target1 = newFilter(opId1, majorFragment1, EMPTY_LIST, Lists.newArrayList("col1", "col2"));
        ValueListFilter vA1 = newValListFilter("col1", Lists.newArrayList(4, 1, 3, 2));
        ValueListFilter vA2 = newValListFilter("col2", Lists.newArrayList(11, 12, 13, 14));
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(frMinor1Target1, Optional.empty(), Lists.newArrayList(vA1, vA2), 1);
        assertFalse(entry1.isComplete());
        assertFalse(entry1.isDropped());
        assertEquals(Sets.newHashSet(2, 3), entry1.getRemainingMinorFragments());
        RuntimeFilter frMinor2Target1 = newFilter(opId1, majorFragment1, EMPTY_LIST, Lists.newArrayList("col1", "col2"));
        ValueListFilter vB1 = newValListFilter("col1", Lists.newArrayList(6, 4, 5, 3));
        ValueListFilter vB2 = newValListFilter("col2", Lists.newArrayList(15, 16, 17, 18));
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(frMinor2Target1, Optional.empty(), Lists.newArrayList(vB1, vB2), 2);
        assertFalse(entry2.isComplete());
        assertFalse(entry1.isDropped());
        assertEquals(entry1, entry2);
        assertEquals(Sets.newHashSet(3), entry2.getRemainingMinorFragments());
        RuntimeFilter frMinor3Target1 = newFilter(opId1, majorFragment1, EMPTY_LIST, Lists.newArrayList("col1", "col2"));
        ValueListFilter vC1 = newValListFilter("col1", Lists.newArrayList(6, 9, 8, 3));
        ValueListFilter vC2 = newValListFilter("col2", Lists.newArrayList(19, 20, 21, 22));
        RuntimeFilterManagerEntry entry3 = filterManager.coalesce(frMinor3Target1, Optional.empty(), Lists.newArrayList(vC1, vC2), 3);
        assertTrue(entry1.isComplete());
        assertFalse(entry1.isDropped());
        assertEquals(entry1, entry3);
        ValueListFilter vR1 = entry1.getNonPartitionColFilter("col1");
        List<Integer> expectedVR1 = Lists.newArrayList(1, 2, 3, 4, 5, 6, 8, 9);
        assertEquals(expectedVR1.size(), vR1.getValueCount());
        List<Integer> actualList1 = new ArrayList<>(vR1.getValueCount());
        IntStream.range(0, vR1.getValueCount()).forEach(i -> actualList1.add(vR1.valOnlyBuf().getInt(i * 4)));
        assertEquals(expectedVR1, actualList1);
        ValueListFilter vR2 = entry1.getNonPartitionColFilter("col2");
        List<Integer> expectedVR2 = IntStream.range(11, 23).mapToObj(i -> i).collect(Collectors.toList());
        assertEquals(expectedVR2.size(), vR2.getValueCount());
        List<Integer> actualList2 = new ArrayList<>(vR1.getValueCount());
        IntStream.range(0, vR2.getValueCount()).forEach(i -> actualList2.add(vR2.valOnlyBuf().getInt(i * 4)));
        assertEquals(expectedVR2, actualList2);
        // No change to incoming buffers refcount
        assertEquals(1, vA1.buf().refCnt());
        assertEquals(1, vA2.buf().refCnt());
        assertEquals(1, vB1.buf().refCnt());
        assertEquals(1, vB2.buf().refCnt());
        assertEquals(1, vC1.buf().refCnt());
        assertEquals(1, vC2.buf().refCnt());
        // Retain the base buffers
        assertEquals(1, vR1.buf().refCnt());
        assertEquals(1, vR2.buf().refCnt());
        AutoCloseables.close(vA1, vA2, vB1, vB2, vC1, vC2);
        AutoCloseables.close(entry1);
    }

    @Test
    public void testMultiCategoryColMerge() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter frMinor1Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        BloomFilter bf = mockedBloom();
        when(bf.getNumBitsSet()).thenReturn(10L);
        ValueListFilter vA1 = newValListFilter("col3", Lists.newArrayList(4, 1, 3, 2));
        ValueListFilter vA2 = newValListFilter("col4", Lists.newArrayList(11, 12, 13, 14));
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(frMinor1Target1, Optional.of(bf), Lists.newArrayList(vA1, vA2), 1);
        assertFalse(entry1.isComplete());
        assertEquals(Sets.newHashSet(2, 3), entry1.getRemainingMinorFragments());
        RuntimeFilter frMinor2Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        ValueListFilter vB1 = newValListFilter("col3", Lists.newArrayList(6, 4, 5, 3));
        ValueListFilter vB2 = newValListFilter("col4", Lists.newArrayList(15, 16, 17, 18));
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(frMinor2Target1, Optional.of(bf), Lists.newArrayList(vB1, vB2), 2);
        assertFalse(entry2.isComplete());
        assertEquals(entry1, entry2);
        assertEquals(Sets.newHashSet(3), entry2.getRemainingMinorFragments());
        RuntimeFilter frMinor3Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        ValueListFilter vC1 = newValListFilter("col3", Lists.newArrayList(6, 9, 8, 3));
        ValueListFilter vC2 = newValListFilter("col4", Lists.newArrayList(19, 20, 21, 22));
        RuntimeFilterManagerEntry entry3 = filterManager.coalesce(frMinor3Target1, Optional.of(bf), Lists.newArrayList(vC1, vC2), 3);
        assertTrue(entry1.isComplete());
        assertFalse(entry1.isDropped());
        assertEquals(entry1, entry3);
        verify(bf, times(2)).merge(any(BloomFilter.class));
        verify(bf, times(2)).isCrossingMaxFPP();
        ValueListFilter vR1 = entry1.getNonPartitionColFilter("col3");
        List<Integer> expectedVR1 = Lists.newArrayList(1, 2, 3, 4, 5, 6, 8, 9);
        assertEquals(expectedVR1.size(), vR1.getValueCount());
        List<Integer> actualList1 = new ArrayList<>(vR1.getValueCount());
        IntStream.range(0, vR1.getValueCount()).forEach(i -> actualList1.add(vR1.valOnlyBuf().getInt(i * 4)));
        assertEquals(expectedVR1, actualList1);
        ValueListFilter vR2 = entry1.getNonPartitionColFilter("col4");
        List<Integer> expectedVR2 = IntStream.range(11, 23).mapToObj(i -> i).collect(Collectors.toList());
        assertEquals(expectedVR2.size(), vR2.getValueCount());
        List<Integer> actualList2 = new ArrayList<>(vR1.getValueCount());
        IntStream.range(0, vR2.getValueCount()).forEach(i -> actualList2.add(vR2.valOnlyBuf().getInt(i * 4)));
        assertEquals(expectedVR2, actualList2);
        AutoCloseables.close(vR1, vR2);
        RuntimeFilter protoFilter = entry1.getCompositeFilter();
        assertEquals(entry1.getPartitionColFilter().getNumBitsSet(), protoFilter.getPartitionColumnFilter().getValueCount());
        assertEquals(entry1.getNonPartitionColFilter("col3").getValueCount(), protoFilter.getNonPartitionColumnFilter(0).getValueCount());
        assertEquals(entry1.getNonPartitionColFilter("col3").getSizeInBytes(), protoFilter.getNonPartitionColumnFilter(0).getSizeBytes());
        assertEquals(entry1.getNonPartitionColFilter("col4").getValueCount(), protoFilter.getNonPartitionColumnFilter(1).getValueCount());
        assertEquals(entry1.getNonPartitionColFilter("col4").getSizeInBytes(), protoFilter.getNonPartitionColumnFilter(1).getSizeBytes());
    }

    @Test
    public void testFilterMergeMultiProbeTargets() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2));
        BloomFilter bf1 = mockedBloom();
        RuntimeFilter frMinor1Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("colT1"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(frMinor1Target1, Optional.of(bf1), EMPTY_LIST, 1);
        BloomFilter bf2 = mockedBloom();
        RuntimeFilter frMinor1Target2 = newFilter(opId2, majorFragment2, Lists.newArrayList("colT2"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(frMinor1Target2, Optional.of(bf2), EMPTY_LIST, 1);
        assertNotEquals(entry1, entry2);
        RuntimeFilter frMinor2Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("colT1"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry3 = filterManager.coalesce(frMinor2Target1, Optional.of(mockedBloom()), EMPTY_LIST, 2);
        assertEquals(entry1, entry3);
        assertTrue(entry1.isComplete());
        assertFalse(entry1.isDropped());
        assertFalse(entry2.isComplete());
        RuntimeFilter frMinor2Target2 = newFilter(opId2, majorFragment2, Lists.newArrayList("colT2"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry4 = filterManager.coalesce(frMinor2Target2, Optional.of(mockedBloom()), EMPTY_LIST, 2);
        assertEquals(entry2, entry4);
        assertTrue(entry2.isComplete());
        assertFalse(entry2.isDropped());
        verify(bf1, times(1)).merge(any(BloomFilter.class));
        verify(bf1, times(1)).isCrossingMaxFPP();
        verify(bf2, times(1)).merge(any(BloomFilter.class));
        verify(bf2, times(1)).isCrossingMaxFPP();
    }

    @Test
    public void testRuntimeFilterDropCapReached() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter frMinor1Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        BloomFilter bf = mockedBloom();
        ValueListFilter vA1 = newValListFilter("col3", Lists.newArrayList(4, 1, 3, 2));
        ValueListFilter vA2 = newValListFilter("col4", Lists.newArrayList(11, 12, 13, 14));
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(frMinor1Target1, Optional.of(bf), Lists.newArrayList(vA1, vA2), 1);
        assertFalse(entry1.isComplete());
        assertEquals(Sets.newHashSet(2, 3), entry1.getRemainingMinorFragments());
        when(bf.isCrossingMaxFPP()).thenReturn(true);
        RuntimeFilter frMinor2Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        ValueListFilter vB1 = newValListFilter("col3", Lists.newArrayList(6, 4, 5, 3));
        ValueListFilter vB2 = newValListFilter("col4", Lists.newArrayList(15, 16, 17, 18));
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(frMinor2Target1, Optional.of(bf), Lists.newArrayList(vB1, vB2), 2);
        assertFalse(entry2.isComplete());
        assertEquals(entry1, entry2);
        assertFalse(entry1.isDropped());
        assertEquals(Sets.newHashSet(3), entry2.getRemainingMinorFragments());
        RuntimeFilter frMinor3Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        ValueListFilter vC1 = newValListFilter("col3", IntStream.range(0, 10_000).mapToObj(i -> i).collect(Collectors.toList()));
        ValueListFilter vC2 = newValListFilter("col4", IntStream.range(0, 10_000).mapToObj(i -> i).collect(Collectors.toList()));
        RuntimeFilterManagerEntry entry3 = filterManager.coalesce(frMinor3Target1, Optional.of(bf), Lists.newArrayList(vC1, vC2), 3);
        assertTrue(entry3.isComplete());
        assertTrue(entry3.isDropped());
    }

    @Test
    public void testPartialFilterDrop() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter frMinor1Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        BloomFilter bf = mockedBloom();
        ValueListFilter vA1 = newValListFilter("col3", Lists.newArrayList(4, 1, 3, 2));
        ValueListFilter vA2 = newValListFilter("col4", Lists.newArrayList(11, 12, 13, 14));
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(frMinor1Target1, Optional.of(bf), Lists.newArrayList(vA1, vA2), 1);
        assertFalse(entry1.isComplete());
        assertEquals(Sets.newHashSet(2, 3), entry1.getRemainingMinorFragments());
        when(bf.isCrossingMaxFPP()).thenReturn(true);
        RuntimeFilter frMinor2Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        ValueListFilter vB1 = newValListFilter("col3", Lists.newArrayList(6, 4, 5, 3));
        ValueListFilter vB2 = newValListFilter("col4", Lists.newArrayList(15, 16, 17, 18));
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(frMinor2Target1, Optional.of(bf), Lists.newArrayList(vB1, vB2), 2);
        assertFalse(entry2.isComplete());
        assertEquals(entry1, entry2);
        assertFalse(entry1.isDropped());
        assertEquals(Sets.newHashSet(3), entry2.getRemainingMinorFragments());
        RuntimeFilter frMinor3Target1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), Lists.newArrayList("col3", "col4"));
        ValueListFilter vC1 = newValListFilter("col3", IntStream.range(0, 10_000).mapToObj(i -> i).collect(Collectors.toList()));
        ValueListFilter vC2 = newValListFilter("col4", IntStream.range(0, 10).mapToObj(i -> i).collect(Collectors.toList()));
        RuntimeFilterManagerEntry entry3 = filterManager.coalesce(frMinor3Target1, Optional.of(bf), Lists.newArrayList(vC1, vC2), 3);
        assertTrue(entry3.isComplete());
        assertFalse(entry3.isDropped());
        assertNull(entry1.getNonPartitionColFilter("col3"));
        assertNotNull(entry1.getNonPartitionColFilter("col4"));
        assertNull(entry1.getPartitionColFilter());
        RuntimeFilter filterMsg = entry1.getCompositeFilter();
        assertFalse(filterMsg.hasPartitionColumnFilter());
        assertTrue(filterMsg.getNonPartitionColumnFilterList().size() == 1);
        assertEquals("col4", filterMsg.getNonPartitionColumnFilterList().get(0).getColumns(0));
        filterManager.close();
    }

    @Test
    public void testFilterDropDueToFppTolerance() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter filter1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        BloomFilter bf = mockedBloom();
        when(bf.isCrossingMaxFPP()).thenReturn(true);
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(filter1, Optional.of(bf), EMPTY_LIST, 1);
        RuntimeFilter filter2 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(filter2, Optional.of(mockedBloom()), EMPTY_LIST, 2);
        assertTrue(entry1.isDropped());
        assertEquals(entry1, entry2);
        verify(bf, times(1)).isCrossingMaxFPP();
        filterManager.remove(entry1);
        assertEquals(1, filterManager.getFilterDropCount());
    }

    @Test
    public void testDropFilterDueToMergeFailure() throws Exception {
        RuntimeFilterManager filterManager = new RuntimeFilterManager(allocator, MAX_VALS, Sets.newHashSet(1, 2, 3));
        RuntimeFilter filter1 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        BloomFilter bf = mockedBloom();
        doThrow(new IllegalArgumentException("Incompatible BloomFilter, different hashing technique.")).when(bf).merge(any(BloomFilter.class));
        RuntimeFilterManagerEntry entry1 = filterManager.coalesce(filter1, Optional.of(bf), EMPTY_LIST, 1);
        RuntimeFilter filter2 = newFilter(opId1, majorFragment1, Lists.newArrayList("col1", "col2"), EMPTY_LIST);
        RuntimeFilterManagerEntry entry2 = filterManager.coalesce(filter2, Optional.of(mockedBloom()), EMPTY_LIST, 2);
        assertTrue(entry1.isDropped());
        assertEquals(entry1, entry2);
        verify(bf, times(1)).merge(any(BloomFilter.class));
        filterManager.remove(entry1);
        assertEquals(1, filterManager.getFilterDropCount());
    }

    private BloomFilter mockedBloom() {
        BloomFilter bloom = mock(BloomFilter.class);
        when(bloom.isCrossingMaxFPP()).thenReturn(false);
        doNothing().when(bloom).merge(any(BloomFilter.class));
        return bloom;
    }

    private RuntimeFilter newFilter(int opId, int majorFragment, List<String> partitionColNames, List<String> nonPartitionColNames) {
        CompositeColumnFilter partitionColFilter = CompositeColumnFilter.newBuilder().setFilterType(BLOOM_FILTER).addAllColumns(partitionColNames).setSizeBytes(64).build();
        List<CompositeColumnFilter> nonPartitionColFilters = nonPartitionColNames.stream().map(f -> {
            CompositeColumnFilter npColFilter = CompositeColumnFilter.newBuilder().setFilterType(VALUE_LIST).addAllColumns(Lists.newArrayList(f)).setSizeBytes(64).build();
            return npColFilter;
        }).collect(Collectors.toList());
        RuntimeFilter runtimeFilter = RuntimeFilter.newBuilder().setProbeScanMajorFragmentId(majorFragment).setProbeScanOperatorId(opId).setPartitionColumnFilter(partitionColFilter).addAllNonPartitionColumnFilter(nonPartitionColFilters).build();
        return runtimeFilter;
    }

    private ValueListFilter newValListFilter(String col1, List<Integer> vals) throws Exception {
        try (ValueListFilterBuilder builder = new ValueListFilterBuilder(allocator, vals.size(), (byte) 4, false);
            ArrowBuf keyBuf = allocator.buffer(4)) {
            builder.setup();
            builder.setName("Test").setFieldType(Types.MinorType.INT, (byte) 0, (byte) 0).setFieldName(col1);
            for (int v : vals) {
                keyBuf.setInt(0, v);
                builder.insert(keyBuf);
            }
            return builder.build();
        }
    }
}

19 Source : ArrowInPlaceMergeSorterTest.java
with Apache License 2.0
from dremio

public clreplaced ArrowInPlaceMergeSorterTest {

    private BufferAllocator testAllocator = new RootAllocator();

    @Test
    public void testSortInts() {
        try (ArrowBuf baseValues = testAllocator.buffer(1024)) {
            Random random = new Random(System.nanoTime());
            Set<Integer> inputVals = IntStream.range(0, 100).mapToObj(i -> random.nextInt()).collect(Collectors.toSet());
            baseValues.writerIndex(0);
            inputVals.forEach(baseValues::writeInt);
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, 4, ArrowCrossBufComparatorProvider.get4ByteNumComparator());
            sorter.sort(0, inputVals.size());
            IntStream.range(1, inputVals.size()).forEach(i -> assertTrue(baseValues.getInt(4 * (i - 1)) <= baseValues.getInt(4 * i)));
        }
    }

    @Test
    public void testSortFloat() {
        try (ArrowBuf baseValues = testAllocator.buffer(1024)) {
            Random random = new Random(System.nanoTime());
            Set<Float> inputVals = IntStream.range(0, 100).mapToObj(i -> random.nextFloat()).collect(Collectors.toSet());
            baseValues.writerIndex(0);
            inputVals.forEach(baseValues::writeFloat);
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, 4, ArrowCrossBufComparatorProvider.get4ByteNumComparator());
            sorter.sort(0, inputVals.size());
            IntStream.range(1, inputVals.size()).forEach(i -> assertTrue(baseValues.getFloat(4 * (i - 1)) <= baseValues.getFloat(4 * i)));
        }
    }

    @Test
    public void testSortDouble() {
        try (ArrowBuf baseValues = testAllocator.buffer(1024)) {
            Random random = new Random(System.nanoTime());
            Set<Double> inputVals = IntStream.range(0, 100).mapToObj(i -> random.nextDouble()).collect(Collectors.toSet());
            baseValues.writerIndex(0);
            inputVals.forEach(baseValues::writeDouble);
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, 8, ArrowCrossBufComparatorProvider.get8ByteNumComparator());
            sorter.sort(0, inputVals.size());
            IntStream.range(1, inputVals.size()).forEach(i -> assertTrue(baseValues.getDouble(8 * (i - 1)) <= baseValues.getDouble(8 * i)));
        }
    }

    @Test
    public void testSortShort() {
        try (ArrowBuf baseValues = testAllocator.buffer(1024)) {
            Random random = new Random(System.nanoTime());
            Set<Short> inputVals = IntStream.range(0, 100).mapToObj(i -> (short) random.nextInt(Short.MAX_VALUE)).collect(Collectors.toSet());
            baseValues.writerIndex(0);
            inputVals.forEach(baseValues::writeShort);
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, 2, ArrowCrossBufComparatorProvider.get2ByteNumComparator());
            sorter.sort(0, inputVals.size());
            IntStream.range(1, inputVals.size()).forEach(i -> assertTrue(baseValues.getShort(2 * (i - 1)) <= baseValues.getShort(2 * i)));
        }
    }

    @Test
    public void testBigInt() {
        try (ArrowBuf baseValues = testAllocator.buffer(2048)) {
            Random random = new Random(System.nanoTime());
            byte[] inputVal = new byte[16];
            List<BigInteger> expectedVals = new ArrayList<>(100);
            for (int i = 0; i < 100; i++) {
                random.nextBytes(inputVal);
                baseValues.setBytes(i * 16, inputVal);
                expectedVals.add(new BigInteger(inputVal));
            }
            Collections.sort(expectedVals);
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, 16, ArrowCrossBufComparatorProvider.getCustomByteNumComparator(16));
            sorter.sort(0, expectedVals.size());
            List<BigInteger> actualVals = new ArrayList<>(100);
            for (int i = 0; i < 100; i++) {
                baseValues.getBytes(i * 16, inputVal);
                actualVals.add(new BigInteger(inputVal));
            }
            assertEquals(expectedVals, actualVals);
        }
    }

    @Test
    public void testSortDecimals() {
        try (ArrowBuf baseValues = testAllocator.buffer(2048)) {
            Random random = new Random(System.nanoTime());
            byte[] inputVal = new byte[16];
            List<BigDecimal> expectedVals = new ArrayList<>(10);
            for (int i = 0; i < 10; i++) {
                random.nextBytes(inputVal);
                BigDecimal val = new BigDecimal(new BigInteger(inputVal), 2);
                expectedVals.add(val);
                baseValues.setBytes(i * 16, DecimalUtils.convertBigDecimalToArrowByteArray(val));
            }
            Collections.sort(expectedVals);
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, 16, ArrowCrossBufComparatorProvider.getDecimalComparator(16));
            sorter.sort(0, expectedVals.size());
            List<BigDecimal> actualVals = new ArrayList<>(10);
            for (int i = 0; i < 10; i++) {
                byte[] tmp = new byte[16];
                final BigDecimal bigDecimalVal = DecimalUtils.getBigDecimalFromLEBytes(baseValues.memoryAddress() + (i * 16), tmp, 2);
                actualVals.add(bigDecimalVal);
            }
            assertEquals(expectedVals, actualVals);
        }
    }

    @Test
    public void testSortString() {
        final int blockSize = 36;
        try (ArrowBuf baseValues = testAllocator.buffer(blockSize * 100)) {
            List<String> inputVals = IntStream.range(0, 100).mapToObj(i -> UUID.randomUUID().toString()).collect(Collectors.toList());
            for (int i = 0; i < inputVals.size(); i++) {
                byte[] b = inputVals.get(i).getBytes(StandardCharsets.UTF_8);
                byte len = (byte) b.length;
                baseValues.setByte(i * blockSize, len);
                baseValues.setBytes((i * blockSize) + 1, b);
            }
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, blockSize, new ValueListVarWidthFilterComparator(blockSize));
            sorter.sort(0, inputVals.size());
            for (int i = blockSize; i < blockSize * inputVals.size(); i += blockSize) {
                byte[] val1 = new byte[baseValues.getByte(i - blockSize)];
                baseValues.getBytes(i - blockSize + 1, val1);
                byte[] val2 = new byte[baseValues.getByte(i)];
                baseValues.getBytes(i + 1, val2);
                assertTrue(new String(val1, StandardCharsets.UTF_8).compareTo(new String(val2, StandardCharsets.UTF_8)) <= 0);
            }
        }
    }

    @Test
    public void testSortSmallStrings() {
        final int blockSize = 16;
        try (ArrowBuf baseValues = testAllocator.buffer(blockSize * 100)) {
            List<String> inputVals = IntStream.range(0, 100).mapToObj(i -> UUID.randomUUID().toString().substring(0, 10)).collect(Collectors.toList());
            for (int i = 0; i < inputVals.size(); i++) {
                byte[] b = inputVals.get(i).getBytes(StandardCharsets.UTF_8);
                byte len = (byte) b.length;
                baseValues.setByte(i * blockSize, len);
                baseValues.setBytes((i * blockSize) + (blockSize - len), b);
            }
            ArrowInPlaceMergeSorter sorter = new ArrowInPlaceMergeSorter(baseValues, blockSize, new ValueListVarWidthFilterComparator(blockSize));
            sorter.sort(0, inputVals.size());
            for (int i = blockSize; i < blockSize * inputVals.size(); i += blockSize) {
                int len1 = baseValues.getByte(i - blockSize);
                byte[] val1 = new byte[len1];
                baseValues.getBytes(i - len1, val1);
                int len2 = baseValues.getByte(i);
                byte[] val2 = new byte[len2];
                baseValues.getBytes(i + blockSize - len2, val2);
                assertTrue(new String(val1, StandardCharsets.UTF_8).compareTo(new String(val2, StandardCharsets.UTF_8)) <= 0);
            }
        }
    }
}

19 Source : SplaySorter.java
with Apache License 2.0
from dremio

public SelectionVector4 getFinalSort(BufferAllocator copyTargetAllocator, int targetBatchSize) {
    return treeManager.getFinalSort(copyTargetAllocator, targetBatchSize);
}

19 Source : QuickSorter.java
with Apache License 2.0
from dremio

public SelectionVector4 getFinalSort(BufferAllocator copyTargetAllocator, int targetBatchSize) {
    return quickSorter.getFinalSort(copyTargetAllocator, targetBatchSize);
}

19 Source : ExternalSortTracer.java
with Apache License 2.0
from dremio

public void setDiskRunCopyAllocatorState(final BufferAllocator diskRunCopyAllocator) {
    if (diskRunCopyAllocator == null) {
        diskRunCopyAllocatorState.valid = false;
    } else {
        diskRunCopyAllocatorState.valid = true;
        diskRunCopyAllocatorState.name = diskRunCopyAllocator.getName();
        diskRunCopyAllocatorState.allocatedMemory = diskRunCopyAllocator.getAllocatedMemory();
        diskRunCopyAllocatorState.peakAllocation = diskRunCopyAllocator.getPeakMemoryAllocation();
        diskRunCopyAllocatorState.maxAllowed = diskRunCopyAllocator.getLimit();
        diskRunCopyAllocatorState.headRoom = diskRunCopyAllocator.getHeadroom();
        diskRunCopyAllocatorState.initReservation = diskRunCopyAllocator.getInitReservation();
    }
}

19 Source : ExternalSortTracer.java
with Apache License 2.0
from dremio

public void setSpillCopyAllocatorState(final BufferAllocator spillCopyAllocator) {
    if (spillCopyAllocator == null) {
        spillCopyAllocatorState.valid = false;
    } else {
        spillCopyAllocatorState.valid = true;
        spillCopyAllocatorState.name = spillCopyAllocator.getName();
        spillCopyAllocatorState.allocatedMemory = spillCopyAllocator.getAllocatedMemory();
        spillCopyAllocatorState.peakAllocation = spillCopyAllocator.getPeakMemoryAllocation();
        spillCopyAllocatorState.maxAllowed = spillCopyAllocator.getLimit();
        spillCopyAllocatorState.headRoom = spillCopyAllocator.getHeadroom();
        spillCopyAllocatorState.initReservation = spillCopyAllocator.getInitReservation();
    }
}

19 Source : ExternalSortTracer.java
with Apache License 2.0
from dremio

public void setExternalSortAllocatorState(final BufferAllocator sortAllocator) {
    /* this is the parent allocator passed to ExternalSortOperator as part of
     * operator context. it is then used by MemoryRun and DiskRunManager to create
     * child allocators.
     */
    if (sortAllocator == null) {
        sortAllocatorState.valid = false;
    } else {
        sortAllocatorState.valid = true;
        sortAllocatorState.name = sortAllocator.getName();
        sortAllocatorState.allocatedMemory = sortAllocator.getAllocatedMemory();
        sortAllocatorState.peakAllocation = sortAllocator.getPeakMemoryAllocation();
        sortAllocatorState.maxAllowed = sortAllocator.getLimit();
        sortAllocatorState.headRoom = sortAllocator.getHeadroom();
        sortAllocatorState.initReservation = sortAllocator.getInitReservation();
    }
}
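
The block comment in the method above describes the allocator hierarchy: the sort allocator is a parent, passed in through the operator context, from which MemoryRun and DiskRunManager create child allocators, and the tracer simply snapshots the standard accounting getters. A minimal sketch of reading those same counters from a child allocator (a hedged illustration, not Dremio code; the class and allocator names are made up and a recent Apache Arrow release is assumed):

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;

public class AllocatorStatsExample {
    public static void main(String[] args) {
        try (BufferAllocator root = new RootAllocator(Long.MAX_VALUE);
            BufferAllocator sort = root.newChildAllocator("external-sort", 0, 64 * 1024 * 1024)) {
            // The same getters ExternalSortTracer copies into its state objects.
            System.out.println(sort.getName());
            System.out.println(sort.getAllocatedMemory());
            System.out.println(sort.getPeakMemoryAllocation());
            System.out.println(sort.getLimit());
            System.out.println(sort.getHeadroom());
            System.out.println(sort.getInitReservation());
        }
    }
}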

19 Source : IncomingDataBatch.java
with Apache License 2.0
from dremio

/**
 * Check if the batch size is acceptable.
 * Throws an exception if it is not acceptable.
 */
public void checkAcceptance(final BufferAllocator allocator) {
    AllocatorUtil.ensureHeadroom(allocator, size());
    sender.increment();
    sender.sendOk();
}

19 Source : ValueListFilterBuilder.java
with Apache License 2.0
from dremio

/**
 * Get a quick plain instance for non-continuous insertion cases.
 * @param allocator
 * @param blockSize
 * @param maxElements
 * @return
 */
public static ValueListFilter buildPlainInstance(BufferAllocator allocator, byte blockSize, long maxElements, boolean isBoolean) {
    if (isBoolean) {
        checkArgument(blockSize == 0, "Block size should be zero for boolean fields");
    } else {
        checkArgument(blockSize > 0, "Block size should be greater than zero for non-boolean fields");
    }
    long valueBufferSize = isBoolean ? 0 : (maxElements * blockSize);
    long minRequiredCapacity = valueBufferSize + ValueListFilter.META_SIZE;
    ValueListFilter valueListFilter = new ValueListFilter(allocator.buffer(minRequiredCapacity));
    valueListFilter.setBoolField(isBoolean);
    valueListFilter.setBlockSize(blockSize);
    return valueListFilter;
}

19 Source : GlobalDictionaryBuilder.java
with Apache License 2.0
from dremio

public static VectorContainer readDictionary(FileSystem fs, Path dictionaryRootDir, String columnFullPath, BufferAllocator bufferAllocator) throws IOException {
    return readDictionary(fs, dictionaryFilePath(dictionaryRootDir, columnFullPath), bufferAllocator);
}

19 Source : ArrowFileReader.java
with Apache License 2.0
from dremio

public static RecordBatchHolder getEmptyBatch(ArrowFileFooter footer, BufferAllocator allocator) throws IOException {
    return getEmptyBatch(fromBean(footer), allocator);
}

19 Source : DirectWriterCommand.java
with Apache License 2.0
from dremio

private OperatorContextImpl createContext(Writer writer) {
    BufferAllocator allocator = context.getAllocator().newChildAllocator("direct-command", 0, Long.MAX_VALUE);
    final OperatorStats stats = new OperatorStats(new OpProfileDef(0, 0, 0), allocator);
    final OperatorContextImpl oc = new OperatorContextImpl(context.getConfig(), FragmentHandle.newBuilder().setQueryId(context.getQueryId()).setMajorFragmentId(0).setMinorFragmentId(0).build(), writer, allocator, allocator, null, stats, null, null, null, context.getFunctionRegistry(), null, context.getOptions(), null, NodeDebugContextProvider.NOOP, 60000, null, ImmutableList.of(), ImmutableList.of(), null, new EndpointsIndex(), null);
    return oc;
}

19 Source : TestUDFs.java
with Apache License 2.0
from dremio

/**
 * test UDFs
 */
public class TestUDFs extends BaseTestServer {

    private BufferAllocator allocator;

    @Before
    public void setUp() {
        allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE);
    }

    @After
    public void cleanUp() {
        allocator.close();
    }

    @Test
    public void testFormatList() throws Exception {
        String sql = String.format("select %s(b, ',') as a, b from cp.\"json/nested.json\"", FormatList.NAME);
        try (final JobDataFragment result = runQueryAndGetResults(sql)) {
            List<String> actual = new ArrayList<>();
            for (int i = 0; i < result.getReturnedRowCount(); i++) {
                Object a = result.extractValue("a", i);
                Object b = result.extractValue("b", i);
                actual.add(String.format("%s => %s", b, a));
            }
            replacedert.replacedertEquals(Arrays.asList("[\"A\",\"B\",\"C\"] => A,B,C", "[\"D\"] => D", "[\"E\",\"F\"] => E,F", "[] => "), actual);
        }
    }

    private JobDataFragment runQueryAndGetResults(String sql) throws JobNotFoundException {
        return submitJobAndGetData(l(JobsService.class), JobRequest.newBuilder().setSqlQuery(new SqlQuery(sql, ImmutableList.of("cp"), DEFAULT_USERNAME)).build(), 0, 500, allocator);
    }

    @Test
    public void testFormatListWithWhereWithNull() throws Exception {
        String sql = String.format("select %s(b, ',') as a, b from cp.\"json/nested.json\" where b is null", FormatList.NAME);
        try (final JobDataFragment result = runQueryAndGetResults(sql)) {
            List<String> actual = new ArrayList<>();
            for (int i = 0; i < result.getReturnedRowCount(); i++) {
                Object a = result.extractValue("a", i);
                Object b = result.extractValue("b", i);
                actual.add(String.format("%s => %s", b, a));
            }
            Assert.assertEquals(0, actual.size());
        }
    }

    @Test
    public void testUnionType() throws Exception {
        String sql = "select * from cp.\"json/mixed.json\"";
        try (final JobDataFragment result = runQueryAndGetResults(sql)) {
            List<String> actual = new ArrayList<>();
            for (int i = 0; i < result.getReturnedRowCount(); i++) {
                Object a = result.extractValue("a", i);
                Object b = result.extractValue("b", i);
                String type = result.extractType("a", i).name();
                actual.add(String.format("%s, %s:%s", b, a, type));
            }
            replacedert.replacedertEquals(Arrays.asList("123, abc:TEXT", "123, 123:INTEGER", "123.0, 0.123:FLOAT", "123, {\"foo\":\"bar\"}:MAP", "123, 123:INTEGER", "100.0, 0.123:FLOAT", "0.0, 0.123:FLOAT", "-123.0, 0.123:FLOAT", "0, 0.123:FLOAT"), actual);
        }
    }

    private void validateCleanDataSingleField(String call, String col, String... expected) throws JobNotFoundException {
        // {"a": "abc", "b":"123" }
        // {"a": 123, "b":123 }
        // {"a": 0.123, "b":123.0 }
        // {"a": { "foo" : "bar"}, "b":123 }
        String sql = format("select %s as actual, %s from cp.\"json/mixed.json\"", call, col);
        try (final JobDataFragment result = runQueryAndGetResults(sql)) {
            Assert.assertEquals(expected.length, result.getReturnedRowCount());
            for (int i = 0; i < expected.length; i++) {
                String expRow = expected[i];
                String actual = result.extractString("actual", i);
                replacedert.replacedertEquals(call + " on " + col + "=" + result.extractString(col, i) + " row:" + i, expRow, actual);
            }
        }
    }

    @Test
    public void testCleanDataBoolean() throws Exception {
        validateCleanDataSingleField("clean_data_to_Boolean(d, 1, 0, false)", "a", "true", "false", "true", "false", "true", "true", "true", "false", "false");
    }

    @Test
    public void testCleanDataDefault() throws Exception {
        validateCleanDataSingleField("clean_data_to_TEXT(a, 0, 0, 'blah')", "a", "abc", "blah", "blah", "blah", "blah", "blah", "blah", "blah", "blah");
    }

    @Test
    public void testCleanDataCast() throws Exception {
        validateCleanDataSingleField("clean_data_to_TEXT(a, 1, 1, '')", "a", "abc", "123", "0.123", "{\n  \"foo\" : \"bar\"\n}", "123", "0.123", "0.123", "0.123", "0.123");
    }

    @Test
    public void testCleanDataNull() throws Exception {
        validateCleanDataSingleField("clean_data_to_TEXT(a, 0, 1, '')", "a", "abc", null, null, null, null, null, null, null, null);
    }

    @Test
    public void testIsCleanDataFloat() throws Exception {
        validateCleanDataSingleField("is_convertible_data(c, 1, cast('FLOAT' as VARCHAR))", "c", "true", "true", "true", "true", "true", "true", "true", "true", "true");
    }

    @Test
    public void testConvertToFloatScientificNotation() throws Exception {
        validateCleanDataSingleField("convert_to_FLOAT(c, 1, 1, 0.0)", "c", "3.2E-90", "-5.0E-10", "1.0E7", "3.0E10", "-5.0E-10", "2.0E10", "2.0E7", "-3.0E10", "5.0E-10");
    }

    @Test
    public void testCastScientificNotation() throws Exception {
        validateCleanDataSingleField("cast(c as DOUBLE)", "c", "3.2E-90", "-5.0E-10", "1.0E7", "3.0E10", "-5.0E-10", "2.0E10", "2.0E7", "-3.0E10", "5.0E-10");
    }

    @Test
    public void testCleanDataIntCast() throws Exception {
        validateCleanDataSingleField("clean_data_to_Integer(b, 1, 0, 0)", "b", "123", "123", "123", "123", "123", "100", "0", "-123", "0");
    }

    @Test
    public void testCleanDataIntDefault() throws Exception {
        validateCleanDataSingleField("clean_data_to_Integer(b, 0, 0, 0)", "b", "0", "123", "0", "123", "123", "0", "0", "0", "0");
    }

    @Test
    public void testCleanDataIntDefaultNull() throws Exception {
        validateCleanDataSingleField("clean_data_to_Integer(b, 0, 1, 0)", "b", null, "123", null, "123", "123", null, null, null, "0");
    }

    @Test
    public void testExtractListSingle0() throws Exception {
        validate("select a[0] as a from cp.\"json/extract_list.json\"", "a", "Shopping", "Bars", "Bars");
    }

    @Test
    public void testExtractListSingle4() throws Exception {
        validate("select a[4] as a from cp.\"json/extract_list.json\"", "a", null, "Restaurants", null);
    }

    private void validate(String sql, String col, String... values) throws Exception {
        try (final JobDataFragment result = runQueryAndGetResults(sql)) {
            List<String> actual = new ArrayList<>();
            for (int i = 0; i < result.getReturnedRowCount(); i++) {
                actual.add(result.extractString(col, i));
            }
            Assert.assertEquals(asList(values), actual);
        }
    }

    @Test
    @Ignore("flakey")
    public void testExtractListRange4() throws Exception {
        try (final JobDataFragment result = runQueryAndGetResults("select extract_list_range(a, 2, 1, 3, -1)['root'] as a from cp.\"json/extract_list.json\"")) {
            String column = "a";
            Assert.assertEquals(result.getReturnedRowCount(), 3);
            Assert.assertTrue(result.extractString(column, 0).equals("[]") || result.extractString(column, 0).equals("null"));
            Assert.assertEquals("[\"Nightlife\"]", result.extractString(column, 1));
            Assert.assertTrue(result.extractString(column, 0).equals("[]") || result.extractString(column, 0).equals("null"));
        }
    }

    @Test
    public void testTitleCase() throws Exception {
        validate("select title(a) as a from cp.\"json/convert_case.json\"", "a", "Los Angeles", "Los Angeles", "Los Angeles", "Los Angeles", "Los Angeles", "Washington", "Washington");
    }

    @Test
    public void testDremioTypeOfInteger() throws Exception {
        validate("SELECT \"t\" AS \"t2\"" + "FROM cp.\"json/mixed_example.json\" WHERE dremio_type_of(\"t\") = 'INTEGER' LIMIT 5", "t2", "0", "0", "0", "0", "0");
    }

    @Test
    public void testDremioTypeOfText() throws Exception {
        validate("SELECT \"t\" AS \"t2\"" + "FROM cp.\"json/mixed_example.json\" WHERE dremio_type_of(\"t\") = 'TEXT' ORDER BY t2 DESC LIMIT 5", "t2", "zero", "zero", "nan", "nan", "nan");
    }

    @Test
    public void testDremioTypeOfFloat() throws Exception {
        validate("SELECT \"t\" AS \"t2\" " + "FROM cp.\"json/mixed_example.json\" WHERE dremio_type_of(\"t\") = 'FLOAT'", "t2", "0.0", "0.0", "0.0", "1.0", "1.0");
    }

    @Test
    public void testExtractMap() throws Exception {
        String extracted = "{\"close\":\"19:00\",\"open\":\"10:00\"}";
        validate("SELECT map_table.a.Tuesday as t " + "FROM cp.\"json/extract_map.json\" map_table", "t", extracted, extracted, extracted);
    }

    @Test
    public void testExtractMapNested() throws Exception {
        String extracted = "19:00";
        validate("SELECT map_table.a.Tuesday.\"close\" as t " + "FROM cp.\"json/extract_map.json\" map_table", "t", extracted, extracted, extracted);
    }
}

19 Source : TestHistogramGenerator.java
with Apache License 2.0
from dremio

/**
 * Test class for Histogram Generation
 */
public class TestHistogramGenerator {

    private BufferAllocator allocator;

    @Before
    public void setUp() {
        allocator = DremioRootAllocator.create(0, Long.MAX_VALUE);
    }

    @After
    public void cleanUp() {
        allocator.close();
    }

    @Test
    public void testTimeIntervals() throws Exception {
        HistogramGenerator hg = new HistogramGenerator(null);
        String myTimeStr = "2016-02-29 13:59:01";
        DateTimeFormatter dtf = DateFunctionsUtils.getISOFormatterForFormatString("YYYY-MM-DD HH24:MI:SS");
        LocalDateTime myTime = dtf.parseLocalDateTime(myTimeStr);
        System.out.println("Exact time: " + myTime + ", Month: " + myTime.getMonthOfYear());
        for (HistogramGenerator.TruncEvalEnum value : HistogramGenerator.TruncEvalEnum.getSortedAscValues()) {
            LocalDateTime roundedDown = hg.roundTime(myTime, value, true);
            LocalDateTime roundedUp = hg.roundTime(myTime, value, false);
            switch(value) {
                case SECOND:
                    assertEquals(myTime, roundedDown);
                    assertEquals(myTime, roundedUp);
                    break;
                case MINUTE:
                    assertEquals(dtf.parseLocalDateTime("2016-02-29 13:59:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2016-02-29 14:00:00"), roundedUp);
                    break;
                case HOUR:
                    assertEquals(dtf.parseLocalDateTime("2016-02-29 13:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2016-02-29 14:00:00"), roundedUp);
                    break;
                case DAY:
                    assertEquals(dtf.parseLocalDateTime("2016-02-29 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2016-03-01 00:00:00"), roundedUp);
                    break;
                case WEEK:
                    assertEquals(dtf.parseLocalDateTime("2016-02-29 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2016-03-07 00:00:00"), roundedUp);
                    break;
                case MONTH:
                    assertEquals(dtf.parseLocalDateTime("2016-02-01 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2016-03-01 00:00:00"), roundedUp);
                    break;
                case QUARTER:
                    assertEquals(dtf.parseLocalDateTime("2016-01-01 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2016-04-01 00:00:00"), roundedUp);
                    break;
                case YEAR:
                    assertEquals(dtf.parseLocalDateTime("2016-01-01 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2017-01-01 00:00:00"), roundedUp);
                    break;
                case DECADE:
                    assertEquals(dtf.parseLocalDateTime("2010-01-01 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2020-01-01 00:00:00"), roundedUp);
                    break;
                case CENTURY:
                    assertEquals(dtf.parseLocalDateTime("2000-01-01 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("2100-01-01 00:00:00"), roundedUp);
                    break;
                case MILLENNIUM:
                    assertEquals(dtf.parseLocalDateTime("2000-01-01 00:00:00"), roundedDown);
                    assertEquals(dtf.parseLocalDateTime("3000-01-01 00:00:00"), roundedUp);
                    break;
                default:
                    fail();
            }
        }
    }

    @Test
    public void testProduceRanges() {
        List<Number> ranges = new ArrayList<>();
        HistogramGenerator.produceRanges(ranges, new LocalDateTime(1970, 1, 1, 1, 0, 0), new LocalDateTime(1970, 1, 1, 11, 59, 0), TruncEvalEnum.HOUR);
        List<Number> expected = new ArrayList<>();
        for (int i = 0; i < 13; i++) {
            expected.add((i + 1) * 3600_000L);
        }
        Assert.assertEquals(expected.size(), ranges.size());
        Assert.assertEquals(expected, ranges);
    }

    @Test
    public void testSelectionCount() {
        testSelectionCountHelper("colName = 'val1'", 1562383L, DataType.TEXT, ImmutableSet.of("val1"));
        testSelectionCountHelper("colName = 'val1' OR colName = 'val2'", 1562L, DataType.TEXT, ImmutableSet.of("val1", "val2"));
        Set<String> selectedValues = new LinkedHashSet<>(Arrays.asList("val1", null));
        testSelectionCountHelper("colName = 'val1' OR colName IS NULL", 1562L, DataType.TEXT, selectedValues);
        testSelectionCountHelper("colName = 1 OR colName = 2", 15432L, DataType.INTEGER, ImmutableSet.of("1", "2"));
        testSelectionCountHelper("colName = DATE '2017-05-03' OR colName = DATE '2035-12-12'", 23L, DataType.DATE, ImmutableSet.of("2017-05-03", "2035-12-12"));
        testSelectionCountHelper("colName = TIMESTAMP '2017-05-03 12:23:24' OR colName = TIMESTAMP '2035-12-12 05:23:23'", 2L, DataType.DATETIME, ImmutableSet.of("2017-05-03 12:23:24", "2035-12-12 05:23:23"));
        testSelectionCountHelper("colName = TIME '12:23:24' OR colName = TIME '05:23:23'", 6L, DataType.TIME, ImmutableSet.of("12:23:24", "05:23:23"));
        testSelectionCountHelper("colName = ''", 1562383L, DataType.TEXT, ImmutableSet.of(""));
        testSelectionCountHelper(null, 0L, DataType.INTEGER, ImmutableSet.<String>of());
    }

    private void testSelectionCountHelper(final String expFilter, final long expCount, DataType type, Set<String> selectedValues) {
        final DatasetPath datasetPath = new DatasetPath(Arrays.asList("dfs", "parquet", "lineitem.parquet"));
        final DatasetVersion datasetVersion = DatasetVersion.newVersion();
        final QueryExecutor queryExecutor = mock(QueryExecutor.class);
        when(queryExecutor.runQueryWithListener(any(SqlQuery.class), any(QueryType.class), any(DatasetPath.class), any(DatasetVersion.class), any(JobStatusListener.class))).thenAnswer((Answer<JobData>) invocation -> {
            final String query = invocation.getArgumentAt(0, SqlQuery.class).getSql();
            final JobStatusListener listener = invocation.getArgumentAt(4, JobStatusListener.class);
            JobData jobData = mock(JobDataWrapper.class);
            when(jobData.getJobResultsTable()).thenReturn("jobResults.previewJob");
            listener.jobCompleted();
            return jobData;
        });
        when(queryExecutor.runQueryAndWaitForCompletion(any(SqlQuery.class), any(QueryType.class), any(DatasetPath.class), any(DatasetVersion.class))).thenAnswer((Answer<JobData>) invocation -> {
            final String query = invocation.getArgumentAt(0, SqlQuery.class).getSql();
            JobData jobData = mock(JobDataWrapper.class);
            if (expFilter != null) {
                assertTrue(query, query.contains(expFilter));
            } else {
                assertFalse(query, query.contains("WHERE"));
            }
            JobDataFragment fragment = mock(JobDataFragment.class);
            when(jobData.truncate(allocator, 1)).thenReturn(fragment);
            when(fragment.extractValue("dremio_selection_count", 0)).thenReturn(expCount);
            return jobData;
        });
        HistogramGenerator hg = new HistogramGenerator(queryExecutor);
        long count = hg.getSelectionCount(datasetPath, datasetVersion, new SqlQuery("SELECT * FROM dataset", "user"), type, "colName", selectedValues, allocator);
        assertEquals(expCount, count);
    }
}

19 Source : TestSpacesStoragePlugin.java
with Apache License 2.0
from dremio

/**
 * Test spaces storage plugin.
 */
public class TestSpacesStoragePlugin extends BaseTestServer {

    private BufferAllocator allocator;

    @Before
    public void prepare() {
        allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE);
    }

    @After
    public void clear() {
        allocator.close();
    }

    public static void setup(DACDaemon dremioDaemon) throws Exception {
        getPopulator().populateTestUsers();
        final File root = getPopulator().getPath().toFile();
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(root, "testSpaceA.json")))) {
            for (int i = 0; i < 1000; ++i) {
                writer.write(String.format("{ \"A\" : %d , \"B\": %d }", i, i));
            }
        }
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(root, "testSpaceB.json")))) {
            for (int i = 500; i < 1000; ++i) {
                writer.write(String.format("{ \"C\" : %d , \"D\": %d }", i, i));
            }
        }
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(root, "testSpaceC.json")))) {
            for (int i = 750; i < 1000; ++i) {
                writer.write(String.format("{ \"E\" : %d , \"F\": %d }", i, i));
            }
        }
        final NamespaceService namespaceService = newNamespaceService();
        clearAllDataExceptUser();
        getPopulator().populateSources();
        SpaceConfig config = new SpaceConfig();
        config.setName("testA");
        namespaceService.addOrUpdateSpace(new SpacePath(config.getName()).toNamespaceKey(), config);
        config = new SpaceConfig();
        config.setName("testB");
        namespaceService.addOrUpdateSpace(new SpacePath(config.getName()).toNamespaceKey(), config);
        final HomeConfig home1 = new HomeConfig();
        home1.setOwner(DEFAULT_USER_NAME);
        namespaceService.addOrUpdateHome(new HomePath(HomeName.getUserHomePath(home1.getOwner())).toNamespaceKey(), home1);
        getPopulator().putDS("testA", "dsA1", new FromTable("LocalFS1.\"testSpaceA.json\"").wrap());
        getPopulator().putDS("testB", "dsB1", new FromTable("LocalFS1.\"testSpaceB.json\"").wrap());
        getPopulator().putDS("testA", "dsA2", new FromTable("LocalFS1.\"testSpaceC.json\"").wrap());
        getPopulator().putDS("testA", "dsA3", new FromSQL(getFileContentsFromClreplacedPath("queries/tpch/03.sql").replaceAll("\\-\\-.*", "").replace('`', '"').replace(';', ' ')).wrap());
        addFolder(namespaceService, "testA.F1");
        addFolder(namespaceService, "testA.F1.F2");
        addFolder(namespaceService, "testA.F1.F2.F3");
        addFolder(namespaceService, "testA.F1.F2.F3.F4");
        addFolder(namespaceService, "@" + DEFAULT_USER_NAME + ".F1");
        addFolder(namespaceService, "@" + DEFAULT_USER_NAME + ".F1.F2");
        addFolder(namespaceService, "@" + DEFAULT_USER_NAME + ".F1.F2.F3");
        addFolder(namespaceService, "@" + DEFAULT_USER_NAME + ".F1.F2.F3.F4");
        List<FolderName> folderPath = new ArrayList<>();
        folderPath.add(new FolderName("F1"));
        getPopulator().putDS("testA", folderPath, "dsA1", new FromTable("LocalFS1.\"testSpaceA.json\"").wrap());
        getPopulator().putDS("@" + DEFAULT_USER_NAME, folderPath, "dsA1", new FromTable("LocalFS1.\"testSpaceA.json\"").wrap());
        folderPath.add(new FolderName("F2"));
        getPopulator().putDS("testA", folderPath, "dsB1", new FromTable("LocalFS1.\"testSpaceB.json\"").wrap());
        getPopulator().putDS("@" + DEFAULT_USER_NAME, folderPath, "dsB1", new FromTable("LocalFS1.\"testSpaceB.json\"").wrap());
        folderPath.add(new FolderName("F3"));
        getPopulator().putDS("testA", folderPath, "dsA2", new FromTable("LocalFS1.\"testSpaceC.json\"").wrap());
        getPopulator().putDS("@" + DEFAULT_USER_NAME, folderPath, "dsA2", new FromTable("LocalFS1.\"testSpaceC.json\"").wrap());
        folderPath.add(new FolderName("F4"));
        getPopulator().putDS("testA", folderPath, "dsA3", new FromSQL(getFileContentsFromClreplacedPath("queries/tpch/03.sql").replaceAll("\\-\\-.*", "").replace('`', '"').replace(';', ' ')).wrap());
        getPopulator().putDS("@" + DEFAULT_USER_NAME, folderPath, "dsA3", new FromSQL(getFileContentsFromClreplacedPath("queries/tpch/03.sql").replaceAll("\\-\\-.*", "").replace('`', '"').replace(';', ' ')).wrap());
    }

    public static void cleanup(DACDaemon dremioDaemon) throws Exception {
        final NamespaceService namespaceService = newNamespaceService();
        namespaceService.deleteSpace(new SpacePath("testA").toNamespaceKey(), namespaceService.getSpace(new SpacePath("testA").toNamespaceKey()).getTag());
        namespaceService.deleteSpace(new SpacePath("testB").toNamespaceKey(), namespaceService.getSpace(new SpacePath("testB").toNamespaceKey()).getTag());
    }

    private JobDataFragment runExternalQueryAndGetData(String sql, int limit) {
        return submitJobAndGetData(l(JobsService.class), JobRequest.newBuilder().setSqlQuery(new SqlQuery(sql, Collections.singletonList("@" + DEFAULT_USER_NAME), DEFAULT_USER_NAME)).build(), 0, limit, allocator);
    }

    @Test
    public void testSpacesPlugin() throws Exception {
        setup(getCurrentDremioDaemon());
        // update storage plugin
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.dsA1", 1000)) {
            assertEquals(1000, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testB.dsB1", 500)) {
            assertEquals(500, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.dsA2", 250)) {
            assertEquals(250, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.dsA1 t1 where t1.A >= 400", 600)) {
            assertEquals(600, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.dsA1 t1 inner join testB.dsB1 t2 on t1.A = t2.C inner join testA.dsA2 t3 on t2.C = t3.E where t3.F >= 900", 100)) {
            assertEquals(100, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.dsA3", 10)) {
            assertEquals(10, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.F1.dsA1", 1000)) {
            assertEquals(1000, results.getReturnedRowCount());
        }
        // folder/subschemas
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.F1.F2.dsB1", 500)) {
            assertEquals(500, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.F1.F2.F3.dsA2", 250)) {
            assertEquals(250, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from testA.F1.F2.F3.F4.dsA3", 10)) {
            assertEquals(10, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from \"@" + DEFAULT_USER_NAME + "\".F1.dsA1", 1000)) {
            assertEquals(1000, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from \"@" + DEFAULT_USER_NAME + "\".F1.F2.dsB1", 500)) {
            assertEquals(500, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from \"@" + DEFAULT_USER_NAME + "\".F1.F2.F3.dsA2", 250)) {
            assertEquals(250, results.getReturnedRowCount());
        }
        try (final JobDataFragment results = runExternalQueryAndGetData("select * from \"@" + DEFAULT_USER_NAME + "\".F1.F2.F3.F4.dsA3", 10)) {
            assertEquals(10, results.getReturnedRowCount());
        }
        cleanup(getCurrentDremioDaemon());
    }
}

19 Source : BufferAllocatorFactory.java
with Apache License 2.0
from dremio

/**
 * Factory for BufferAllocators to inject child allocators.
 */
public class BufferAllocatorFactory implements Service {

    private final BufferAllocator allocator;

    public BufferAllocatorFactory(BufferAllocator bootStrapAllocator, String name) {
        allocator = bootStrapAllocator.newChildAllocator(name, 0, Long.MAX_VALUE);
    }

    public BufferAllocator newChildAllocator(String allocatorName) {
        return allocator.newChildAllocator(allocatorName, 0, Long.MAX_VALUE);
    }

    @Override
    public void close() throws Exception {
        AutoCloseables.close(allocator);
    }

    @Override
    public void start() {
    }

    @VisibleForTesting
    BufferAllocator getBaseAllocator() {
        return allocator;
    }
}
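
A minimal usage sketch for the factory above (not from the source): it assumes an Arrow RootAllocator as the bootstrap allocator, and the allocator names "example-base" and "example-query" are made up for illustration.

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;

public class BufferAllocatorFactoryExample {
    public static void main(String[] args) throws Exception {
        try (BufferAllocator bootstrap = new RootAllocator(Long.MAX_VALUE)) {
            BufferAllocatorFactory factory = new BufferAllocatorFactory(bootstrap, "example-base");
            factory.start();
            try (BufferAllocator queryAllocator = factory.newChildAllocator("example-query")) {
                // Allocate and immediately release a small buffer from the injected child allocator.
                queryAllocator.buffer(1024).close();
            }
            // Closing the factory releases its base allocator; children must already be closed.
            factory.close();
        }
    }
}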

19 Source : DatasetTool.java
with Apache License 2.0
from dremio

InitialPreviewResponse createPreviewResponse(DatasetPath path, Transformer.DatasetAndData datasetAndData, BufferAllocator allocator, int maxRecords, boolean catchExecutionError) throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException {
    return createPreviewResponse(datasetAndData.getDataset(), datasetAndData.getJobData(), new DatasetVersionResourcePath(path, datasetAndData.getDataset().getVersion()), allocator, maxRecords, catchExecutionError);
}

19 Source : MemoryDebugInfo.java
with Apache License 2.0
from dremio

private static String getSummary(BufferAllocator start, int numLevels) {
    final StringBuilder sb = new StringBuilder();
    print(sb, start, 0, numLevels);
    return sb.toString();
}

19 Source : MemoryDebugInfo.java
with Apache License 2.0
from dremio

public static String getSummaryFromRoot(BufferAllocator allocator) {
    // find the root allocator.
    BufferAllocator root = allocator;
    while (root.getParentAllocator() != null) {
        root = root.getParentAllocator();
    }
    return getSummary(root, NUM_LEVELS_FROM_ROOT_TO_OPERATOR);
}
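
To illustrate the parent walk used by getSummaryFromRoot, here is a small self-contained sketch that uses only the public Arrow allocator API (the allocator names are made up): it builds a short child chain, walks back to the root, and dumps the allocator tree.

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;

public class AllocatorTreeExample {
    public static void main(String[] args) {
        try (BufferAllocator root = new RootAllocator(Long.MAX_VALUE);
             BufferAllocator child = root.newChildAllocator("child", 0, Long.MAX_VALUE);
             BufferAllocator grandChild = child.newChildAllocator("grand-child", 0, Long.MAX_VALUE)) {
            // Walk up the parent chain, as getSummaryFromRoot does.
            BufferAllocator current = grandChild;
            while (current.getParentAllocator() != null) {
                current = current.getParentAllocator();
            }
            // 'current' is now the root allocator; dump the whole allocator tree.
            System.out.println(current.toVerboseString());
        }
    }
}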

19 Source : BackwardsCompatibilityDecoder.java
with Apache License 2.0
from dremio

/**
 * Decoder step that applies the backwards-compatibility conversion.
 */
class BackwardsCompatibilityDecoder extends MessageToMessageDecoder<InboundRpcMessage> {

    private final BufferAllocator allocator;

    private final BaseBackwardsCompatibilityHandler handler;

    BackwardsCompatibilityDecoder(final BufferAllocator allocator, BaseBackwardsCompatibilityHandler handler) {
        super(InboundRpcMessage.class);
        this.allocator = allocator;
        this.handler = handler;
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, InboundRpcMessage msg, List<Object> out) throws Exception {
        if (msg.mode == RpcMode.REQUEST && msg.rpcType == RpcType.QUERY_DATA.getNumber()) {
            final QueryData oldHeader = QueryData.parseFrom(msg.pBody);
            final ByteBuf oldData = msg.dBody;
            final QueryBatch oldBatch = new QueryBatch(oldHeader, oldData);
            final QueryBatch newBatch = handler.makeBatchCompatible(oldBatch);
            // A copy might be needed to consolidate all buffers as client expect
            // one contiguous Arrow buffer.
            final ByteBuf newBuffer = consolidateBuffers(newBatch.getBuffers());
            out.add(new InboundRpcMessage(msg.mode, msg.rpcType, msg.coordinationId, newBatch.getHeader().toByteArray(), newBuffer));
        } else {
            out.add(msg);
        }
    }

    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        super.handlerRemoved(ctx);
        handler.close();
    }

    private ByteBuf consolidateBuffers(ByteBuf... buffers) {
        if (buffers.length == 0) {
            return NettyArrowBuf.unwrapBuffer(allocator.buffer(0));
        }
        if (buffers.length == 1) {
            return buffers[0];
        }
        int readableBytes = 0;
        for (ByteBuf buffer : buffers) {
            readableBytes += buffer.readableBytes();
        }
        try {
            ByteBuf newBuffer = NettyArrowBuf.unwrapBuffer(allocator.buffer(readableBytes));
            for (ByteBuf buffer : buffers) {
                newBuffer.writeBytes(buffer);
            }
            return newBuffer;
        } finally {
            freeBuffers(buffers);
        }
    }

    private static void freeBuffers(ByteBuf... buffers) {
        for (ByteBuf buffer : buffers) {
            buffer.release();
        }
    }
}

18 Source : ArrowUnionMemoryAllocator.java
with Apache License 2.0
from yahoojapan

public class ArrowUnionMemoryAllocator implements IMemoryAllocator {

    private final UnionVector vector;

    private final BufferAllocator allocator;

    private final int rowCount;

    /**
     * Set the vector of Union and initialize it.
     */
    public ArrowUnionMemoryAllocator(final BufferAllocator allocator, final UnionVector vector, final int rowCount) {
        this.allocator = allocator;
        this.vector = vector;
        this.rowCount = rowCount;
        vector.allocateNew();
    }

    @Override
    public void setNull(final int index) {
    }

    @Override
    public void setBoolean(final int index, final boolean value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBoolean()");
    }

    @Override
    public void setByte(final int index, final byte value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setByte()");
    }

    @Override
    public void setShort(final int index, final short value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setShort()");
    }

    @Override
    public void setInteger(final int index, final int value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setInteger()");
    }

    @Override
    public void setLong(final int index, final long value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setLong()");
    }

    @Override
    public void setFloat(final int index, final float value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setFloat()");
    }

    @Override
    public void setDouble(final int index, final double value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setDouble()");
    }

    @Override
    public void setBytes(final int index, final byte[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setBytes(final int index, final byte[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setString(final int index, final String value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setArrayIndex(final int index, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setArrayIndex()");
    }

    @Override
    public void setValueCount(final int count) throws IOException {
        vector.setValueCount(count);
    }

    @Override
    public int getValueCount() throws IOException {
        return vector.getValueCount();
    }

    @Override
    public IMemoryAllocator getChild(final String columnName, final ColumnType type) throws IOException {
        return ArrowMemoryAllocatorFactory.getFromUnionVector(type, columnName, allocator, vector, rowCount);
    }
}
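
A hedged sketch of wiring the wrapper above to a vector. It assumes Arrow's UnionVector.empty factory is available; only setValueCount/getValueCount are exercised, since the scalar setters throw UnsupportedOperationException.

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.complex.UnionVector;

public class ArrowUnionMemoryAllocatorExample {
    public static void main(String[] args) throws Exception {
        try (BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
             UnionVector vector = UnionVector.empty("union", allocator)) {
            // The wrapper calls vector.allocateNew() in its constructor.
            ArrowUnionMemoryAllocator memoryAllocator = new ArrowUnionMemoryAllocator(allocator, vector, 10);
            memoryAllocator.setValueCount(10);
            System.out.println(memoryAllocator.getValueCount()); // 10
        }
    }
}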

18 Source : ArrowMapMemoryAllocator.java
with Apache License 2.0
from yahoojapan

public class ArrowMapMemoryAllocator implements IMemoryAllocator {

    private final StructVector vector;

    private final BufferAllocator allocator;

    private final int rowCount;

    /**
     * Set the vector of Map and initialize it.
     */
    public ArrowMapMemoryAllocator(final BufferAllocator allocator, final StructVector vector, final int rowCount) {
        this.allocator = allocator;
        this.vector = vector;
        this.rowCount = rowCount;
        vector.allocateNew();
    }

    @Override
    public void setNull(final int index) {
        vector.setNull(index);
    }

    @Override
    public void setBoolean(final int index, final boolean value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBoolean()");
    }

    @Override
    public void setByte(final int index, final byte value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setByte()");
    }

    @Override
    public void setShort(final int index, final short value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setShort()");
    }

    @Override
    public void setInteger(final int index, final int value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setInteger()");
    }

    @Override
    public void setLong(final int index, final long value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setLong()");
    }

    @Override
    public void setFloat(final int index, final float value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setFloat()");
    }

    @Override
    public void setDouble(final int index, final double value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setDouble()");
    }

    @Override
    public void setBytes(final int index, final byte[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setBytes(final int index, final byte[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setString(final int index, final String value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setArrayIndex(final int index, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setArrayIndex()");
    }

    @Override
    public void setValueCount(final int count) throws IOException {
        for (int i = 0; i < count; i++) {
            vector.setIndexDefined(i);
        }
        vector.setValueCount(count);
    }

    @Override
    public int getValueCount() throws IOException {
        return vector.getValueCount();
    }

    @Override
    public IMemoryAllocator getChild(final String columnName, final ColumnType type) throws IOException {
        return ArrowMemoryAllocatorFactory.getFromStructVector(type, columnName, allocator, vector, rowCount);
    }
}

18 Source : ArrowArrayMemoryAllocator.java
with Apache License 2.0
from yahoojapan

public class ArrowArrayMemoryAllocator implements IMemoryAllocator {

    private final ListVector vector;

    private final BufferAllocator allocator;

    /**
     * Set the vector of Array and initialize it.
     */
    public ArrowArrayMemoryAllocator(final BufferAllocator allocator, final ListVector vector, final int rowCount) {
        this.allocator = allocator;
        this.vector = vector;
        vector.allocateNew();
    }

    @Override
    public void setNull(final int index) {
    }

    @Override
    public void setBoolean(final int index, final boolean value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBoolean()");
    }

    @Override
    public void setByte(final int index, final byte value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setByte()");
    }

    @Override
    public void setShort(final int index, final short value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setShort()");
    }

    @Override
    public void setInteger(final int index, final int value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setInteger()");
    }

    @Override
    public void setLong(final int index, final long value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setLong()");
    }

    @Override
    public void setFloat(final int index, final float value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setFloat()");
    }

    @Override
    public void setDouble(final int index, final double value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setDouble()");
    }

    @Override
    public void setBytes(final int index, final byte[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setBytes(final int index, final byte[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setString(final int index, final String value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setArrayIndex(final int index, final int start, final int length) throws IOException {
        vector.startNewValue(index);
        vector.endValue(index, length);
    }

    @Override
    public void setValueCount(final int count) throws IOException {
        vector.setValueCount(count);
    }

    @Override
    public int getValueCount() throws IOException {
        return vector.getValueCount();
    }

    @Override
    public IMemoryAllocator getArrayChild(final int childLength, final ColumnType type) throws IOException {
        return ArrowMemoryAllocatorFactory.getFromListVector(type, "ARRAY_CHILD", allocator, vector, childLength);
    }
}
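
Similarly, a hedged sketch for the list wrapper above, assuming Arrow's ListVector.empty factory: setArrayIndex records the list boundaries for a row via startNewValue/endValue.

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.complex.ListVector;

public class ArrowArrayMemoryAllocatorExample {
    public static void main(String[] args) throws Exception {
        try (BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
             ListVector vector = ListVector.empty("list", allocator)) {
            ArrowArrayMemoryAllocator memoryAllocator = new ArrowArrayMemoryAllocator(allocator, vector, 10);
            // Mark row 0 as a list that starts at offset 0 and holds three elements.
            memoryAllocator.setArrayIndex(0, 0, 3);
            memoryAllocator.setValueCount(1);
        }
    }
}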

18 Source : ArrowUnionMemoryAllocator.java
with Apache License 2.0
from yahoojapan

public class ArrowUnionMemoryAllocator implements IMemoryAllocator {

    private final UnionVector vector;

    private final BufferAllocator allocator;

    private final int rowCount;

    public ArrowUnionMemoryAllocator(final BufferAllocator allocator, final UnionVector vector, final int rowCount) {
        this.allocator = allocator;
        this.vector = vector;
        this.rowCount = rowCount;
        vector.allocateNew();
    }

    @Override
    public void setNull(final int index) {
    }

    @Override
    public void setBoolean(final int index, final boolean value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBoolean()");
    }

    @Override
    public void setByte(final int index, final byte value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setByte()");
    }

    @Override
    public void setShort(final int index, final short value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setShort()");
    }

    @Override
    public void setInteger(final int index, final int value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setInteger()");
    }

    @Override
    public void setLong(final int index, final long value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setLong()");
    }

    @Override
    public void setFloat(final int index, final float value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setFloat()");
    }

    @Override
    public void setDouble(final int index, final double value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setDouble()");
    }

    @Override
    public void setBytes(final int index, final byte[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setBytes(final int index, final byte[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setString(final int index, final String value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setArrayIndex(final int index, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setArrayIndex()");
    }

    @Override
    public void setValueCount(final int count) throws IOException {
        vector.setValueCount(count);
    }

    @Override
    public int getValueCount() throws IOException {
        return vector.getValueCount();
    }

    @Override
    public IMemoryAllocator getChild(final String columnName, final ColumnType type) throws IOException {
        return ArrowMemoryAllocatorFactory.getFromUnionVector(type, columnName, allocator, vector, rowCount);
    }
}

18 Source : ArrowMapMemoryAllocator.java
with Apache License 2.0
from yahoojapan

public class ArrowMapMemoryAllocator implements IMemoryAllocator {

    private final StructVector vector;

    private final BufferAllocator allocator;

    private final int rowCount;

    public ArrowMapMemoryAllocator(final BufferAllocator allocator, final StructVector vector, final int rowCount) {
        this.allocator = allocator;
        this.vector = vector;
        this.rowCount = rowCount;
        vector.allocateNew();
    }

    @Override
    public void setNull(final int index) {
        vector.setNull(index);
    }

    @Override
    public void setBoolean(final int index, final boolean value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBoolean()");
    }

    @Override
    public void setByte(final int index, final byte value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setByte()");
    }

    @Override
    public void setShort(final int index, final short value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setShort()");
    }

    @Override
    public void setInteger(final int index, final int value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setInteger()");
    }

    @Override
    public void setLong(final int index, final long value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setLong()");
    }

    @Override
    public void setFloat(final int index, final float value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setFloat()");
    }

    @Override
    public void setDouble(final int index, final double value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setDouble()");
    }

    @Override
    public void setBytes(final int index, final byte[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setBytes(final int index, final byte[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setString(final int index, final String value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setArrayIndex(final int index, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setArrayIndex()");
    }

    @Override
    public void setValueCount(final int count) throws IOException {
        for (int i = 0; i < count; i++) {
            vector.setIndexDefined(i);
        }
        vector.setValueCount(count);
    }

    @Override
    public int getValueCount() throws IOException {
        return vector.getValueCount();
    }

    @Override
    public IMemoryAllocator getChild(final String columnName, final ColumnType type) throws IOException {
        return ArrowMemoryAllocatorFactory.getFromStructVector(type, columnName, allocator, vector, rowCount);
    }
}

18 Source : ArrowArrayMemoryAllocator.java
with Apache License 2.0
from yahoojapan

public class ArrowArrayMemoryAllocator implements IMemoryAllocator {

    private final ListVector vector;

    private final BufferAllocator allocator;

    public ArrowArrayMemoryAllocator(final BufferAllocator allocator, final ListVector vector, final int rowCount) {
        this.allocator = allocator;
        this.vector = vector;
        vector.allocateNew();
    }

    @Override
    public void setNull(final int index) {
    }

    @Override
    public void setBoolean(final int index, final boolean value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBoolean()");
    }

    @Override
    public void setByte(final int index, final byte value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setByte()");
    }

    @Override
    public void setShort(final int index, final short value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setShort()");
    }

    @Override
    public void setInteger(final int index, final int value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setInteger()");
    }

    @Override
    public void setLong(final int index, final long value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setLong()");
    }

    @Override
    public void setFloat(final int index, final float value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setFloat()");
    }

    @Override
    public void setDouble(final int index, final double value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setDouble()");
    }

    @Override
    public void setBytes(final int index, final byte[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setBytes(final int index, final byte[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setBytes()");
    }

    @Override
    public void setString(final int index, final String value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setString(final int index, final char[] value, final int start, final int length) throws IOException {
        throw new UnsupportedOperationException("Unsupported method setString()");
    }

    @Override
    public void setArrayIndex(final int index, final int start, final int length) throws IOException {
        vector.startNewValue(index);
        vector.endValue(index, length);
    }

    @Override
    public void setValueCount(final int count) throws IOException {
        vector.setValueCount(count);
    }

    @Override
    public int getValueCount() throws IOException {
        return vector.getValueCount();
    }

    @Override
    public IMemoryAllocator getArrayChild(final int childLength, final ColumnType type) throws IOException {
        return ArrowMemoryAllocatorFactory.getFromListVector(type, "ARRAY_CHILD", allocator, vector, childLength);
    }
}

18 Source : FlightClientFactory.java
with Apache License 2.0
from rymurr

public class FlightClientFactory implements AutoCloseable {

    private final BufferAllocator allocator = new RootAllocator();

    private final Location defaultLocation;

    private final String username;

    private final String password;

    private final boolean parallel;

    public FlightClientFactory(Location defaultLocation, String username, String password, boolean parallel) {
        this.defaultLocation = defaultLocation;
        this.username = username;
        this.password = (password == null || password.equals("$NULL$")) ? null : password;
        this.parallel = parallel;
    }

    public FlightClient apply() {
        FlightClient client = FlightClient.builder(allocator, defaultLocation).build();
        client.authenticateBasic(username, password);
        if (parallel) {
            Iterator<Result> res = client.doAction(new Action("PARALLEL"));
            res.forEachRemaining(Object::toString);
        }
        return client;
    }

    @Override
    public void close() {
        allocator.close();
    }
}
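
A possible caller for the factory above; the endpoint, credentials, and the listFlights call are illustrative assumptions rather than taken from the source.

import org.apache.arrow.flight.Criteria;
import org.apache.arrow.flight.FlightClient;
import org.apache.arrow.flight.Location;

public class FlightClientFactoryExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint and credentials.
        Location location = Location.forGrpcInsecure("localhost", 47470);
        try (FlightClientFactory factory = new FlightClientFactory(location, "dremio", "dremio123", false);
             FlightClient client = factory.apply()) {
            // List advertised flights; the client is closed before the factory,
            // so the factory's allocator stays valid while the client is in use.
            client.listFlights(Criteria.ALL).forEach(info -> System.out.println(info.getDescriptor()));
        }
    }
}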

18 Source : FlightCloseableBindableService.java
with Apache License 2.0
from dremio

/**
 * A wrapper class around FlightBindingService
 */
public class FlightCloseableBindableService implements CloseableBindableService {

    private final JobsFlightProducer producer;

    private final BufferAllocator allocator;

    private final BindableService jobsFlightService;

    public FlightCloseableBindableService(BufferAllocator allocator, JobsFlightProducer producer, ServerAuthHandler authHandler, ExecutorService executor) {
        this.producer = producer;
        this.allocator = allocator;
        this.jobsFlightService = FlightGrpcUtils.createFlightService(allocator, producer, authHandler, executor);
    }

    @Override
    public ServerServiceDefinition bindService() {
        return jobsFlightService.bindService();
    }

    @Override
    public void close() throws Exception {
        AutoCloseables.close(producer, allocator);
    }
}

18 Source : JobResultsGrpcServerFacade.java
with Apache License 2.0
from dremio

/**
 * Job Results gRPC service.
 */
public class JobResultsGrpcServerFacade extends JobResultsServiceGrpc.JobResultsServiceImplBase implements CloseableBindableService {

    private Provider<ExecToCoordResultsHandler> execToCoordResultsHandlerProvider;

    private final BufferAllocator allocator;

    public JobResultsGrpcServerFacade(Provider<ExecToCoordResultsHandler> execToCoordResultsHandlerProvider, BufferAllocator allocator) {
        this.execToCoordResultsHandlerProvider = execToCoordResultsHandlerProvider;
        this.allocator = allocator;
    }

    public StreamObserver<JobResultsRequest> jobResults(StreamObserver<JobResultsResponse> responseObserver) {
        return new StreamObserver<JobResultsRequest>() {

            @Override
            public void onNext(JobResultsRequest request) {
                try {
                    ByteBuf dBody = NettyArrowBuf.unwrapBuffer(allocator.buffer(request.getData().size()));
                    dBody.writeBytes(request.getData().toByteArray());
                    execToCoordResultsHandlerProvider.get().dataArrived(request.getHeader(), dBody, request, new JobResultsGrpcLocalResponseSender(responseObserver, request.getSequenceId()));
                } catch (Exception ex) {
                    responseObserver.onError(ex);
                }
            }

            @Override
            public void onError(Throwable t) {
                responseObserver.onError(t);
            }

            @Override
            public void onCompleted() {
                responseObserver.onCompleted();
            }
        };
    }

    @Override
    public void close() throws Exception {
        AutoCloseables.close(allocator);
    }

    private static class JobResultsGrpcLocalResponseSender implements ResponseSender {

        private StreamObserver<JobResultsResponse> responseStreamObserver;

        private long sequenceId;

        JobResultsGrpcLocalResponseSender(StreamObserver<JobResultsResponse> responseStreamObserver, long sequenceId) {
            this.responseStreamObserver = responseStreamObserver;
            this.sequenceId = sequenceId;
        }

        @Override
        public void send(Response r) {
            JobResultsResponse.Builder builder = JobResultsResponse.newBuilder();
            builder.setAck(Acks.OK).setSequenceId(sequenceId);
            responseStreamObserver.onNext(builder.build());
        }

        @Override
        public void sendFailure(UserRpcException e) {
        }
    }
}

18 Source : ProtocolBuilder.java
with Apache License 2.0
from dremio

/**
 * Builder for creating simplified protocols on top of the fabric infrastructure.
 */
public final class ProtocolBuilder {

    private int protocolId;

    private BufferAllocator allocator;

    private String name;

    private long timeoutMillis = 120 * 1000;

    private final ProxyFactory proxyFactory = new ProxyFactory();

    private Map<Integer, ReceiveHandler<MessageLite, MessageLite>> handlers = new HashMap<>();

    private ProtocolBuilder() {
    }

    public static ProtocolBuilder builder() {
        return new ProtocolBuilder();
    }

    @SuppressWarnings("unchecked")
    public <REQUEST extends MessageLite, RESPONSE extends MessageLite> SendEndpointCreator<REQUEST, RESPONSE> register(int id, ReceiveHandler<REQUEST, RESPONSE> handler) {
        Preconditions.checkArgument(id > -1 && id < 2048, "A request id must be between 0 and 2047.");
        Preconditions.checkNotNull(handler);
        Preconditions.checkArgument(!handlers.containsKey(id), "Only a single handler can be registered per id. You tried to register a handler for id %s twice.", id);
        handlers.put(id, (ReceiveHandler<MessageLite, MessageLite>) handler);
        return new EndpointCreator<>(proxyFactory, new PseudoEnum(id), (Class<RESPONSE>) handler.getDefaultResponse().getClass(), timeoutMillis);
    }

    public ProtocolBuilder name(String name) {
        this.name = name;
        return this;
    }

    public ProtocolBuilder timeout(long timeoutMillis) {
        Preconditions.checkArgument(timeoutMillis > -1);
        Preconditions.checkArgument(handlers.isEmpty(), "You can only set a timeout before registering any handlers. You've already registered %s handlers.", handlers.size());
        this.timeoutMillis = timeoutMillis;
        return this;
    }

    public ProtocolBuilder protocolId(int protocolId) {
        this.protocolId = protocolId;
        validateProtocol();
        return this;
    }

    public ProtocolBuilder allocator(BufferAllocator allocator) {
        Preconditions.checkNotNull(allocator);
        this.allocator = allocator;
        return this;
    }

    private void validateProtocol() {
        Preconditions.checkArgument(protocolId > 1 && protocolId < 64, "ProtocolId must be between 2 and 63. You tried to set it to %s.", protocolId);
    }

    public void register(FabricService fabric) {
        Preconditions.checkArgument(proxyFactory.factory == null, "You can only register a protocol builder once.");
        validateProtocol();
        Preconditions.checkNotNull(name, "Name must be set.");
        Preconditions.checkNotNull(allocator, "Allocator must be set.");
        Preconditions.checkArgument(handlers.size() > 0, "You must add at least one handler to your protocol.");
        FabricProtocol protocol = new SimpleProtocol(protocolId, handlers, allocator, name);
        proxyFactory.factory = fabric.registerProtocol(protocol);
    }

    private static class SimpleProtocol implements FabricProtocol {

        private final int protocolId;

        private final ReceiveHandler<MessageLite, MessageLite>[] handlers;

        private final MessageLite[] defaultResponseInstances;

        private final MessageLite[] defaultRequestInstances;

        private final BufferAllocator allocator;

        private final RpcConfig config;

        @SuppressWarnings("unchecked")
        public SimpleProtocol(int protocolId, Map<Integer, ReceiveHandler<MessageLite, MessageLite>> handlers, BufferAllocator allocator, String name) {
            super();
            this.protocolId = protocolId;
            this.handlers = new ReceiveHandler[2048];
            this.defaultResponseInstances = new MessageLite[2048];
            this.defaultRequestInstances = new MessageLite[2048];
            RpcConfigBuilder builder = RpcConfig.newBuilder().name(name).timeout(0);
            for (Entry<Integer, ReceiveHandler<MessageLite, MessageLite>> e : handlers.entrySet()) {
                final int id = e.getKey();
                final ReceiveHandler<?, ?> handler = e.getValue();
                final EnumLite num = new PseudoEnum(id);
                builder.add(num, (Class<? extends MessageLite>) handler.getDefaultRequest().getClass(), num, (Class<? extends MessageLite>) handler.getDefaultResponse().getClass());
                this.handlers[id] = e.getValue();
                this.defaultResponseInstances[id] = e.getValue().getDefaultResponse();
                this.defaultRequestInstances[id] = e.getValue().getDefaultRequest();
            }
            this.config = builder.build();
            this.allocator = allocator;
        }

        @Override
        public int getProtocolId() {
            return protocolId;
        }

        @Override
        public BufferAllocator getAllocator() {
            return allocator;
        }

        @Override
        public RpcConfig getConfig() {
            return config;
        }

        @Override
        public MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
            return defaultResponseInstances[rpcType];
        }

        @Override
        public void handle(PhysicalConnection connection, int rpcType, ByteString pBody, ByteBuf dBody, ResponseSender sender) throws RpcException {
            MessageLite defaultInstance = defaultRequestInstances[rpcType];
            try {
                MessageLite value = defaultInstance.getParserForType().parseFrom(pBody);
                ArrowBuf dBody1 = dBody != null ? ((NettyArrowBuf) dBody).arrowBuf() : null;
                SentResponseMessage<MessageLite> response = handlers[rpcType].handle(value, dBody1);
                sender.send(new Response(new PseudoEnum(rpcType), response.getBody(), response.getBuffers()));
            } catch (Exception e) {
                final String fail = String.format("Failure consuming message for protocol[%d], request[%d] in the %s rpc layer.", getProtocolId(), rpcType, getConfig().getName());
                throw new UserRpcException(NodeEndpoint.getDefaultInstance(), fail, e);
            }
        }
    }

    /**
     * A fabric runner factory that proxies another, checking it is set before returning any command runners.
     */
    private class ProxyFactory implements FabricRunnerFactory {

        private FabricRunnerFactory factory;

        @Override
        public FabricCommandRunner getCommandRunner(String address, int port) {
            Preconditions.checkNotNull(factory, "You must register your protocol before you attempt to send a message.");
            return factory.getCommandRunner(address, port);
        }
    }

    private static class PseudoEnum implements EnumLite {

        private final int number;

        public PseudoEnum(int number) {
            this.number = number;
        }

        @Override
        public int getNumber() {
            return number;
        }

        @Override
        public String toString() {
            return Integer.toString(number);
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + number;
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            PseudoEnum other = (PseudoEnum) obj;
            if (number != other.number) {
                return false;
            }
            return true;
        }
    }
}
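
The following is a minimal usage sketch of the fluent API above, not taken from the source: the builder() factory method, the fabricService argument and the receiveHandler argument are assumptions for illustration only, while protocolId, name, timeout, allocator and register(FabricService) mirror the methods shown in this class.

// Hypothetical wiring helper; only meant to show the call order the builder enforces.
static void wireExampleProtocol(FabricService fabricService, ReceiveHandler<?, ?> receiveHandler) {
    BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
    ProtocolBuilder builder = ProtocolBuilder.builder()    // hypothetical factory method
        .protocolId(42)                                    // validated to lie between 2 and 63
        .name("example-protocol")
        .timeout(5_000)                                    // must be set before any handler is registered
        .allocator(allocator);                             // BufferAllocator used by the protocol
    // at least one handler has to be added before register(), e.g. via the
    // register(id, handler) overload shown at the top of this class:
    // builder.register(1, receiveHandler);
    builder.register(fabricService);                       // binds the protocol to the fabric service
}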

18 Source : ProtocolBuilder.java
with Apache License 2.0
from dremio

public ProtocolBuilder allocator(BufferAllocator allocator) {
    Preconditions.checkNotNull(allocator);
    this.allocator = allocator;
    return this;
}

18 Source : FabricServer.java
with Apache License 2.0
from dremio

/**
 * Fabric server that accepts connection.
 */
class FabricServer extends BasicServer<RpcType, FabricConnection> {

    // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FabricServer.class);
    private final FabricMessageHandler handler;

    private final ConnectionManagerRegistry connectionRegistry;

    private final String address;

    private final BufferAllocator allocator;

    private volatile Integer port;

    private volatile FabricIdentity localIdentity;

    FabricServer(String address, FabricMessageHandler handler, RpcConfig config, BufferAllocator allocator, ConnectionManagerRegistry connectionRegistry, EventLoopGroup eventLoopGroup) {
        super(config, new ArrowByteBufAllocator(allocator), eventLoopGroup);
        this.connectionRegistry = connectionRegistry;
        this.allocator = allocator;
        this.handler = handler;
        this.address = address;
    }

    @Override
    public int bind(int initialPort, boolean allowPortHunting) {
        port = super.bind(initialPort, allowPortHunting);
        localIdentity = FabricIdentity.newBuilder().setAddress(address).setPort(port).build();
        return port;
    }

    @Override
    public MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
        return FabricMessage.getDefaultInstance();
    }

    @Override
    protected void handle(FabricConnection connection, int rpcType, byte[] pBody, ByteBuf dBody, ResponseSender sender) throws RpcException {
        handler.handle(connection.getIdentity(), localIdentity, connection, rpcType, pBody, dBody, sender);
    }

    @Override
    protected Response handle(FabricConnection connection, int rpcType, byte[] pBody, ByteBuf dBody) throws RpcException {
        throw new UnsupportedOperationException();
    }

    @Override
    public FabricConnection initRemoteConnection(SocketChannel channel) {
        return new FabricConnection("fabric server", channel, this, allocator);
    }

    @Override
    protected ServerHandshakeHandler<FabricHandshake> newHandshakeHandler(final FabricConnection connection) {
        return new ServerHandshakeHandler<FabricHandshake>(RpcType.HANDSHAKE, FabricHandshake.PARSER) {

            @Override
            public MessageLite getHandshakeResponse(FabricHandshake inbound) throws Exception {
                // logger.debug("Handling handshake from other bit. {}", inbound);
                if (inbound.getRpcVersion() != FabricRpcConfig.RPC_VERSION) {
                    throw new RpcException(String.format("Invalid rpc version.  Expected %d, actual %d.", inbound.getRpcVersion(), FabricRpcConfig.RPC_VERSION));
                }
                if (!inbound.hasIdentity() || inbound.getIdentity().getAddress().isEmpty() || inbound.getIdentity().getPort() < 1) {
                    throw new RpcException(String.format("RPC didn't provide valid counter identity.  Received %s.", inbound.getIdentity()));
                }
                connection.setIdentity(inbound.getIdentity());
                final boolean isLoopback = inbound.getIdentity().getAddress().equals(address) && inbound.getIdentity().getPort() == port;
                if (!isLoopback) {
                    FabricConnectionManager manager = connectionRegistry.getConnectionManager(inbound.getIdentity());
                    // update the close handler.
                    connection.wrapCloseHandler(manager.getCloseHandlerCreator());
                    // add to the connection manager.
                    manager.addExternalConnection(connection);
                }
                return FabricHandshake.newBuilder().setRpcVersion(FabricRpcConfig.RPC_VERSION).build();
            }
        };
    }

    @Override
    protected MessageDecoder newDecoder(BufferAllocator allocator) {
        return new FabricProtobufLengthDecoder(allocator);
    }
}

18 Source : FabricServer.java
with Apache License 2.0
from dremio

@Override
protected MessageDecoder newDecoder(BufferAllocator allocator) {
    return new FabricProtobufLengthDecoder(allocator);
}

18 Source : FabricClient.java
with Apache License 2.0
from dremio

/**
 * Client used to connect to server.
 */
class FabricClient extends BasicClient<RpcType, FabricConnection, FabricHandshake, FabricHandshake> {

    // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FabricClient.class);
    private final FabricMessageHandler handler;

    private final FabricIdentity remoteIdentity;

    private final FabricConnectionManager.CloseHandlerCreator closeHandlerFactory;

    private final FabricIdentity localIdentity;

    private final BufferAllocator allocator;

    public FabricClient(RpcConfig config, EventLoopGroup eventLoop, BufferAllocator allocator, FabricIdentity remoteIdentity, FabricIdentity localIdentity, FabricMessageHandler handler, FabricConnectionManager.CloseHandlerCreator closeHandlerFactory, Optional<SSLEngineFactory> engineFactory) throws RpcException {
        super(config, new ArrowByteBufAllocator(allocator), eventLoop, RpcType.HANDSHAKE, FabricHandshake.class, FabricHandshake.PARSER, engineFactory);
        this.localIdentity = localIdentity;
        this.remoteIdentity = remoteIdentity;
        this.handler = handler;
        this.closeHandlerFactory = closeHandlerFactory;
        this.allocator = allocator;
    }

    @SuppressWarnings("unchecked")
    @Override
    public FabricConnection initRemoteConnection(SocketChannel channel) {
        return new FabricConnection("fabric client", channel, this, allocator);
    }

    @Override
    protected ChannelFutureListener newCloseListener(SocketChannel ch, FabricConnection connection) {
        return closeHandlerFactory.getHandler(connection, super.newCloseListener(ch, connection));
    }

    @Override
    public MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
        return FabricMessage.getDefaultInstance();
    }

    @Override
    protected void handle(FabricConnection connection, int rpcType, byte[] pBody, ByteBuf dBody, ResponseSender sender) throws RpcException {
        handler.handle(remoteIdentity, localIdentity, connection, rpcType, pBody, dBody, sender);
    }

    @Override
    protected Response handle(FabricConnection connection, int rpcType, byte[] pBody, ByteBuf dBody) throws RpcException {
        throw new UnsupportedOperationException();
    }

    @Override
    protected void validateHandshake(FabricHandshake handshake) throws RpcException {
        if (handshake.getRpcVersion() != FabricRpcConfig.RPC_VERSION) {
            throw new RpcException(String.format("Invalid rpc version.  Expected %d, actual %d.", handshake.getRpcVersion(), FabricRpcConfig.RPC_VERSION));
        }
    }

    @Override
    protected void finalizeConnection(FabricHandshake handshake, FabricConnection connection) {
        connection.setIdentity(handshake.getIdentity());
    }

    @Override
    public MessageDecoder newDecoder(BufferAllocator allocator) {
        return new FabricProtobufLengthDecoder(allocator);
    }
}

18 Source : FabricClient.java
with Apache License 2.0
from dremio

@Override
public MessageDecoder newDecoder(BufferAllocator allocator) {
    return new FabricProtobufLengthDecoder(allocator);
}

18 Source : MessageDecoder.java
with Apache License 2.0
from dremio

/**
 * Frame decoder that decodes the rpc header of each message as it is available.
 * Expected to allocate on heap only for message-only items. Can hit OOM for
 * messages with trailing data bytes, but simply responds to the sender that the message
 * cannot be accepted due to lack of memory. Should be resilient to all off-heap
 * OOM situations.
 */
public class MessageDecoder extends ByteToMessageDecoder {

    private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MessageDecoder.class);

    private BufferAllocator allocator;

    private final AtomicLong messageCounter = new AtomicLong();

    public MessageDecoder(BufferAllocator allocator) {
        super();
        setCumulator(COMPOSITE_CUMULATOR);
        this.allocator = allocator;
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        int length = decodeLengthFromMessage(ctx, in);
        if (length == -1) {
            return;
        }
        final ByteBuf frame = in.slice(in.readerIndex(), length);
        try {
            final InboundRpcMessage message = decodeMessage(ctx, frame, length);
            if (message != null) {
                out.add(message);
            }
        } finally {
            in.skipBytes(length);
        }
    }

    public static int decodeLengthFromMessage(ChannelHandlerContext ctx, ByteBuf in) throws Exception {
        int length = 0;
        if (!ctx.channel().isOpen()) {
            if (in.readableBytes() > 0) {
                logger.info("Channel is closed, discarding remaining {} byte(s) in buffer.", in.readableBytes());
            }
            in.skipBytes(in.readableBytes());
            return -1;
        }
        in.markReaderIndex();
        /**
         *  a variable-width message length can be up to five bytes in length. read bytes until we have a length.
         */
        final byte[] buf = new byte[5];
        for (int i = 0; i < buf.length; i++) {
            if (!in.isReadable()) {
                in.resetReaderIndex();
                return -1;
            }
            buf[i] = in.readByte();
            if (buf[i] >= 0) {
                length = CodedInputStream.newInstance(buf, 0, i + 1).readRawVarint32();
                if (length < 0) {
                    throw new CorruptedFrameException("negative length: " + length);
                }
                if (length == 0) {
                    throw new CorruptedFrameException("Received a message of length 0.");
                }
                if (in.readableBytes() < length) {
                    in.resetReaderIndex();
                    return -1;
                } else {
                    // complete message in buffer.
                    break;
                }
            }
        }
        return length;
    }

    /**
     * We decode the message in the same context as the length decoding to better
     * manage running out of memory. We decode the rpc header using heap memory
     * (expected to always be available) so that we can propagate an error message
     * with the correct coordination id to the sender rather than failing the channel.
     *
     * @param ctx The channel context.
     * @param frame The Frame of the message we're processing.
     * @param length The length of the frame.
     * @throws Exception This code should only throw corrupted-frame exceptions, which cause the channel to close.
     */
    private InboundRpcMessage decodeMessage(final ChannelHandlerContext ctx, final ByteBuf frame, final int length) throws Exception {
        // now, we know the entire message is in the buffer and the buffer is constrained to this message. Additionally,
        // this process should avoid reading beyond the end of this buffer so we inform the ByteBufInputStream to throw an
        // exception if we go beyond readable bytes (as opposed to blocking).
        final ByteBufInputStream is = new ByteBufInputStream(frame, length);
        // read the rpc header, saved in delimited format.
        checkTag(is, RpcEncoder.HEADER_TAG);
        final RpcHeader header = RpcHeader.parseDelimitedFrom(is);
        // read the protobuf body into a buffer.
        checkTag(is, RpcEncoder.PROTOBUF_BODY_TAG);
        final int pBodyLength = readRawVarint32(is);
        final byte[] pBody = new byte[pBodyLength];
        frame.readBytes(pBody);
        ByteBuf dBody = null;
        // read the data body.
        if (frame.readableBytes() > 0) {
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("Reading raw body, buffer has {} bytes available.", frame.readableBytes());
            }
            checkTag(is, RpcEncoder.RAW_BODY_TAG);
            final int dBodyLength = readRawVarint32(is);
            if (frame.readableBytes() != dBodyLength) {
                throw new CorruptedFrameException(String.format("Expected to receive a raw body of %d bytes but received a buffer with %d bytes.", dBodyLength, frame.readableBytes()));
            }
            try {
                dBody = NettyArrowBuf.unwrapBuffer(allocator.buffer(dBodyLength));
                // need to make buffer copy, otherwise netty will try to refill this buffer if we move the readerIndex forward...
                // TODO: Can we avoid this copy?
                dBody.writeBytes(frame.nioBuffer(frame.readerIndex(), dBodyLength));
            } catch (OutOfMemoryException e) {
                sendOutOfMemory(e, ctx, header.getCoordinationId());
                return null;
            }
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("Read raw body of length ", dBodyLength);
            }
        } else {
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("No need to read raw body, no readable bytes left.");
            }
        }
        InboundRpcMessage m = new InboundRpcMessage(header.getMode(), header.getRpcType(), header.getCoordinationId(), pBody, dBody);
        return m;
    }

    private void sendOutOfMemory(OutOfMemoryException e, final ChannelHandlerContext ctx, int coordinationId) {
        final UserException uex = UserException.memoryError(e).message("Out of memory while receiving data.").addContext(MemoryDebugInfo.getDetailsOnAllocationFailure(e, allocator)).build(logger);
        final OutboundRpcMessage outMessage = new OutboundRpcMessage(RpcMode.RESPONSE_FAILURE, 0, coordinationId, uex.getOrCreatePBError(false));
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Adding message to outbound buffer. {}", outMessage);
        }
        ChannelFuture future = ctx.writeAndFlush(outMessage);
        // if we were unable to report back the failure, make sure we close the channel; otherwise we may cause the sender
        // to block indefinitely waiting for an ACK on this message
        future.addListener(ChannelFutureListener.CLOSE_ON_FAILURE);
    }

    private void checkTag(ByteBufInputStream is, int expectedTag) throws IOException {
        int actualTag = readRawVarint32(is);
        if (actualTag != expectedTag) {
            throw new CorruptedFrameException(String.format("Expected to read a tag of %d but actually received a value of %d.  Happened after reading %d message.", expectedTag, actualTag, messageCounter.get()));
        }
    }

    // Taken from CodedInputStream and modified to enable ByteBufInterface.
    public static int readRawVarint32(ByteBufInputStream is) throws IOException {
        byte tmp = is.readByte();
        if (tmp >= 0) {
            return tmp;
        }
        int result = tmp & 0x7f;
        if ((tmp = is.readByte()) >= 0) {
            result |= tmp << 7;
        } else {
            result |= (tmp & 0x7f) << 7;
            if ((tmp = is.readByte()) >= 0) {
                result |= tmp << 14;
            } else {
                result |= (tmp & 0x7f) << 14;
                if ((tmp = is.readByte()) >= 0) {
                    result |= tmp << 21;
                } else {
                    result |= (tmp & 0x7f) << 21;
                    result |= (tmp = is.readByte()) << 28;
                    if (tmp < 0) {
                        // Discard upper 32 bits.
                        for (int i = 0; i < 5; i++) {
                            if (is.readByte() >= 0) {
                                return result;
                            }
                        }
                        throw new CorruptedFrameException("Encountered a malformed varint.");
                    }
                }
            }
        }
        return result;
    }

    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelReadComplete();
    }
}
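
As a rough sketch (not from the source), a decoder like the one above would typically be installed at the head of a Netty pipeline. Netty's EmbeddedChannel and Arrow's RootAllocator are used purely for illustration, and the 64 MB limit is an arbitrary example value.

static void decoderPipelineSketch() {
    // Root allocator that backs the data-body buffers allocated by the decoder.
    BufferAllocator allocator = new RootAllocator(64 * 1024 * 1024);
    // Stand-alone pipeline containing just the decoder, useful for experimentation.
    EmbeddedChannel channel = new EmbeddedChannel(new MessageDecoder(allocator));
    // Bytes written inbound are accumulated until a complete varint-length-prefixed
    // frame is available, at which point the decoder queues an InboundRpcMessage:
    // channel.writeInbound(frameBytes);
    // InboundRpcMessage message = channel.readInbound();
    channel.finish();
    allocator.close();
}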

18 Source : VectorContainerWriter.java
with Apache License 2.0
from dremio

public class VectorContainerWriter extends AbstractFieldWriter implements ComplexWriter {

    // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(VectorContainerWriter.class);
    private final SingleStructWriter structRoot;

    private final SpecialStructVector structVector;

    private final OutputMutator mutator;

    private BufferAllocator allocator;

    public VectorContainerWriter(OutputMutator mutator) {
        this.mutator = mutator;
        this.allocator = new RootAllocator(Long.MAX_VALUE);
        structVector = new SpecialStructVector(allocator, mutator.getCallBack());
        structRoot = new SingleStructWriter(structVector);
    }

    @Override
    public Field getField() {
        return structVector.getField();
    }

    @Override
    public int getValueCapacity() {
        return structRoot.getValueCapacity();
    }

    public void setInitialCapacity(int initialCapacity) {
        structRoot.setInitialCapacity(initialCapacity);
    }

    public NonNullableStructVector getStructVector() {
        return structVector;
    }

    @Override
    public void reset() {
        setPosition(0);
    }

    @Override
    public void close() throws Exception {
        clear();
        structRoot.close();
        structVector.close();
    }

    @Override
    public void clear() {
        structRoot.clear();
    }

    public SingleStructWriter getWriter() {
        return structRoot;
    }

    @Override
    public void setValueCount(int count) {
        structRoot.setValueCount(count);
    }

    @Override
    public void setPosition(int index) {
        super.setPosition(index);
        structRoot.setPosition(index);
    }

    @Override
    public void allocate() {
        structRoot.allocate();
    }

    private class SpecialStructVector extends NonNullableStructVector {

        public SpecialStructVector(BufferAllocator allocator, CallBack callback) {
            super("", allocator, new FieldType(false, ArrowType.Struct.INSTANCE, null, null), callback);
        }

        @Override
        public List<FieldVector> getChildren() {
            return Lists.newArrayList(Iterables.transform(mutator.getVectors(), new Function<ValueVector, FieldVector>() {

                @Nullable
                @Override
                public FieldVector apply(@Nullable ValueVector input) {
                    return (FieldVector) input;
                }
            }));
        }

        @Override
        public <T extends FieldVector> T addOrGet(String childName, FieldType fieldType, Class<T> clazz) {
            try {
                Field field = new Field(childName, fieldType, null);
                final FieldVector v = mutator.addField(field, clazz);
                putChild(childName, v);
                return this.typeify(v, clazz);
            } catch (SchemaChangeException e) {
                throw new IllegalStateException(e);
            }
        }

        @Override
        public <T extends FieldVector> T getChild(String name, Class<T> clazz) {
            final ValueVector v = mutator.getVector(name.toLowerCase());
            if (v == null) {
                return null;
            }
            return typeify(v, clazz);
        }
    }

    @Override
    public StructWriter rootAsStruct() {
        return structRoot;
    }

    @Override
    public ListWriter rootAsList() {
        throw new UnsupportedOperationException("Dremio doesn't support objects whose first level is a scalar or array.  Objects must start as maps.");
    }
}
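
Note that the writer above creates its own unbounded RootAllocator internally. As a general point about BufferAllocator lifecycle, the sketch below (not from the source; names, limits and the exact ArrowBuf close/release call may vary slightly across Arrow versions) shows the usual pattern of a bounded child allocator whose buffers are released before the allocators are closed.

static void allocatorLifecycleSketch() {
    try (BufferAllocator root = new RootAllocator(Long.MAX_VALUE);
         BufferAllocator child = root.newChildAllocator("example-child", 0, 16 * 1024 * 1024)) {
        ArrowBuf buf = child.buffer(256);   // allocate 256 bytes of off-heap memory
        buf.setInt(0, 42);                  // write into the buffer
        buf.close();                        // release the buffer before its allocator closes
    }
}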

18 Source : BaseRepeatedValueVectorHelper.java
with Apache License 2.0
from dremio

/*
   * RepeatedValueVector (or ListVector) no longer has inner offsetVector.
   * There is just the buffer that stores all offsets. In FixedWidthRepeatedReader
   * we earlier had the liberty to get the offset vector and mutate it directly
   * in a safe manner since the operations were carried out on the inner vector.
   * We no longer have such provision of setting offsets in a safe manner. Hence
   * we need these helper methods that directly work on the offset buffer
   * of the vector, do get/set operations and reallocation if needed.
   * An alternative would be to introduce static methods in ListVector or
   * BaseRepeatedValueVector interface for specifically setting data in
   * inner offset buffer but that approach is going to pollute the public
   * API in OSS.
   */
public static void setOffsetHelper(final BaseRepeatedValueVector vector, final int indexToGet, final int indexToSet, final BufferAllocator vectorAllocator) {
    final int valueToSet = vector.offsetBuffer.getInt(indexToGet * BaseRepeatedValueVector.OFFSET_WIDTH);
    while (indexToSet >= getOffsetBufferValueCapacity(vector)) {
        reallocOffsetBuffer(vector, vectorAllocator);
    }
    vector.offsetBuffer.setInt(indexToSet * BaseRepeatedValueVector.OFFSET_WIDTH, valueToSet);
}
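
For context, the sketch below (not from the source) restates how those offsets are interpreted in the standard Arrow list layout: each offset is a 4-byte integer (BaseRepeatedValueVector.OFFSET_WIDTH) and element i of a list vector covers inner positions [offset(i), offset(i + 1)). Like the helper above, it assumes package-level access to the offset buffer.

static int elementLengthSketch(BaseRepeatedValueVector vector, int i) {
    // Mirrors the getInt arithmetic used by setOffsetHelper above.
    int start = vector.offsetBuffer.getInt(i * BaseRepeatedValueVector.OFFSET_WIDTH);
    int end = vector.offsetBuffer.getInt((i + 1) * BaseRepeatedValueVector.OFFSET_WIDTH);
    return end - start;   // number of inner values belonging to element i
}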

18 Source : ObjectVector.java
with Apache License 2.0
from dremio

@Override
public TransferPair getTransferPair(BufferAllocator allocator) {
    throw new UnsupportedOperationException("ObjectVector does not support this");
}

18 Source : ObjectVector.java
with Apache License 2.0
from dremio

@Override
public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
    throw new UnsupportedOperationException("ObjectVector does not support this");
}
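
Unlike ObjectVector, ordinary value vectors do support transfer pairs. The following is a small illustrative sketch (not from the source) of moving an IntVector's buffers from one allocator to another via getTransferPair; the vector name and allocator limits are arbitrary example values.

static void transferPairSketch() {
    try (BufferAllocator source = new RootAllocator(Long.MAX_VALUE);
         BufferAllocator target = new RootAllocator(Long.MAX_VALUE);
         IntVector ints = new IntVector("ints", source)) {
        ints.allocateNew(3);
        ints.setSafe(0, 1);
        ints.setSafe(1, 2);
        ints.setSafe(2, 3);
        ints.setValueCount(3);
        // Hand ownership of the underlying buffers over to the target allocator.
        TransferPair pair = ints.getTransferPair(target);
        pair.transfer();
        try (IntVector moved = (IntVector) pair.getTo()) {
            // moved now holds the three values and is accounted for by the target allocator
        }
    }
}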
