org.apache.accumulo.core.data.Key

Here are examples of the Java API class org.apache.accumulo.core.data.Key, taken from open source projects.
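
Before the project examples, a minimal self-contained sketch (not taken from any of the projects below) of the most common Key constructors and accessors; all row and column values are placeholders:

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.io.Text;

public class KeyBasics {
    public static void main(String[] args) {
        // Row-only key; the column fields default to empty and the timestamp to Long.MAX_VALUE
        Key rowKey = new Key("row1");
        // Fully specified key: row, column family, column qualifier, timestamp
        Key fullKey = new Key(new Text("row1"), new Text("cf"), new Text("cq"), 42L);
        // Accessors mirror the constructor arguments
        Text row = fullKey.getRow();
        long ts = fullKey.getTimestamp();
        // Keys are Comparable, so they can bound a Range or populate sorted collections
        Range range = new Range(rowKey, true, fullKey, true);
        System.out.println(row + " " + ts + " " + range);
    }
}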

1. Gaffer1BloomElementFunctorTest#shouldTransformRangeFromEntityToEntityAndSomeEdges()

Project: Gaffer
File: Gaffer1BloomElementFunctorTest.java
@Test
public void shouldTransformRangeFromEntityToEntityAndSomeEdges() throws AccumuloElementConversionException {
    // Create entity
    final Entity entity = new Entity(TestGroups.ENTITY);
    entity.setVertex(1);
    //        String key1 = ConversionUtils.getRowKeyFromEntity(entity1);
    final Key key1 = elementConverter.getKeyFromEntity(entity);
    // Create edge from that entity
    final Edge edge = new Edge(TestGroups.EDGE);
    edge.setSource(1);
    edge.setDestination(2);
    //        String key2 = ConversionUtils.getRowKeysFromEdge(edge).getFirst();
    final Key key2 = elementConverter.getKeysFromEdge(edge).getFirst();
    // Create range from entity to edge inclusive
    final Range range = new Range(key1.getRow(), true, key2.getRow(), true);
    // Check don't get null Bloom key
    assertNotNull(elementFunctor.transform(range));
    // Check get correct Bloom key
    final org.apache.hadoop.util.bloom.Key expectedBloomKey = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(key1.getRowData().getBackingArray()));
    assertEquals(expectedBloomKey, elementFunctor.transform(range));
}

2. Gaffer1BloomElementFunctorTest#shouldTransformRangeEntity()

Project: Gaffer
File: Gaffer1BloomElementFunctorTest.java
@Test
public void shouldTransformRangeEntity() throws AccumuloElementConversionException {
    // Create Range formed from one entity and shouldRetieveElementsInRangeBetweenSeeds
    final Entity entity1 = new Entity(TestGroups.ENTITY);
    entity1.setVertex(1);
    final Key key1 = elementConverter.getKeyFromEntity(entity1);
    final Range range1 = new Range(key1, true, key1, true);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(Arrays.copyOf(key1.getRowData().getBackingArray(), key1.getRowData().getBackingArray().length));
    assertTrue(elementFunctor.transform(range1).equals(expectedBloomKey1));
    // Create Range formed from two entities and shouldRetieveElementsInRangeBetweenSeeds - should get null
    final Entity entity2 = new Entity(TestGroups.ENTITY);
    entity2.setVertex(2);
    final Key key2 = elementConverter.getKeyFromEntity(entity2);
    final Range range2 = new Range(key1, true, key2, true);
    assertNull(elementFunctor.transform(range2));
}

3. ByteEntityBloomElementFunctorTest#shouldTransformRangeFromEntityToEntityAndSomeEdges()

Project: Gaffer
File: ByteEntityBloomElementFunctorTest.java
@Test
public void shouldTransformRangeFromEntityToEntityAndSomeEdges() throws AccumuloElementConversionException {
    // Create entity
    final Entity entity = new Entity(TestGroups.ENTITY);
    entity.setVertex(1);
    //        String key1 = ConversionUtils.getRowKeyFromEntity(entity1);
    final Key key1 = elementConverter.getKeyFromEntity(entity);
    // Create edge from that entity
    final Edge edge = new Edge(TestGroups.EDGE);
    edge.setSource(1);
    edge.setDestination(2);
    //        String key2 = ConversionUtils.getRowKeysFromEdge(edge).getFirst();
    final Key key2 = elementConverter.getKeysFromEdge(edge).getFirst();
    // Create range from entity to edge inclusive
    final Range range = new Range(key1.getRow(), true, key2.getRow(), true);
    // Check don't get null Bloom key
    assertNotNull(elementFunctor.transform(range));
    // Check get correct Bloom key
    final org.apache.hadoop.util.bloom.Key expectedBloomKey = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(key1.getRowData().getBackingArray()));
    assertEquals(expectedBloomKey, elementFunctor.transform(range));
}

4. ByteEntityBloomElementFunctorTest#shouldTransformRangeEntity()

Project: Gaffer
File: ByteEntityBloomElementFunctorTest.java
@Test
public void shouldTransformRangeEntity() throws AccumuloElementConversionException {
    // Create Range formed from one entity and shouldRetieveElementsInRangeBetweenSeeds
    final Entity entity1 = new Entity(TestGroups.ENTITY);
    entity1.setVertex(1);
    final Key key1 = elementConverter.getKeyFromEntity(entity1);
    final Range range1 = new Range(key1, true, key1, true);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(Arrays.copyOf(key1.getRowData().getBackingArray(), key1.getRowData().getBackingArray().length - 2));
    assertTrue(elementFunctor.transform(range1).equals(expectedBloomKey1));
    // Create Range formed from two entities and shouldRetieveElementsInRangeBetweenSeeds - should get null
    final Entity entity2 = new Entity(TestGroups.ENTITY);
    entity2.setVertex(2);
    final Key key2 = elementConverter.getKeyFromEntity(entity2);
    final Range range2 = new Range(key1, true, key2, true);
    assertNull(elementFunctor.transform(range2));
}

5. ClassicRangeFactory#getEdgeRangeFromVertex()

Project: Gaffer
File: ClassicRangeFactory.java
private Range getEdgeRangeFromVertex(final byte[] serialisedVertex) {
    final byte[] startRowKey = new byte[serialisedVertex.length + 1];
    System.arraycopy(serialisedVertex, 0, startRowKey, 0, serialisedVertex.length);
    startRowKey[serialisedVertex.length] = ByteArrayEscapeUtils.DELIMITER;
    // Add delimiter to ensure that we don't get Entities.
    final byte[] endRowKey = new byte[serialisedVertex.length + 1];
    System.arraycopy(serialisedVertex, 0, endRowKey, 0, serialisedVertex.length);
    endRowKey[serialisedVertex.length] = ByteArrayEscapeUtils.DELIMITER_PLUS_ONE;
    final Key startKey = new Key(startRowKey, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    final Key endKey = new Key(endRowKey, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    return new Range(startKey, true, endKey, false);
}

6. AbstractCoreKeyAccumuloElementConverter#getKeysFromEdge()

Project: Gaffer
File: AbstractCoreKeyAccumuloElementConverter.java
@Override
public Pair<Key> getKeysFromEdge(final Edge edge) throws AccumuloElementConversionException {
    // Get pair of row keys
    final Pair<byte[]> rowKeys = getRowKeysFromEdge(edge);
    final byte[] columnFamily = buildColumnFamily(edge.getGroup());
    final byte[] columnQualifier = buildColumnQualifier(edge.getGroup(), edge.getProperties());
    final byte[] columnVisibility = buildColumnVisibility(edge.getGroup(), edge.getProperties());
    final long timeStamp = buildTimestamp(edge);
    // Create Accumulo keys - note that second row key may be null (if it's
    // a self-edge) and
    // in that case we should return null second key
    final Key key1 = new Key(rowKeys.getFirst(), columnFamily, columnQualifier, columnVisibility, timeStamp);
    final Key key2 = rowKeys.getSecond() != null ? new Key(rowKeys.getSecond(), columnFamily, columnQualifier, columnVisibility, timeStamp) : null;
    // Return pair of keys
    return new Pair<>(key1, key2);
}
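
A caller of getKeysFromEdge has to handle the possibly-null second key. A hedged sketch reusing elementConverter and edge from the example above (not part of the Gaffer source):

// Hypothetical caller; elementConverter and edge are as in the example above
final Pair<Key> keys = elementConverter.getKeysFromEdge(edge);
System.out.println("first row: " + keys.getFirst().getRow());
// For a self-edge the second key is null, so guard before dereferencing it
if (keys.getSecond() != null) {
    System.out.println("second row: " + keys.getSecond().getRow());
}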

7. TestAccumuloStorage#testUnsortedColumnList()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testUnsortedColumnList() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage("z,a");
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "a", ""));
    keys.add(new Key("1", "z", ""));
    values.add(new Value("a".getBytes()));
    values.add(new Value("z".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(3, t.size());
    Assert.assertEquals("z", t.get(1).toString());
    Assert.assertEquals("a", t.get(2).toString());
}

8. TestAccumuloStorage#testReadSingleKey()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadSingleKey() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage();
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "", "col1"));
    values.add(new Value("value1".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(2, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    InternalMap map = new InternalMap();
    map.put(":col1", new DataByteArray("value1"));
    Assert.assertEquals(map, t.get(1));
}
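
These pig tests use WholeRowIterator.encodeRow to pack an entire row into a single key/value pair; the inverse is decodeRow, which throws IOException (already declared by the test method). A short sketch, assuming the keys and values lists from the test above:

// decodeRow recovers the per-column entries packed by encodeRow
Value encoded = WholeRowIterator.encodeRow(keys, values);
SortedMap<Key, Value> decoded = WholeRowIterator.decodeRow(new Key("1"), encoded);
// decoded now maps the original Key ("1", "", "col1") back to its Value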

9. KeyValueToRyaStatementFunction#apply()

Project: incubator-rya
File: KeyValueToRyaStatementFunction.java
@Override
public RyaStatement apply(Map.Entry<Key, Value> input) {
    Key key = input.getKey();
    Value value = input.getValue();
    RyaStatement statement = null;
    try {
        statement = context.deserializeTriple(tableLayout,
                new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(),
                        key.getColumnQualifierData().toArray(), key.getTimestamp(),
                        key.getColumnVisibilityData().toArray(), (value != null) ? value.get() : null));
    } catch (TripleRowResolverException e) {
        throw new RuntimeException(e);
    }
    return statement;
}

10. AccumuloMRUtils#getEnd()

Project: geowave
File: AccumuloMRUtils.java
protected static BigInteger getEnd(final Range range, final int cardinality) {
    final Key end = range.getEndKey();
    byte[] endBytes;
    if (!range.isInfiniteStopKey() && (end != null)) {
        endBytes = extractBytes(end.getRowData(), cardinality);
    } else {
        endBytes = extractBytes(new ArrayByteSequence(new byte[] {}), cardinality, true);
    }
    return new BigInteger(endBytes);
}

11. AccumuloMRUtils#getStart()

Project: geowave
File: AccumuloMRUtils.java
protected static BigInteger getStart(final Range range, final int cardinality) {
    final Key start = range.getStartKey();
    byte[] startBytes;
    if (!range.isInfiniteStartKey() && (start != null)) {
        startBytes = extractBytes(start.getRowData(), cardinality);
    } else {
        startBytes = extractBytes(new ArrayByteSequence(new byte[] {}), cardinality);
    }
    return new BigInteger(startBytes);
}
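
Both helpers guard against infinite range endpoints, which Accumulo models as null keys. A minimal illustration of that convention (not geowave code):

// An empty Range spans all keys; its endpoint keys are null
Range all = new Range();
assert all.isInfiniteStartKey() && all.isInfiniteStopKey();
assert all.getStartKey() == null && all.getEndKey() == null;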

12. AbstractAccumuloElementConverterTest#shouldReturnAccumuloKeyConverterFromBasicEntity()

Project: Gaffer
File: AbstractAccumuloElementConverterTest.java
@Test
public void shouldReturnAccumuloKeyConverterFromBasicEntity() throws SchemaException, AccumuloElementConversionException, IOException {
    // Given
    final Entity entity = new Entity(TestGroups.ENTITY);
    entity.setVertex("3");
    // When
    final Key key = converter.getKeyFromEntity(entity);
    // Then
    final Entity newEntity = (Entity) converter.getElementFromKey(key);
    assertEquals("3", newEntity.getVertex());
}

13. TableUtils#addUpdateUtilsTable()

Project: Gaffer
File: TableUtils.java
public static void addUpdateUtilsTable(final AccumuloStore store) throws StoreException {
    ensureUtilsTableExists(store);
    final BatchWriter writer = createBatchWriter(store, AccumuloStoreConstants.GAFFER_UTILS_TABLE);
    final Key key;
    try {
        key = new Key(store.getProperties().getTable().getBytes(CommonConstants.UTF_8), AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    } catch (final UnsupportedEncodingException e) {
        throw new StoreException(e.getMessage(), e);
    }
    final Mutation m = new Mutation(key.getRow());
    m.put(key.getColumnFamily(), key.getColumnQualifier(), new ColumnVisibility(key.getColumnVisibility()), key.getTimestamp(), getValueFromSchemas(store.getSchema(), store.getKeyPackage()));
    try {
        writer.addMutation(m);
    } catch (final MutationsRejectedException e) {
        LOGGER.error("Failed to create an accumulo key mutation");
    }
}

14. TimeSkippingIteratorTest#testSkipColBug()

Project: incubator-fluo
File: TimeSkippingIteratorTest.java
@Test
public void testSkipColBug() throws IOException {
    // test for bug fluo#656
    SortedMap<Key, Value> data = new TreeMap<>();
    for (int q = 0; q < 2; q++) {
        for (int i = 0; i < 1000; i++) {
            Key k = new Key("r1", "f1", "q" + q, i);
            Value v = new Value(("" + i).getBytes());
            data.put(k, v);
        }
    }
    SortedKeyValueIterator<Key, Value> source = new SortedMapIterator(data);
    TimestampSkippingIterator tsi = new TimestampSkippingIterator(source);
    Key start = new Key("r1", "f1", "q0", Long.MAX_VALUE);
    Key end = new Key("r1", "f1", "q0", Long.MIN_VALUE);
    tsi.seek(new Range(start, true, end, true), Collections.<ByteSequence>emptySet(), false);
    Key curCol = new Key(tsi.getTopKey());
    Assert.assertTrue(tsi.hasTop());
    Assert.assertEquals(new Key("r1", "f1", "q0", 999), curCol);
    tsi.skipColumn(curCol);
    Assert.assertFalse(tsi.hasTop());
    // make sure fix didn't break anything
    start = new Key("r1", "f1", "q0", Long.MAX_VALUE);
    end = new Key("r1", "f1", "q1", Long.MIN_VALUE);
    tsi.seek(new Range(start, true, end, true), Collections.<ByteSequence>emptySet(), false);
    Assert.assertTrue(tsi.hasTop());
    Assert.assertEquals(new Key("r1", "f1", "q0", 999), tsi.getTopKey());
    tsi.skipColumn(curCol);
    Assert.assertTrue(tsi.hasTop());
    Assert.assertEquals(new Key("r1", "f1", "q1", 999), tsi.getTopKey());
    curCol = new Key(tsi.getTopKey());
    tsi.skipColumn(curCol);
    Assert.assertFalse(tsi.hasTop());
}

15. JoinSelectProspectOutputTest#testOutput()

Project: incubator-rya
File: JoinSelectProspectOutputTest.java
@Test
public void testOutput() throws InterruptedException, IOException {
    String s = "urn:gem:etype#1234";
    String p = "urn:gem#pred";
    String ts = "798497748386999999";
    Text t1 = new Text(TripleValueType.subject.name() + DELIM + s + DELIM + 1);
    Text t2 = new Text(TripleValueType.predicate.name() + DELIM + p + DELIM + 2);
    Text t3 = new Text(TripleValueType.subjectpredicate.name() + DELIM + s + DELIM + p + DELIM + ts);
    byte[] b = new byte[0];
    byte[] c = "25".getBytes();
    byte[] d = "47".getBytes();
    byte[] e = "15".getBytes();
    Key key1 = new Key(t1.getBytes(), b, b, b, 1);
    Key key2 = new Key(t2.getBytes(), b, b, b, 1);
    Key key3 = new Key(t3.getBytes(), b, b, b, 1);
    Value val1 = new Value(c);
    Value val2 = new Value(d);
    Value val3 = new Value(e);
    // System.out.println("Keys are " + key1 + " and " + key2);
    new MapDriver<Key, Value, CompositeType, TripleCard>()
            .withMapper(new JoinSelectProspectOutput.CardinalityMapper())
            .withInput(key1, val1).withInput(key2, val2).withInput(key3, val3)
            .withOutput(new CompositeType(s, 1), new TripleCard(new CardinalityType(25, "subject", 1)))
            .withOutput(new CompositeType(p, 1), new TripleCard(new CardinalityType(47, "predicate", 2)))
            .withOutput(new CompositeType(s + DELIM + p, 1), new TripleCard(new CardinalityType(15, "subjectpredicate", Long.parseLong(ts))))
            .runTest();
}

16. CardinalityMapperTest#testOutput()

Project: incubator-rya
File: CardinalityMapperTest.java
@Test
public void testOutput() throws InterruptedException, IOException {
    String s = "urn:gem:etype#1234";
    String p = "urn:gem#pred";
    Text t1 = new Text(TripleValueType.subject.name() + DELIM + s + DELIM + 1);
    Text t2 = new Text(TripleValueType.predicate.name() + DELIM + p + DELIM + 2);
    Text t3 = new Text(TripleValueType.subjectpredicate.name() + DELIM + s + DELIM + p + DELIM + 3);
    byte[] b = new byte[0];
    byte[] c = "25".getBytes();
    byte[] d = "47".getBytes();
    byte[] e = "15".getBytes();
    Key key1 = new Key(t1.getBytes(), b, b, b, 1);
    Key key2 = new Key(t2.getBytes(), b, b, b, 1);
    Key key3 = new Key(t3.getBytes(), b, b, b, 1);
    Value val1 = new Value(c);
    Value val2 = new Value(d);
    Value val3 = new Value(e);
    // System.out.println("Keys are " + key1 + " and " + key2);
    new MapDriver<Key, Value, CompositeType, TripleCard>()
            .withMapper(new JoinSelectProspectOutput.CardinalityMapper())
            .withInput(key1, val1).withInput(key2, val2).withInput(key3, val3)
            .withOutput(new CompositeType(s, 1), new TripleCard(new CardinalityType(25, "subject", 1)))
            .withOutput(new CompositeType(p, 1), new TripleCard(new CardinalityType(47, "predicate", 2)))
            .withOutput(new CompositeType(s + DELIM + p, 1), new TripleCard(new CardinalityType(15, "subjectpredicate", 3)))
            .runTest();
}

17. AccumuloMrsImageReader#get()

Project: mrgeo
File: AccumuloMrsImageReader.java
// end get
/**
 * Retrieve a series of tiles from the Accumulo store.
 *
 * @param startKey
 *          the start of the list of items to pull
 * @param endKey
 *          the end (inclusive) of items to pull
 * @return an iterator through the list of items to pull
 */
@Override
public KVIterator<TileIdWritable, Raster> get(final TileIdWritable startKey, final TileIdWritable endKey) {
    // start
    long startLong;
    if (startKey != null) {
        startLong = startKey.get();
    } else {
        startLong = 0;
    }
    // make sure the end is selected correctly
    long endLong;
    if (endKey != null) {
        endLong = endKey.get();
    } else {
        if (startLong == 0) {
            byte[] b = new byte[8];
            b[0] = 'A';
            endLong = ByteBuffer.wrap(b).getLong();
        } else {
            endLong = startLong;
        }
    }
    // check if we are getting one tile only
    boolean oneTile = false;
    if (startLong == endLong) {
        oneTile = true;
    }
    if (!oneTile) {
        // this is done to ensure of getting all tiles in the range
        endLong++;
    }
    // set up the keys for the ranges
    Key sKey = new Key(AccumuloUtils.toKey(startLong));
    Key eKey = new Key(AccumuloUtils.toKey(endLong));
    scanner.clearColumns();
    scanner.clearScanIterators();
    /*
     * TODO: how do you know if you are missing items in the tile list - it is possible right now
     * it appears there is a one to one mapping in the tiles and list
     */
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    String strAuths = AMTR_props.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_AUTHS);
    auths = AccumuloUtils.createAuthorizationsFromDelimitedString(strAuths);
    if (connector == null) {
        try {
            connector = AccumuloConnector.getConnector();
        } catch (DataProviderException dpe) {
        }
    }
    if (log.isDebugEnabled()) {
        StringBuilder authsStr = new StringBuilder();
        for (byte[] b : auths.getAuthorizations()) {
            authsStr.append(new String(b) + " ");
        }
        //log.info("startkey = " + startKey.get() + " endkey = " + endKey.get());
        log.debug("accStartkey = " + AccumuloUtils.toLong(sKey.getRow()) + " accEndKey = " + AccumuloUtils.toLong(eKey.getRow()) + " zoomLevel = " + zoomLevel + "\tonetile = " + oneTile + "\tauths = " + authsStr.toString());
    }
    Range r;
    if (oneTile) {
        r = new Range(AccumuloUtils.toRowId(startLong));
    } else {
        r = new Range(sKey, true, eKey, true);
    }
    // set the scanner
    scanner.setRange(r);
    if (zoomLevel != -1) {
        scanner.fetchColumnFamily(new Text(Integer.toString(zoomLevel)));
    }
    /**
     * it is important to realize that the core does not work like a traditional
     * iterator.  This is just the way they did it.
     */
    return new KVIterator<TileIdWritable, Raster>() {

        //final Iterator<Entry<Key, Value>> it = batchScanner.iterator();
        final Iterator<Map.Entry<Key, Value>> it = scanner.iterator();

        Map.Entry<Key, Value> current = null;

        //Map.Entry<Key, Value> nextCurrent = null;
        //ArrayList<Map.Entry<Key, Value>> vals = new ArrayList<Map.Entry<Key, Value>>();
        // this goes false after reading first element
        //private boolean readFirst = true;
        //int cnt = 0;
        @Override
        public TileIdWritable currentKey() {
            return new TileIdWritable(AccumuloUtils.toLong(current.getKey().getRow()));
        }

        // end currentKey
        @Override
        public Raster currentValue() {
            try {
                return toNonWritable(current.getValue().get(), null, null);
            } catch (final IOException e) {
                throw new MrsImageException(e);
            }
        }

        // end currentValue
        /**
         * It is expected for the core that hasNext sets the new value.  This is backwards
         * from how things normally work.
         *
         * @return true if the current value is set and false if there is nothing to set
         */
        @Override
        public boolean hasNext() {
            if (current == null && it.hasNext()) {
                current = it.next();
                return true;
            }
            if (it.hasNext()) {
                current = it.next();
                return true;
            }
            current = null;
            return false;
        }

        // end hasNext
        @Override
        public Raster next() {
            try {
                if (current == null && it.hasNext()) {
                    current = it.next();
                    log.debug("Current key = " + Hex.encodeHexString(current.getKey().getRow().getBytes()));
                    log.debug("Size of value = " + current.getValue().get().length);
                    return toNonWritable(current.getValue().get(), null, null);
                }
                return null;
            } catch (final IOException e) {
                throw new MrsImageException(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("iterator is read-only");
        }
    };
}

18. VisibilityTransformer#transform()

Project: geowave
File: VisibilityTransformer.java
@Override
public Pair<Key, Value> transform(Pair<Key, Value> entry) {
    Key originalKey = entry.getKey();
    Text visibiltity = originalKey.getColumnVisibility();
    String newVisibility = visibiltity.toString().replaceFirst(transformingRegex, replacement);
    if (newVisibility.length() > 0) {
        char one = newVisibility.charAt(0);
        // strip off any ending options
        if (one == '&' || one == '|')
            newVisibility = newVisibility.substring(1);
    }
    byte[] row = originalKey.getRowData().toArray();
    byte[] cf = originalKey.getColumnFamilyData().toArray();
    byte[] cq = originalKey.getColumnQualifierData().toArray();
    long timestamp = originalKey.getTimestamp();
    byte[] cv = newVisibility.getBytes(StringUtils.GEOWAVE_CHAR_SET);
    Key newKey = new Key(row, 0, row.length, cf, 0, cf.length, cq, 0, cq.length, cv, 0, cv.length, timestamp + 1);
    return Pair.of(newKey, entry.getValue());
}

19. IteratorUtils#replaceRow()

Project: geowave
File: IteratorUtils.java
public static Key replaceRow(final Key originalKey, final byte[] newRow) {
    final byte[] row = newRow;
    final byte[] cf = originalKey.getColumnFamilyData().toArray();
    final byte[] cq = originalKey.getColumnQualifierData().toArray();
    final byte[] cv = originalKey.getColumnVisibilityData().toArray();
    final long timestamp = originalKey.getTimestamp();
    final Key newKey = new SkeletonKey(new Key(row, 0, row.length, cf, 0, cf.length, cq, 0, cq.length, cv, 0, cv.length, timestamp));
    newKey.setDeleted(originalKey.isDeleted());
    return newKey;
}

20. ByteArrayEscapeUtilsTest#testOrdering()

Project: Gaffer
File: ByteArrayEscapeUtilsTest.java
@Test
public void testOrdering() {
    // Generate some keys with row key formed from random bytes, and add to ordered set
    final SortedSet<Key> original = new TreeSet<>();
    for (int i = 0; i < 100000; i++) {
        final int length = RandomUtils.nextInt(100) + 1;
        final byte[] b = new byte[length];
        for (int j = 0; j < b.length; j++) {
            b[j] = (byte) RandomUtils.nextInt();
        }
        final Key key = new Key(b, EMPTY_BYTES, EMPTY_BYTES, EMPTY_BYTES, Long.MAX_VALUE);
        original.add(key);
    }
    // Loop through set, check that ordering is preserved after escaping
    final Iterator<Key> it = original.iterator();
    Key first = it.next();
    Key second = it.next();
    while (true) {
        assertTrue(first.compareTo(second) < 0);
        final Key escapedFirst = new Key(ByteArrayEscapeUtils.escape(first.getRowData().getBackingArray()), EMPTY_BYTES, EMPTY_BYTES, EMPTY_BYTES, Long.MAX_VALUE);
        final Key escapedSecond = new Key(ByteArrayEscapeUtils.escape(second.getRowData().getBackingArray()), EMPTY_BYTES, EMPTY_BYTES, EMPTY_BYTES, Long.MAX_VALUE);
        assertTrue(escapedFirst.compareTo(escapedSecond) < 0);
        first = second;
        if (it.hasNext()) {
            second = it.next();
        } else {
            break;
        }
    }
}

21. ClassicRangeFactory#getEntityRangeFromVertex()

Project: Gaffer
File: ClassicRangeFactory.java
private Range getEntityRangeFromVertex(final byte[] serialisedVertex) {
    final byte[] key = Arrays.copyOf(serialisedVertex, serialisedVertex.length + 1);
    key[key.length - 1] = ByteArrayEscapeUtils.DELIMITER;
    final Key startKey = new Key(serialisedVertex, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    final Key endKey = new Key(key, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    return new Range(startKey, true, endKey, false);
}

22. ClassicRangeFactory#getRange()

Project: Gaffer
File: ClassicRangeFactory.java
private Range getRange(final byte[] serialisedVertex) {
    final byte[] endRowKey = new byte[serialisedVertex.length + 1];
    System.arraycopy(serialisedVertex, 0, endRowKey, 0, serialisedVertex.length);
    endRowKey[serialisedVertex.length] = ByteArrayEscapeUtils.DELIMITER_PLUS_ONE;
    final Key startKey = new Key(serialisedVertex, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    final Key endKey = new Key(endRowKey, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, AccumuloStoreConstants.EMPTY_BYTES, Long.MAX_VALUE);
    return new Range(startKey, true, endKey, false);
}
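
The DELIMITER_PLUS_ONE end key makes the range cover every row that begins with the serialised vertex. For plain prefix scans without Gaffer's escaped-delimiter semantics, stock Accumulo offers a similar convenience; a hedged one-liner (assuming Range.prefix is available in the Accumulo version in use):

// Covers every key whose row starts with the given prefix (not the Gaffer implementation)
Range prefixRange = Range.prefix(new Text(serialisedVertex));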

23. AccumuloGeoTableTest#testGetTile()

Project: mrgeo
File: AccumuloGeoTableTest.java
// end testConnectorAll
@Test
@Category(UnitTest.class)
public void testGetTile() throws Exception {
    ZooKeeperInstance zkinst = new ZooKeeperInstance(inst, zoo);
    PasswordToken pwTok = new PasswordToken(pw.getBytes());
    Connector conn = zkinst.getConnector(u, pwTok);
    Assert.assertNotNull(conn);
    PasswordToken token = new PasswordToken(pw.getBytes());
    Authorizations auths = new Authorizations(authsStr.split(","));
    long start = 0;
    long end = Long.MAX_VALUE;
    Key sKey = AccumuloUtils.toKey(start);
    Key eKey = AccumuloUtils.toKey(end);
    Range r = new Range(sKey, eKey);
    Scanner s = conn.createScanner("paris4", auths);
    s.fetchColumnFamily(new Text(Integer.toString(10)));
    s.setRange(r);
    Iterator<Entry<Key, Value>> it = s.iterator();
    while (it.hasNext()) {
        Entry<Key, Value> ent = it.next();
        if (ent == null) {
            return;
        }
        System.out.println("current key   = " + AccumuloUtils.toLong(ent.getKey().getRow()));
        System.out.println("current value = " + ent.getValue().getSize());
    }
}

24. JoinSelectMapperTest#testOutput()

Project: incubator-rya
File: JoinSelectMapperTest.java
@Test
public void testOutput() throws TripleRowResolverException, IOException {
    RyaStatement rya = new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata1"));
    Text s = new Text(rya.getSubject().getData());
    Text p = new Text(rya.getPredicate().getData());
    Text o = new Text(rya.getObject().getData());
    Text sp = new Text(rya.getSubject().getData() + DELIM + rya.getPredicate().getData());
    Text so = new Text(rya.getSubject().getData() + DELIM + rya.getObject().getData());
    Text po = new Text(rya.getPredicate().getData() + DELIM + rya.getObject().getData());
    Text ps = new Text(rya.getPredicate().getData() + DELIM + rya.getSubject().getData());
    Text op = new Text(rya.getObject().getData() + DELIM + rya.getPredicate().getData());
    Text os = new Text(rya.getObject().getData() + DELIM + rya.getSubject().getData());
    TripleEntry t1 = new TripleEntry(s, p, new Text("subject"), new Text("predicate"), new Text("object"));
    TripleEntry t2 = new TripleEntry(p, o, new Text("predicate"), new Text("object"), new Text("subject"));
    TripleEntry t3 = new TripleEntry(o, s, new Text("object"), new Text("subject"), new Text("predicate"));
    TripleEntry t4 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("subjectpredicate"));
    TripleEntry t5 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("objectsubject"));
    TripleEntry t6 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("predicateobject"));
    TripleEntry t7 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("objectpredicate"));
    TripleEntry t8 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("subjectobject"));
    TripleEntry t9 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("predicatesubject"));
    TripleRowResolver trr = new WholeRowTripleResolver();
    Map<TABLE_LAYOUT, TripleRow> map = trr.serialize(rya);
    System.out.println(map);
    TripleRow tr = map.get(TABLE_LAYOUT.SPO);
    System.out.println("Triple row is" + tr);
    System.out.println("ColumnV is " + tr.getTimestamp());
    byte[] b = new byte[0];
    Key key = new Key(tr.getRow(), tr.getColumnFamily(), tr.getColumnQualifier(), b, 1);
    Value val = new Value(b);
    new MapDriver<Key, Value, CompositeType, TripleCard>()
            .withMapper(new JoinSelectSpoTableOutput.JoinSelectMapper())
            .withInput(key, val)
            .withOutput(new CompositeType(o, new IntWritable(2)), new TripleCard(t1))
            .withOutput(new CompositeType(s, new IntWritable(2)), new TripleCard(t2))
            .withOutput(new CompositeType(p, new IntWritable(2)), new TripleCard(t3))
            .withOutput(new CompositeType(po, new IntWritable(2)), new TripleCard(t6))
            .withOutput(new CompositeType(so, new IntWritable(2)), new TripleCard(t5))
            .withOutput(new CompositeType(sp, new IntWritable(2)), new TripleCard(t4))
            .withOutput(new CompositeType(op, new IntWritable(2)), new TripleCard(t7))
            .withOutput(new CompositeType(os, new IntWritable(2)), new TripleCard(t8))
            .withOutput(new CompositeType(ps, new IntWritable(2)), new TripleCard(t9))
            .runTest();
}

25. BooleanTreeIterator#seekDocSource()

Project: incubator-rya
File: BooleanTreeIterator.java
private void seekDocSource(Key key) throws IOException {
    Key docKey = new Key(key.getRow(), ColumnPrefixes.DOCS_CF_PREFIX, key.getColumnQualifier());
    docSource.seek(new Range(docKey, true, null, false), Collections.<ByteSequence>emptyList(), false);
}

26. DocumentIndexIntersectingIterator#seek()

Project: incubator-rya
File: DocumentIndexIntersectingIterator.java
@Override
public void seek(Range range, Collection<ByteSequence> seekColumnFamilies, boolean inclusive) throws IOException {
    overallRange = new Range(range);
    currentRow = new Text();
    currentTermCond.set(emptyByteArray);
    termCondSet = false;
    //       log.info("Calling seek with range " + range);
    // seek each of the sources to the right column family within the row
    // given by key
    Key sourceKey;
    if (rangeCqValid(range)) {
        String[] cqInfo = cqParser(range.getStartKey().getColumnQualifier());
        int id = Integer.parseInt(cqInfo[1]);
        if (id >= 0) {
            for (int i = 0; i < sourcesCount; i++) {
                if (i == id) {
                    sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term, new Text(cqInfo[0]));
                    sources[i].seek(new Range(sourceKey, true, null, false));
                    sources[i].next();
                    if (!hasContext && sources[i].hasTop()) {
                        ctxt = getTermCond(sources[i].top).toString().split("\u0000")[0];
                    }
                } else {
                    sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term);
                    sources[i].seek(new Range(sourceKey, true, null, false));
                }
            }
        } else {
            for (int i = 0; i < sourcesCount; i++) {
                sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term, range.getStartKey().getColumnQualifier());
                sources[i].seek(new Range(sourceKey, true, null, false));
            }
        }
    } else {
        //            log.info("Range is invalid.");
        for (int i = 0; i < sourcesCount; i++) {
            if (range.getStartKey() != null) {
                sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term);
                // Seek only to the term for this source as a column family
                sources[i].seek(new Range(sourceKey, true, null, false));
            } else {
                // Seek only to the term for this source as a column family
                sources[i].seek(range);
            }
        }
    }
    advanceToIntersection();
}

27. RdfFileInputToolTest#testInputContext()

Project: incubator-rya
File: RdfFileInputToolTest.java
public void testInputContext() throws Exception {
    RdfFileInputTool.main(new String[] { "-Dac.mock=true", "-Dac.instance=" + instance,
            "-Dac.username=" + user, "-Dac.pwd=" + pwd, "-Drdf.tablePrefix=" + tablePrefix,
            "-Drdf.format=" + RDFFormat.TRIG.getName(), "src/test/resources/namedgraphs.trig" });
    Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, auths);
    scanner.setRange(new Range());
    Iterator<Map.Entry<Key, Value>> iterator = scanner.iterator();
    ValueFactory vf = new ValueFactoryImpl();
    assertTrue(iterator.hasNext());
    RyaStatement rs = new RyaStatement(new RyaURI("http://www.example.org/exampleDocument#Monica"), new RyaURI("http://www.example.org/vocabulary#name"), new RyaType("Monica Murphy"), new RyaURI("http://www.example.org/exampleDocument#G1"));
    Key key = iterator.next().getKey();
    TripleRow tripleRow = RyaTripleContext.getInstance(new AccumuloRdfConfiguration()).serializeTriple(rs).get(TABLE_LAYOUT.SPO);
    assertEquals(new Text(tripleRow.getRow()), key.getRow());
    assertEquals(new Text(tripleRow.getColumnFamily()), key.getColumnFamily());
}

28. HashTest#check()

Project: incubator-fluo
File: HashTest.java
private boolean check(byte[] row, byte[] cf, byte[] cq) {
    Column col = new Column(Bytes.of(cf), Bytes.of(cq));
    byte[] cfcq = NotificationUtil.encodeCol(col);
    Key k = new Key(row, ColumnConstants.NOTIFY_CF.toArray(), cfcq, new byte[0], 6);
    boolean accept = NotificationHashFilter.accept(k, 7, 3);
    Assert.assertEquals(accept, HashNotificationFinder.shouldProcess(Notification.from(k), 7, 3));
    return accept;
}

29. TestData#addIfInRange()

Project: incubator-fluo
File: TestData.java
public TestData addIfInRange(String key, String value, Range range) {
    String[] fields = key.split("\\s+");
    String row = fields[0];
    String cf = fields[1];
    String cq = fields[2];
    String ct;
    long ts;
    byte[] val = new byte[0];
    if (cf.equals("ntfy")) {
        ts = Long.parseLong(fields[3]) << 1;
        ct = cf;
        if (fields.length == 5) {
            if (!fields[4].equals("DEL"))
                throw new IllegalArgumentException("bad ntfy");
            // its a delete
            ts |= 1l;
        }
    } else {
        ct = fields[3];
        ts = Long.parseLong(fields[4]);
    }
    switch(ct) {
        case "ACK":
            ts |= ColumnConstants.ACK_PREFIX;
            break;
        case "TX_DONE":
            ts |= ColumnConstants.TX_DONE_PREFIX;
            break;
        case "WRITE":
            ts |= ColumnConstants.WRITE_PREFIX;
            long writeTs = Long.parseLong(value.split("\\s+")[0]);
            val = WriteValue.encode(writeTs, value.contains("PRIMARY"), value.contains("DELETE"));
            break;
        case "LOCK":
            ts |= ColumnConstants.LOCK_PREFIX;
            String rc[] = value.split("\\s+");
            val = LockValue.encode(Bytes.of(rc[0]), new Column(rc[1], rc[2]), value.contains("WRITE"), value.contains("DELETE"), value.contains("TRIGGER"), 42l);
            break;
        case "DATA":
            ts |= ColumnConstants.DATA_PREFIX;
            val = value.getBytes();
            break;
        case "DEL_LOCK":
            ts |= ColumnConstants.DEL_LOCK_PREFIX;
            if (value.contains("ROLLBACK") || value.contains("ABORT")) {
                val = DelLockValue.encodeRollback(value.contains("PRIMARY"), true);
            } else {
                long commitTs = Long.parseLong(value.split("\\s+")[0]);
                val = DelLockValue.encodeCommit(commitTs, value.contains("PRIMARY"));
            }
            break;
        case "ntfy":
            break;
        default:
            throw new IllegalArgumentException("unknown column type " + ct);
    }
    Key akey = new Key(row, cf, cq, ts);
    if (range.contains(akey)) {
        data.put(akey, new Value(val));
    }
    return this;
}

30. NotificationIteratorTest#testScans()

Project: incubator-fluo
File: NotificationIteratorTest.java
@Test
public void testScans() {
    TestData input = getTestData();
    // check the test code
    Assert.assertEquals(26, input.data.size());
    for (Key key : input.data.keySet()) {
        TestData output = new TestData(newNI(input, IteratorScope.scan), new Range(key, false, null, true));
        TestData expected = null;
        if (key.getRowData().toString().equals("0")) {
            expected = new TestData();
            expected.add("1 ntfy foo:bar 3", "");
            expected.add("1 ntfy foo:baz 1", "");
            expected.add("2 ntfy foo:baz 3", "");
            expected.add("9 ntfy foo:bar 3", "");
        } else if (key.getRowData().toString().equals("1")) {
            if (key.getColumnQualifierData().toString().equals("foo:bar")) {
                expected = new TestData();
                expected.add("1 ntfy foo:baz 1", "");
                expected.add("2 ntfy foo:baz 3", "");
                expected.add("9 ntfy foo:bar 3", "");
            } else {
                expected = new TestData();
                expected.add("2 ntfy foo:baz 3", "");
                expected.add("9 ntfy foo:bar 3", "");
            }
        } else if (key.getRowData().toString().compareTo("9") < 0) {
            expected = new TestData();
            expected.add("9 ntfy foo:bar 3", "");
        } else {
            expected = new TestData();
        }
        Assert.assertEquals(expected, output);
    }
}

31. RollbackCheckIterator#seek()

Project: incubator-fluo
File: RollbackCheckIterator.java
@Override
public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
    range = IteratorUtil.maximizeStartKeyTimeStamp(range);
    if (columnFamilies.isEmpty() && !inclusive) {
        source.seek(range, SnapshotIterator.NOTIFY_CF_SET, false);
    } else {
        source.seek(range, columnFamilies, inclusive);
    }
    Key curCol = new Key();
    if (source.hasTop()) {
        curCol.set(source.getTopKey());
        // TODO can this optimization cause problems?
        if (!curCol.equals(range.getStartKey(), PartialKey.ROW_COLFAM_COLQUAL_COLVIS)) {
            return;
        }
    }
    long invalidationTime = -1;
    hasTop = false;
    while (source.hasTop() && curCol.equals(source.getTopKey(), PartialKey.ROW_COLFAM_COLQUAL_COLVIS)) {
        long colType = source.getTopKey().getTimestamp() & ColumnConstants.PREFIX_MASK;
        long ts = source.getTopKey().getTimestamp() & ColumnConstants.TIMESTAMP_MASK;
        if (colType == ColumnConstants.TX_DONE_PREFIX) {
        // do nothing if TX_DONE
        } else if (colType == ColumnConstants.WRITE_PREFIX) {
            long timePtr = WriteValue.getTimestamp(source.getTopValue().get());
            if (timePtr > invalidationTime) {
                invalidationTime = timePtr;
            }
            if (lockTime == timePtr) {
                hasTop = true;
                return;
            }
        } else if (colType == ColumnConstants.DEL_LOCK_PREFIX) {
            if (ts > invalidationTime) {
                invalidationTime = ts;
            }
            if (ts == lockTime) {
                hasTop = true;
                return;
            }
        } else if (colType == ColumnConstants.LOCK_PREFIX) {
            if (ts > invalidationTime) {
                // nothing supersedes this lock, therefore the column is locked
                hasTop = true;
                return;
            }
        } else if (colType == ColumnConstants.DATA_PREFIX) {
            // can stop looking
            return;
        } else if (colType == ColumnConstants.ACK_PREFIX) {
        // do nothing if ACK
        } else {
            throw new IllegalArgumentException();
        }
        source.next();
    }
}

32. PrewriteIterator#seek()

Project: incubator-fluo
File: PrewriteIterator.java
@Override
public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
    Collection<ByteSequence> fams;
    if (columnFamilies.isEmpty() && !inclusive) {
        fams = SnapshotIterator.NOTIFY_CF_SET;
        inclusive = false;
    } else {
        fams = columnFamilies;
    }
    Key endKey = new Key(range.getStartKey());
    if (checkAck) {
        endKey.setTimestamp(ColumnConstants.DATA_PREFIX | ColumnConstants.TIMESTAMP_MASK);
    } else {
        endKey.setTimestamp(ColumnConstants.ACK_PREFIX | ColumnConstants.TIMESTAMP_MASK);
    }
    // Tried seeking directly to WRITE_PREFIX, however this did not work well because of how
    // TimestampSkippingIterator currently works. Currently, it can not remove the deleting iterator
    // until after the first seek.
    Range seekRange = new Range(range.getStartKey(), true, endKey, false);
    source.seek(seekRange, fams, inclusive);
    hasTop = false;
    long invalidationTime = -1;
    while (source.hasTop() && seekRange.getStartKey().equals(source.getTopKey(), PartialKey.ROW_COLFAM_COLQUAL_COLVIS)) {
        long colType = source.getTopKey().getTimestamp() & ColumnConstants.PREFIX_MASK;
        long ts = source.getTopKey().getTimestamp() & ColumnConstants.TIMESTAMP_MASK;
        if (colType == ColumnConstants.TX_DONE_PREFIX) {
            // tried to make 1st seek go to WRITE_PREFIX, but this did not allow the DeleteIterator to
            // be removed from the stack so it was slower.
            source.skipToPrefix(seekRange.getStartKey(), ColumnConstants.WRITE_PREFIX);
        } else if (colType == ColumnConstants.WRITE_PREFIX) {
            long timePtr = WriteValue.getTimestamp(source.getTopValue().get());
            if (timePtr > invalidationTime) {
                invalidationTime = timePtr;
            }
            if (ts >= snaptime) {
                hasTop = true;
                return;
            }
            source.skipToPrefix(seekRange.getStartKey(), ColumnConstants.DEL_LOCK_PREFIX);
        } else if (colType == ColumnConstants.DEL_LOCK_PREFIX) {
            if (ts > invalidationTime) {
                invalidationTime = ts;
                if (ts >= snaptime) {
                    hasTop = true;
                    return;
                }
            }
            source.skipToPrefix(seekRange.getStartKey(), ColumnConstants.LOCK_PREFIX);
        } else if (colType == ColumnConstants.LOCK_PREFIX) {
            if (ts > invalidationTime) {
                // nothing supersedes this lock, therefore the column is locked
                hasTop = true;
                return;
            }
            if (checkAck) {
                source.skipToPrefix(seekRange.getStartKey(), ColumnConstants.ACK_PREFIX);
            } else {
                // only ack and data left and not interested in either so stop looking
                return;
            }
        } else if (colType == ColumnConstants.DATA_PREFIX) {
            // can stop looking
            return;
        } else if (colType == ColumnConstants.ACK_PREFIX) {
            if (checkAck && ts > ntfyTimestamp) {
                hasTop = true;
                return;
            } else {
                // nothing else to look at in this column
                return;
            }
        } else {
            throw new IllegalArgumentException();
        }
    }
}

33. FluoFormatter#toString()

Project: incubator-fluo
File: FluoFormatter.java
public static String toString(Entry<Key, Value> entry) {
    Key key = entry.getKey();
    if (NotificationUtil.isNtfy(key)) {
        StringBuilder sb = new StringBuilder();
        encNonAscii(sb, key.getRowData());
        sb.append(" ");
        encNonAscii(sb, key.getColumnFamilyData());
        sb.append(":");
        Column col = NotificationUtil.decodeCol(key);
        encNonAscii(sb, col.getFamily());
        sb.append(":");
        encNonAscii(sb, col.getQualifier());
        sb.append(" [");
        encNonAscii(sb, key.getColumnVisibilityData());
        sb.append("] ");
        sb.append(NotificationUtil.decodeTs(key));
        sb.append('-');
        sb.append(NotificationUtil.isDelete(key) ? "DELETE" : "INSERT");
        sb.append("\t");
        encNonAscii(sb, entry.getValue().get());
        return sb.toString();
    } else {
        long ts = key.getTimestamp();
        String type = "";
        if ((ts & ColumnConstants.PREFIX_MASK) == ColumnConstants.TX_DONE_PREFIX) {
            type = "TX_DONE";
        }
        if ((ts & ColumnConstants.PREFIX_MASK) == ColumnConstants.DEL_LOCK_PREFIX) {
            type = "DEL_LOCK";
        }
        if ((ts & ColumnConstants.PREFIX_MASK) == ColumnConstants.LOCK_PREFIX) {
            type = "LOCK";
        }
        if ((ts & ColumnConstants.PREFIX_MASK) == ColumnConstants.DATA_PREFIX) {
            type = "DATA";
        }
        if ((ts & ColumnConstants.PREFIX_MASK) == ColumnConstants.WRITE_PREFIX) {
            type = "WRITE";
        }
        if ((ts & ColumnConstants.PREFIX_MASK) == ColumnConstants.ACK_PREFIX) {
            type = "ACK";
        }
        StringBuilder sb = new StringBuilder();
        String val;
        if (type.equals("WRITE")) {
            val = new WriteValue(entry.getValue().get()).toString();
        } else if (type.equals("DEL_LOCK")) {
            val = new DelLockValue(entry.getValue().get()).toString();
        } else if (type.equals("LOCK")) {
            val = new LockValue(entry.getValue().get()).toString();
        } else {
            encNonAscii(sb, entry.getValue().get());
            val = sb.toString();
        }
        sb.setLength(0);
        encNonAscii(sb, key.getRowData());
        sb.append(" ");
        encNonAscii(sb, key.getColumnFamilyData());
        sb.append(":");
        encNonAscii(sb, key.getColumnQualifierData());
        sb.append(" [");
        encNonAscii(sb, key.getColumnVisibilityData());
        sb.append("] ");
        sb.append(ts & ColumnConstants.TIMESTAMP_MASK);
        sb.append('-');
        sb.append(type);
        sb.append("\t");
        sb.append(val);
        return sb.toString();
    }
}

34. PersistentDataFormatter#formatEntry()

Project: geowave
File: PersistentDataFormatter.java
public String formatEntry(Entry<Key, Value> entry, DateFormat timestampFormat) {
    StringBuilder sb = new StringBuilder();
    StringBuilder sbInsertion = new StringBuilder();
    Key key = entry.getKey();
    EntryRowID rowId = new EntryRowID(key.getRow().getBytes());
    byte[] insertionIdBytes;
    insertionIdBytes = rowId.getInsertionId();
    for (byte b : insertionIdBytes) {
        sbInsertion.append(String.format("%02x", b));
    }
    Text insertionIdText = new Text(sbInsertion.toString());
    Text adapterIdText = new Text(StringUtils.stringFromBinary(rowId.getAdapterId()));
    Text dataIdText = new Text(StringUtils.stringFromBinary(rowId.getDataId()));
    Text duplicatesText = new Text(Integer.toString(rowId.getNumberOfDuplicates()));
    // append insertion Id
    appendText(sb, insertionIdText).append(" ");
    // append adapterId
    appendText(sb, adapterIdText).append(" ");
    // append dataId
    appendText(sb, dataIdText).append(" ");
    // append numberOfDuplicates
    appendText(sb, duplicatesText).append(" ");
    // append column family
    appendText(sb, key.getColumnFamily()).append(":");
    // append column qualifier
    appendText(sb, key.getColumnQualifier()).append(" ");
    // append visibility expression
    sb.append(new ColumnVisibility(key.getColumnVisibility()));
    // append timestamp
    if (timestampFormat != null) {
        tmpDate.get().setTime(entry.getKey().getTimestamp());
        sb.append(" ").append(timestampFormat.format(tmpDate.get()));
    }
    Value value = entry.getValue();
    // append value
    if (value != null && value.getSize() > 0) {
        sb.append("\t");
        appendValue(sb, value);
    }
    return sb.toString();
}

35. AccumuloKeyValuePairGenerator#constructKeyValuePairs()

Project: geowave
File: AccumuloKeyValuePairGenerator.java
public List<AccumuloKeyValuePair> constructKeyValuePairs(byte[] adapterId, T entry) {
    List<AccumuloKeyValuePair> keyValuePairs = new ArrayList<>();
    Key key;
    Value value;
    AccumuloKeyValuePair keyValuePair;
    DataStoreEntryInfo ingestInfo = DataStoreUtils.getIngestInfo(adapter, index, entry, visibilityWriter);
    List<ByteArrayId> rowIds = ingestInfo.getRowIds();
    List<FieldInfo<?>> fieldInfoList = ingestInfo.getFieldInfo();
    for (ByteArrayId rowId : rowIds) {
        for (@SuppressWarnings("rawtypes") FieldInfo fieldInfo : fieldInfoList) {
            key = new Key(rowId.getBytes(), adapterId, fieldInfo.getDataValue().getId().getBytes(), fieldInfo.getVisibility(), System.currentTimeMillis());
            value = new Value(fieldInfo.getWrittenValue());
            keyValuePair = new AccumuloKeyValuePair(key, value);
            keyValuePairs.add(keyValuePair);
        }
    }
    return keyValuePairs;
}

36. MergingVisibilityCombiner#transformRange()

Project: geowave
File: MergingVisibilityCombiner.java
@Override
protected void transformRange(final SortedKeyValueIterator<Key, Value> input, final KVBuffer output) throws IOException {
    Mergeable currentMergeable = null;
    Key outputKey = null;
    while (input.hasTop()) {
        final Value val = input.getTopValue();
        // the SortedKeyValueIterator uses the same instance of topKey to
        // hold keys (a wrapper)
        final Key currentKey = new Key(input.getTopKey());
        if (outputKey == null) {
            outputKey = currentKey;
        } else if ((currentMergeable != null) && !outputKey.getRowData().equals(currentKey.getRowData())) {
            output.append(outputKey, new Value(PersistenceUtils.toBinary(currentMergeable)));
            currentMergeable = null;
            outputKey = currentKey;
            continue;
        } else {
            final Text combinedVisibility = new Text(combineVisibilities(currentKey.getColumnVisibility().getBytes(), outputKey.getColumnVisibility().getBytes()));
            outputKey = replaceColumnVisibility(outputKey, combinedVisibility);
        }
        final Mergeable mergeable = getMergeable(currentKey, val.get());
        // but just in case, check
        if (mergeable != null) {
            if (currentMergeable == null) {
                currentMergeable = mergeable;
            } else {
                currentMergeable.merge(mergeable);
            }
        }
        input.next();
    }
    if (currentMergeable != null) {
        output.append(outputKey, new Value(getBinary(currentMergeable)));
    }
}

37. Gaffer1BloomElementFunctorTest#shouldTransformKeyEntity()

Project: Gaffer
File: Gaffer1BloomElementFunctorTest.java
@Test
public void shouldTransformKeyEntity() throws AccumuloElementConversionException {
    // Create Key formed from entity and shouldRetieveElementsInRangeBetweenSeeds
    final Entity entity1 = new Entity(TestGroups.ENTITY);
    entity1.setVertex(1);
    final Key key1 = elementConverter.getKeyFromEntity(entity1);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(key1.getRowData().getBackingArray()));
    assertEquals(expectedBloomKey1, elementFunctor.transform(key1));
}

38. ByteEntityBloomElementFunctorTest#shouldTransformKeyEntity()

Project: Gaffer
File: ByteEntityBloomElementFunctorTest.java
@Test
public void shouldTransformKeyEntity() throws AccumuloElementConversionException {
    // Create Key formed from entity and shouldRetieveElementsInRangeBetweenSeeds
    final Entity entity1 = new Entity(TestGroups.ENTITY);
    entity1.setVertex(1);
    final Key key1 = elementConverter.getKeyFromEntity(entity1);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(key1.getRowData().getBackingArray()));
    assertEquals(expectedBloomKey1, elementFunctor.transform(key1));
}

39. SampleDataAndCreateSplitsFileTool#run()

Project: Gaffer
File: SampleDataAndCreateSplitsFileTool.java
@Override
public int run(final String[] strings) throws OperationException {
    try {
        job = new SampleDataForSplitPointsJobFactory().createJob(operation, store);
    } catch (IOException e) {
        throw new OperationException("Failed to create the hadoop job : " + e.getMessage(), e);
    }
    try {
        job.waitForCompletion(true);
    } catch (IOException | InterruptedException | ClassNotFoundException e) {
        throw new OperationException("Error while waiting for job to complete : " + e.getMessage(), e);
    }
    try {
        if (!job.isSuccessful()) {
            throw new OperationException("Error running job");
        }
    } catch (IOException e) {
        throw new OperationException("Error running job" + e.getMessage(), e);
    }
    // Number of records output
    // NB In the following line use mapred.Task.Counter.REDUCE_OUTPUT_RECORDS rather than
    // mapreduce.TaskCounter.REDUCE_OUTPUT_RECORDS as this is more compatible with earlier
    // versions of Hadoop.
    Counter counter;
    try {
        counter = job.getCounters().findCounter(org.apache.hadoop.mapred.Task.Counter.REDUCE_OUTPUT_RECORDS);
    } catch (IOException e) {
        throw new OperationException("Failed to get counter: " + org.apache.hadoop.mapred.Task.Counter.REDUCE_OUTPUT_RECORDS, e);
    }
    int numberTabletServers;
    try {
        numberTabletServers = store.getConnection().instanceOperations().getTabletServers().size();
    } catch (StoreException e) {
        throw new OperationException(e.getMessage(), e);
    }
    long outputEveryNthRecord = counter.getValue() / (numberTabletServers - 1);
    // Read through resulting file, pick out the split points and write to file.
    Configuration conf = getConf();
    FileSystem fs;
    try {
        fs = FileSystem.get(conf);
    } catch (IOException e) {
        throw new OperationException("Failed to get Filesystem from configuraiton : " + e.getMessage(), e);
    }
    Path resultsFile = new Path(operation.getOutputPath(), "part-r-00000");
    Key key = new Key();
    Value value = new Value();
    long count = 0;
    int numberSplitPointsOutput = 0;
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, resultsFile, conf);
        PrintStream splitsWriter = new PrintStream(new BufferedOutputStream(fs.create(new Path(operation.getResultingSplitsFilePath()), true)), false, CommonConstants.UTF_8)) {
        while (reader.next(key, value) && numberSplitPointsOutput < numberTabletServers - 1) {
            count++;
            if (count % outputEveryNthRecord == 0) {
                numberSplitPointsOutput++;
                splitsWriter.println(new String(Base64.encodeBase64(key.getRow().getBytes()), CommonConstants.UTF_8));
            }
        }
    } catch (IOException e) {
        throw new OperationException(e.getMessage(), e);
    }
    try {
        fs.delete(resultsFile, true);
    } catch (IOException e) {
        throw new OperationException("Failed to delete the mapreduce result file : " + e.getMessage(), e);
    }
    return SUCCESS_RESPONSE;
}
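
The splits file written by run() holds one Base64-encoded row key per line. As a rough consumption sketch (the method name, table name, and open Connector here are ours, not Gaffer's), the lines can be decoded and handed to TableOperations.addSplits to pre-split the table:

// Sketch: feed the sampled split points back into Accumulo.
private void addSplitsFromFile(final Connector connector, final String tableName,
        final FileSystem fs, final Path splitsFile) throws Exception {
    final SortedSet<Text> splits = new TreeSet<>();
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(fs.open(splitsFile), CommonConstants.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            // Each line is a Base64-encoded row key, as written by run() above.
            splits.add(new Text(Base64.decodeBase64(line)));
        }
    }
    // Pre-split the table on the sampled row keys.
    connector.tableOperations().addSplits(tableName, splits);
}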

40. TestAccumuloStorage#testReadMultipleScalarsAndMaps()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadMultipleScalarsAndMaps() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage("z,r:,m:2,b:");
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "a", "1"));
    keys.add(new Key("1", "b", "1"));
    keys.add(new Key("1", "b", "2"));
    keys.add(new Key("1", "f", "1"));
    keys.add(new Key("1", "f", "2"));
    keys.add(new Key("1", "m", "1"));
    keys.add(new Key("1", "m", "2"));
    keys.add(new Key("1", "r", "1"));
    keys.add(new Key("1", "r", "2"));
    keys.add(new Key("1", "r", "3"));
    keys.add(new Key("1", "z", ""));
    values.add(new Value("a1".getBytes()));
    values.add(new Value("b1".getBytes()));
    values.add(new Value("b2".getBytes()));
    values.add(new Value("f1".getBytes()));
    values.add(new Value("f2".getBytes()));
    values.add(new Value("m1".getBytes()));
    values.add(new Value("m2".getBytes()));
    values.add(new Value("r1".getBytes()));
    values.add(new Value("r2".getBytes()));
    values.add(new Value("r3".getBytes()));
    values.add(new Value("z1".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(5, t.size());
    Assert.assertEquals(new DataByteArray("z1".getBytes()), t.get(1));
    HashMap<String, DataByteArray> rMap = new HashMap<String, DataByteArray>();
    rMap.put("r:1", new DataByteArray("r1".getBytes()));
    rMap.put("r:2", new DataByteArray("r2".getBytes()));
    rMap.put("r:3", new DataByteArray("r3".getBytes()));
    Assert.assertEquals(rMap, t.get(2));
    Assert.assertEquals(new DataByteArray("m2".getBytes()), t.get(3));
    HashMap<String, DataByteArray> bMap = new HashMap<String, DataByteArray>();
    bMap.put("b:1", new DataByteArray("b1".getBytes()));
    bMap.put("b:2", new DataByteArray("b2".getBytes()));
    Assert.assertEquals(bMap, t.get(4));
}
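
These tests pack their column-level entries into a single row-level pair with WholeRowIterator.encodeRow; the encoding is reversible via the matching decodeRow. A minimal round-trip sketch (variable names are ours):

// decodeRow recovers the column-level entries packed by encodeRow.
final Value encoded = WholeRowIterator.encodeRow(keys, values);
final SortedMap<Key, Value> decoded = WholeRowIterator.decodeRow(new Key("1"), encoded);
for (final Map.Entry<Key, Value> entry : decoded.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue());
}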

41. TestAccumuloStorage#testReadMultipleScalars()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadMultipleScalars() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage("col1,col3,col5");
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    // Filtering by AccumuloInputFormat isn't applied here since we're
    // short-circuiting things (see the sketch after this test)
    keys.add(new Key("1", "col1", ""));
    // keys.add(new Key("1", "col2", ""));
    keys.add(new Key("1", "col3", ""));
    // keys.add(new Key("1", "col4", ""));
    keys.add(new Key("1", "col5", ""));
    values.add(new Value("value1".getBytes()));
    // values.add(new Value("value2".getBytes()));
    values.add(new Value("value3".getBytes()));
    // values.add(new Value("value4".getBytes()));
    values.add(new Value("value5".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(4, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    Assert.assertEquals("value1", t.get(1).toString());
    Assert.assertEquals("value3", t.get(2).toString());
    Assert.assertEquals("value5", t.get(3).toString());
}
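
The comment in this test notes that AccumuloInputFormat filtering is bypassed. For context, in a live job the same restriction would typically be requested server-side with fetchColumns (job setup omitted; passing a null qualifier fetches the whole column family):

// Sketch only: restrict the scan to the three column families the test uses,
// so unwanted columns never reach the client.
final List<Pair<Text, Text>> columns = new ArrayList<>();
columns.add(new Pair<>(new Text("col1"), null));
columns.add(new Pair<>(new Text("col3"), null));
columns.add(new Pair<>(new Text("col5"), null));
AccumuloInputFormat.fetchColumns(job, columns);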

42. TestAccumuloStorage#testReadMultipleColumnsNoColfamAggregate()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadMultipleColumnsNoColfamAggregate() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage();
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "col1", "cq1"));
    keys.add(new Key("1", "col1", "cq2"));
    keys.add(new Key("1", "col1", "cq3"));
    keys.add(new Key("1", "col2", "cq1"));
    keys.add(new Key("1", "col3", "cq1"));
    keys.add(new Key("1", "col3", "cq2"));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    values.add(new Value("value3".getBytes()));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(2, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    InternalMap map = new InternalMap();
    map.put("col1:cq1", new DataByteArray("value1"));
    map.put("col1:cq2", new DataByteArray("value2"));
    map.put("col1:cq3", new DataByteArray("value3"));
    map.put("col2:cq1", new DataByteArray("value1"));
    map.put("col3:cq1", new DataByteArray("value1"));
    map.put("col3:cq2", new DataByteArray("value2"));
    Assert.assertEquals(map, t.get(1));
}

43. TestAccumuloStorage#testReadMultipleColumnsEmptyString()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadMultipleColumnsEmptyString() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage("");
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "col1", "cq1"));
    keys.add(new Key("1", "col1", "cq2"));
    keys.add(new Key("1", "col1", "cq3"));
    keys.add(new Key("1", "col2", "cq1"));
    keys.add(new Key("1", "col3", "cq1"));
    keys.add(new Key("1", "col3", "cq2"));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    values.add(new Value("value3".getBytes()));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(2, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    InternalMap map = new InternalMap();
    map.put("col1:cq1", new DataByteArray("value1"));
    map.put("col1:cq2", new DataByteArray("value2"));
    map.put("col1:cq3", new DataByteArray("value3"));
    map.put("col2:cq1", new DataByteArray("value1"));
    map.put("col3:cq1", new DataByteArray("value1"));
    map.put("col3:cq2", new DataByteArray("value2"));
    Assert.assertEquals(map, t.get(1));
}

44. TestAccumuloStorage#testReadMultipleColumnsAggregateColfamsAsteriskEmptyColfam()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadMultipleColumnsAggregateColfamsAsteriskEmptyColfam() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage("*");
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "col1", ""));
    keys.add(new Key("1", "col2", ""));
    keys.add(new Key("1", "col3", ""));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    values.add(new Value("value3".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(2, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    InternalMap map = new InternalMap();
    map.put("col1", new DataByteArray("value1"));
    map.put("col2", new DataByteArray("value2"));
    map.put("col3", new DataByteArray("value3"));
    Assert.assertEquals(map, t.get(1));
}

45. TestAccumuloStorage#testReadMultipleColumnsAggregateColfamsAsterisk()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadMultipleColumnsAggregateColfamsAsterisk() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage("*");
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "col1", "cq1"));
    keys.add(new Key("1", "col1", "cq2"));
    keys.add(new Key("1", "col1", "cq3"));
    keys.add(new Key("1", "col2", "cq1"));
    keys.add(new Key("1", "col3", "cq1"));
    keys.add(new Key("1", "col3", "cq2"));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    values.add(new Value("value3".getBytes()));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(2, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    InternalMap map = new InternalMap();
    map.put("col1:cq1", new DataByteArray("value1"));
    map.put("col1:cq2", new DataByteArray("value2"));
    map.put("col1:cq3", new DataByteArray("value3"));
    map.put("col2:cq1", new DataByteArray("value1"));
    map.put("col3:cq1", new DataByteArray("value1"));
    map.put("col3:cq2", new DataByteArray("value2"));
    Assert.assertEquals(map, t.get(1));
}

46. TestAccumuloStorage#testReadSingleColumn()

Project: pig
File: TestAccumuloStorage.java
@Test
public void testReadSingleColumn() throws IOException, ParseException {
    AccumuloStorage storage = new AccumuloStorage();
    List<Key> keys = Lists.newArrayList();
    List<Value> values = Lists.newArrayList();
    keys.add(new Key("1", "col1", "cq1"));
    keys.add(new Key("1", "col1", "cq2"));
    keys.add(new Key("1", "col1", "cq3"));
    values.add(new Value("value1".getBytes()));
    values.add(new Value("value2".getBytes()));
    values.add(new Value("value3".getBytes()));
    Key k = new Key("1");
    Value v = WholeRowIterator.encodeRow(keys, values);
    Tuple t = storage.getTuple(k, v);
    Assert.assertEquals(2, t.size());
    Assert.assertEquals("1", t.get(0).toString());
    InternalMap map = new InternalMap();
    map.put("col1:cq1", new DataByteArray("value1"));
    map.put("col1:cq2", new DataByteArray("value2"));
    map.put("col1:cq3", new DataByteArray("value3"));
    Assert.assertEquals(map, t.get(1));
}

47. AccumuloMrsPyramidMetadataFileWriter#write()

Project: mrgeo
File: AccumuloMrsPyramidMetadataFileWriter.java
@Override
public void write(MrsPyramidMetadata metadata) throws IOException {
    // write the metadata object to hdfs
    //    Properties mrgeoAccProps = AccumuloConnector.getAccumuloProperties();
    //    ColumnVisibility cv;
    //    if(mrgeoAccProps.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_VIZ) == null){
    //      cv = new ColumnVisibility();
    //    } else {
    //      cv = new ColumnVisibility(mrgeoAccProps.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_VIZ));
    //    }
    Path path = new Path(workDir, "meta.rf");
    FileSystem fs = HadoopFileUtils.getFileSystem(path);
    if (fs.exists(path)) {
        fs.delete(path, false);
    }
    log.debug("Saving metadata to " + path.toString());
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    String metadataStr = null;
    try {
        metadata.save(baos);
        metadataStr = baos.toString();
        baos.close();
    } catch (IOException ioe) {
        throw new RuntimeException(ioe.getMessage(), ioe);
    }
    FileSKVWriter metaWrite = FileOperations.getInstance().openWriter(path.toString(), fs, fs.getConf(), AccumuloConfiguration.getDefaultConfiguration());
    metaWrite.startDefaultLocalityGroup();
    Key metKey = new Key(MrGeoAccumuloConstants.MRGEO_ACC_METADATA, MrGeoAccumuloConstants.MRGEO_ACC_METADATA, MrGeoAccumuloConstants.MRGEO_ACC_CQALL);
    Value metValue = new Value(metadataStr.getBytes());
    metaWrite.append(metKey, metValue);
    metaWrite.close();
}
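
For symmetry, a minimal read-back sketch for the file written above, assuming the same Accumulo 1.x FileOperations API (openReader with seekToBeginning set to true positions the iterator on the first entry):

// Sketch: reopen the RFile and pull back the single metadata entry.
FileSKVIterator metaRead = FileOperations.getInstance().openReader(
        path.toString(), true, fs, fs.getConf(), AccumuloConfiguration.getDefaultConfiguration());
try {
    if (metaRead.hasTop()) {
        // The entry written by write(): the metadata key and the
        // serialized MrsPyramidMetadata bytes.
        Key storedKey = metaRead.getTopKey();
        Value storedValue = metaRead.getTopValue();
    }
} finally {
    metaRead.close();
}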