org.apache.accumulo.core.client.mock.MockInstance

Here are examples of the Java API class org.apache.accumulo.core.client.mock.MockInstance, taken from open-source projects.

1. SpatialQueryExample#setupDataStores()

Project: geowave
Source File: SpatialQueryExample.java
View license
private void setupDataStores() throws AccumuloSecurityException, AccumuloException {
    // Spin up an in-memory Accumulo instance to back the GeoWave stores.
    final MockInstance instance = new MockInstance();
    // MockInstance accepts any credentials; "user" / "password" are used
    // here as the connection tokens.
    final Connector connector = instance.getConnector("user", new PasswordToken("password"));
    final BasicAccumuloOperations operations = new BasicAccumuloOperations(connector);
    // Both stores share the same underlying Accumulo operations.
    adapterStore = new AccumuloAdapterStore(operations);
    dataStore = new AccumuloDataStore(operations);
}

2. AccumuloRangeQueryTest#ingestGeometries()

Project: geowave
Source File: AccumuloRangeQueryTest.java
View license
@Before
public void ingestGeometries() throws AccumuloException, AccumuloSecurityException, IOException {
    // Back the data store with an in-memory Accumulo instance
    // ("root" with an empty password token).
    final Connector mockConnector =
        new MockInstance().getConnector("root", new PasswordToken(new byte[0]));
    mockDataStore = new AccumuloDataStore(new BasicAccumuloOperations(mockConnector));
    adapter = new TestGeometryAdapter();
    index = new SpatialDimensionalityTypeProvider().createPrimaryIndex();
    // Ingest the test geometry; try-with-resources closes (and flushes) the writer.
    try (final IndexWriter writer = mockDataStore.createWriter(adapter, index)) {
        writer.write(testdata);
    }
}

3. AccumuloSelectivityEvalDAOTest#init()

View license
@Before
public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
    // In-memory Accumulo instance named "accumulo"; any credentials work.
    mock = new MockInstance("accumulo");
    conn = mock.getConnector("user", new PasswordToken("pass".getBytes()));
    // Small batch-writer settings suitable for unit tests.
    config = new BatchWriterConfig();
    config.setMaxMemory(1000);
    config.setMaxLatency(1000, TimeUnit.SECONDS);
    config.setMaxWriteThreads(10);
    // Drop leftover stats tables so each test starts from a clean slate.
    for (final String table : new String[] { "rya_prospects", "rya_selectivity" }) {
        if (conn.tableOperations().exists(table)) {
            conn.tableOperations().delete(table);
        }
    }
    arc = new AccumuloRdfConfiguration();
    res = new ProspectorServiceEvalStatsDAO(conn, arc);
    arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
    arc.setMaxRangesForScanner(300);
}

4. JoinSelectStatisticsTest#init()

View license
@Before
public void init() throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException {
    // Fresh in-memory instance; authenticate as root with an empty password.
    final MockInstance mockInstance = new MockInstance(INSTANCE_NAME);
    c = mockInstance.getConnector("root", new PasswordToken(""));
    // Recreate each Rya table from scratch: drop it if a previous run
    // left it behind, then create it empty.
    for (final String table : new String[] { "rya_spo", "rya_prospects", "rya_selectivity" }) {
        if (c.tableOperations().exists(table)) {
            c.tableOperations().delete(table);
        }
        c.tableOperations().create(table);
    }
    ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(getConfig()));
}

5. QueryJoinSelectOptimizerTest#init()

View license
@Before
public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
    // In-memory Accumulo instance named "accumulo"; any credentials work.
    mock = new MockInstance("accumulo");
    conn = mock.getConnector("user", new PasswordToken("pass".getBytes()));
    // Small batch-writer settings suitable for unit tests.
    config = new BatchWriterConfig();
    config.setMaxMemory(1000);
    config.setMaxLatency(1000, TimeUnit.SECONDS);
    config.setMaxWriteThreads(10);
    // Drop leftover stats tables so each test starts from a clean slate.
    for (final String table : new String[] { "rya_prospects", "rya_selectivity" }) {
        if (conn.tableOperations().exists(table)) {
            conn.tableOperations().delete(table);
        }
    }
    // Configure the RDF layer before handing it to the stats DAO.
    arc = new AccumuloRdfConfiguration();
    arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
    arc.setMaxRangesForScanner(300);
    res = new ProspectorServiceEvalStatsDAO(conn, arc);
}

6. RdfCloudTripleStoreSelectivityEvaluationStatisticsTest#init()

View license
@Before
public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
    // In-memory Accumulo instance named "accumulo"; any credentials work.
    mock = new MockInstance("accumulo");
    conn = mock.getConnector("user", new PasswordToken("pass".getBytes()));
    // Small batch-writer settings suitable for unit tests.
    config = new BatchWriterConfig();
    config.setMaxMemory(1000);
    config.setMaxLatency(1000, TimeUnit.SECONDS);
    config.setMaxWriteThreads(10);
    // Drop leftover stats tables so each test starts from a clean slate.
    for (final String table : new String[] { "rya_prospects", "rya_selectivity" }) {
        if (conn.tableOperations().exists(table)) {
            conn.tableOperations().delete(table);
        }
    }
    arc = new AccumuloRdfConfiguration();
    arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
    arc.setMaxRangesForScanner(300);
}

7. SimpleIngestTest#setUp()

Project: geowave
Source File: SimpleIngestTest.java
View license
@Before
public void setUp() {
    final MockInstance mockInstance = new MockInstance();
    Connector mockConnector = null;
    try {
        mockConnector = mockInstance.getConnector(AccumuloUser, AccumuloPass);
    } catch (AccumuloExceptionAccumuloSecurityException |  e) {
        LOGGER.error("Failed to create mock accumulo connection", e);
    }
    accumuloOperations = new BasicAccumuloOperations(mockConnector);
    indexStore = new AccumuloIndexStore(accumuloOperations);
    adapterStore = new AccumuloAdapterStore(accumuloOperations);
    statsStore = new AccumuloDataStatisticsStore(accumuloOperations);
    mockDataStore = new AccumuloDataStore(indexStore, adapterStore, statsStore, new AccumuloSecondaryIndexDataStore(accumuloOperations), new AccumuloAdapterIndexMappingStore(accumuloOperations), accumuloOperations, accumuloOptions);
    accumuloOptions.setCreateTable(true);
    accumuloOptions.setUseAltIndex(true);
    accumuloOptions.setPersistDataStatistics(true);
}

8. AccumuloDataStoreStatsTest#setUp()

View license
@Before
public void setUp() {
    final MockInstance mockInstance = new MockInstance();
    Connector mockConnector = null;
    try {
        mockConnector = mockInstance.getConnector("root", new PasswordToken(new byte[0]));
    } catch (AccumuloExceptionAccumuloSecurityException |  e) {
        LOGGER.error("Failed to create mock accumulo connection", e);
    }
    accumuloOperations = new BasicAccumuloOperations(mockConnector);
    indexStore = new AccumuloIndexStore(accumuloOperations);
    adapterStore = new AccumuloAdapterStore(accumuloOperations);
    statsStore = new AccumuloDataStatisticsStore(accumuloOperations);
    secondaryIndexDataStore = new AccumuloSecondaryIndexDataStore(accumuloOperations, new AccumuloOptions());
    mockDataStore = new AccumuloDataStore(indexStore, adapterStore, statsStore, secondaryIndexDataStore, new AccumuloAdapterIndexMappingStore(accumuloOperations), accumuloOperations, accumuloOptions);
}

9. AccumuloOptionsTest#setUp()

Project: geowave
Source File: AccumuloOptionsTest.java
View license
@Before
public void setUp() {
    final MockInstance mockInstance = new MockInstance();
    Connector mockConnector = null;
    try {
        mockConnector = mockInstance.getConnector("root", new PasswordToken(new byte[0]));
    } catch (AccumuloExceptionAccumuloSecurityException |  e) {
        LOGGER.error("Failed to create mock accumulo connection", e);
    }
    accumuloOperations = new BasicAccumuloOperations(mockConnector);
    indexStore = new AccumuloIndexStore(accumuloOperations);
    adapterStore = new AccumuloAdapterStore(accumuloOperations);
    statsStore = new AccumuloDataStatisticsStore(accumuloOperations);
    secondaryIndexDataStore = new AccumuloSecondaryIndexDataStore(accumuloOperations, new AccumuloOptions());
    mockDataStore = new AccumuloDataStore(indexStore, adapterStore, statsStore, secondaryIndexDataStore, new AccumuloAdapterIndexMappingStore(accumuloOperations), accumuloOperations, accumuloOptions);
}

10. TestAccumuloVertexFormat#testAccumuloInputOutput()

View license
/*
     Write a simple parent-child directed graph to Accumulo.
     Run a job which reads the values
     into subclasses that extend AccumuloVertex I/O formats.
     Check the output after the job.
     */
@Test
public void testAccumuloInputOutput() throws Exception {
    // This test only makes sense in local mode; skip under a real job tracker.
    if (System.getProperty("prop.mapred.job.tracker") != null) {
        if (log.isInfoEnabled())
            log.info("testAccumuloInputOutput: " + "Ignore this test if not local mode.");
        return;
    }
    File jarTest = new File(System.getProperty("prop.jarLocation"));
    if (!jarTest.exists()) {
        fail("Could not find Giraph jar at " + "location specified by 'prop.jarLocation'. " + "Make sure you built the main Giraph artifact?.");
    }
    //Write out vertices and edges out to a mock instance.
    MockInstance mockInstance = new MockInstance(INSTANCE_NAME);
    Connector c = mockInstance.getConnector("root", new byte[] {});
    c.tableOperations().create(TABLE_NAME);
    BatchWriter bw = c.createBatchWriter(TABLE_NAME, 10000L, 1000L, 4);
    // Two rows: 0001 -> child 0002, 0002 -> child 0003.
    Mutation m1 = new Mutation(new Text("0001"));
    m1.put(FAMILY, CHILDREN, new Value("0002".getBytes()));
    bw.addMutation(m1);
    Mutation m2 = new Mutation(new Text("0002"));
    m2.put(FAMILY, CHILDREN, new Value("0003".getBytes()));
    bw.addMutation(m2);
    if (log.isInfoEnabled())
        log.info("Writing mutations to Accumulo table");
    bw.close();
    Configuration conf = new Configuration();
    conf.set(AccumuloVertexOutputFormat.OUTPUT_TABLE, TABLE_NAME);
    /*
        Very important to initialize the formats before
        sending configuration to the GiraphJob. Otherwise
        the internally constructed Job in GiraphJob will
        not have the proper context initialization.
         */
    AccumuloInputFormat.setInputInfo(conf, USER, "".getBytes(), TABLE_NAME, new Authorizations());
    AccumuloInputFormat.setMockInstance(conf, INSTANCE_NAME);
    AccumuloOutputFormat.setOutputInfo(conf, USER, PASSWORD, true, null);
    AccumuloOutputFormat.setMockInstance(conf, INSTANCE_NAME);
    GiraphJob job = new GiraphJob(conf, getCallingMethodName());
    setupConfiguration(job);
    GiraphConfiguration giraphConf = job.getConfiguration();
    giraphConf.setVertexClass(EdgeNotification.class);
    giraphConf.setVertexInputFormatClass(AccumuloEdgeInputFormat.class);
    giraphConf.setVertexOutputFormatClass(AccumuloEdgeOutputFormat.class);
    HashSet<Pair<Text, Text>> columnsToFetch = new HashSet<Pair<Text, Text>>();
    columnsToFetch.add(new Pair<Text, Text>(FAMILY, CHILDREN));
    AccumuloInputFormat.fetchColumns(job.getConfiguration(), columnsToFetch);
    if (log.isInfoEnabled())
        log.info("Running edge notification job using Accumulo input");
    assertTrue(job.run(true));
    Scanner scanner = c.createScanner(TABLE_NAME, new Authorizations());
    scanner.setRange(new Range("0002", "0002"));
    scanner.fetchColumn(FAMILY, OUTPUT_FIELD);
    boolean foundColumn = false;
    if (log.isInfoEnabled())
        log.info("Verify job output persisted correctly.");
    //make sure we found the qualifier.
    assertTrue(scanner.iterator().hasNext());
    //now we check to make sure the expected value from the job persisted correctly.
    for (Map.Entry<Key, Value> entry : scanner) {
        Text row = entry.getKey().getRow();
        assertEquals("0002", row.toString());
        Value value = entry.getValue();
        assertEquals("0001", ByteBufferUtil.toString(ByteBuffer.wrap(value.get())));
        foundColumn = true;
    }
    // Fixed: foundColumn was set inside the loop but never checked, so the
    // value assertions above were silently skipped if the second scan
    // (a fresh iterator from the for-each) returned no entries.
    assertTrue(foundColumn);
}