org.apache.metamodel.DataContext

Here are examples of the Java API org.apache.metamodel.DataContext taken from open source projects.

70 Examples
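
Before the harvested examples, here is a minimal sketch of the DataContext API itself. It assumes MetaModel's CSV module (org.apache.metamodel.csv) is on the classpath and that a local file named employees.csv exists; both the module choice and the file name are illustrative, and any other DataContext implementation exposes the same schema and query model.

import java.io.File;

import org.apache.metamodel.DataContext;
import org.apache.metamodel.csv.CsvDataContext;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.query.Query;
import org.apache.metamodel.schema.Table;

public class DataContextSketch {

    public static void main(final String[] args) {
        // Any DataContext implementation (CSV, Excel, JDBC, ...) works the same way.
        final DataContext dataContext = new CsvDataContext(new File("employees.csv"));
        // Navigate the schema model.
        final Table table = dataContext.getDefaultSchema().getTable(0);
        // Build a query with the fluent builder and execute it.
        final Query query = dataContext.query().from(table).selectAll().toQuery();
        query.setMaxRows(10);
        try (DataSet dataSet = dataContext.executeQuery(query)) {
            while (dataSet.next()) {
                System.out.println(dataSet.getRow());
            }
        }
    }
}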

19 Source : JaxbConfigurationInterceptor.java
with GNU Lesser General Public License v3.0
from schic

private DatastoreCatalogType interceptDatastoreCatalog(DataCleanerConfiguration configuration, final Map<String, MutableSchema> datastoreUsage) {
    final DatastoreCatalogType newDatastoreCatalog = new DatastoreCatalogType();
    final Set<Entry<String, MutableSchema>> datastoreUsageEntries = datastoreUsage.entrySet();
    for (final Entry<String, MutableSchema> entry : datastoreUsageEntries) {
        final String name = entry.getKey();
        Schema schema = entry.getValue();
        final Datastore datastore = configuration.getDatastoreCatalog().getDatastore(name);
        if (datastore != null) {
            // a comparator that takes the column number into account.
            final Comparator<Column> columnComparator = new Comparator<Column>() {

                @Override
                public int compare(Column o1, Column o2) {
                    if (o1.getTable() == o2.getTable()) {
                        int diff = o1.getColumnNumber() - o2.getColumnNumber();
                        if (diff != 0) {
                            return diff;
                        }
                    }
                    return o1.compareTo(o2);
                }
            };
            try (final DatastoreConnection connection = datastore.openConnection()) {
                final DataContext dataContext = connection.getDataContext();
                final JaxbPojoDatastoreAdaptor adaptor = new JaxbPojoDatastoreAdaptor(configuration);
                final Collection<PojoTableType> pojoTables = new ArrayList<PojoTableType>();
                List<Table> usageTables = schema.getTables();
                if (usageTables == null || usageTables.size() == 0) {
                    // an unspecified schema entry will be interpreted as an
                    // open-ended inclusion of schema information only
                    schema = dataContext.getDefaultSchema();
                    usageTables = dataContext.getDefaultSchema().getTables();
                }
                final String schemaName = schema.getName();
                for (final Table usageTable : usageTables) {
                    List<Column> usageColumns = usageTable.getColumns();
                    if (usageColumns == null || usageColumns.size() == 0) {
                        // an unspecified table entry will be interpreted by
                        // including all columns of that table
                        final String tableName = usageTable.getName();
                        final Schema schemaByName = dataContext.getSchemaByName(schemaName);
                        if (schemaByName == null) {
                            logger.error("Could not find schema by name: {}, skipping table: {}", schemaName, usageTable);
                        // usageColumns = new Column[0];
                        } else {
                            final Table table = schemaByName.getTableByName(tableName);
                            usageColumns = table.getColumns();
                        }
                    }
                    if (usageColumns != null && usageColumns.size() > 0) {
                        // Arrays.sort(usageColumns, columnComparator);
                        final int maxRows = REMARK_INCLUDE_IN_QUERY.equals(usageTable.getRemarks()) ? MAX_POJO_ROWS : 0;
                        final Table sourceTable = usageColumns.get(0).getTable();
                        try {
                            final PojoTableType pojoTable = adaptor.createPojoTable(dataContext, sourceTable, usageColumns, maxRows);
                            pojoTables.add(pojoTable);
                        } catch (Exception e) {
                            // allow omitting erroneous tables here.
                            logger.error("Failed to serialize table '" + sourceTable + "' of datastore '" + name + "' to POJO format: " + e.getMessage(), e);
                        }
                    }
                }
                final AbstractDatastoreType pojoDatastoreType = adaptor.createPojoDatastore(datastore.getName(), schemaName, pojoTables);
                pojoDatastoreType.setDescription(datastore.getDescription());
                newDatastoreCatalog.getJdbcDatastoreOrAccessDatastoreOrDynamodbDatastore().add(pojoDatastoreType);
            } catch (Exception e) {
                // allow omitting erroneous datastores here.
                logger.error("Failed to serialize datastore '" + name + "' to POJO format: " + e.getMessage(), e);
            }
        }
    }
    return newDatastoreCatalog;
}

19 Source : DatastoreSchemaController.java
with GNU Lesser General Public License v3.0
from schic

private List<Map<String, Object>> createSchemaList(DataContext dataContext) {
    List<Map<String, Object>> schemas = new ArrayList<>();
    for (Schema schema : dataContext.getSchemas()) {
        schemas.add(createSchemaMap(schema));
    }
    return schemas;
}

19 Source : AbstractFileBasedDatastoreDialog.java
with GNU Lesser General Public License v3.0
from schic

protected Table getPreviewTable(final DataContext dc) {
    return dc.getDefaultSchema().getTable(0);
}

19 Source : CliRunner.java
with GNU Lesser General Public License v3.0
from schic

private void printColumns(final DataCleanerConfiguration configuration) {
    final String datastoreName = _arguments.getDatastoreName();
    final String tableName = _arguments.getTableName();
    final String schemaName = _arguments.getSchemaName();
    if (datastoreName == null) {
        System.err.println("You need to specify the datastore name!");
    } else if (tableName == null) {
        System.err.println("You need to specify a table name!");
    } else {
        final Datastore ds = configuration.getDatastoreCatalog().getDatastore(datastoreName);
        if (ds == null) {
            System.err.println("No such datastore: " + datastoreName);
        } else {
            final DatastoreConnection con = ds.openConnection();
            final DataContext dc = con.getDataContext();
            final Schema schema;
            if (schemaName == null) {
                schema = dc.getDefaultSchema();
            } else {
                schema = dc.getSchemaByName(schemaName);
            }
            if (schema == null) {
                System.err.println("No such schema: " + schemaName);
            } else {
                final Table table = schema.getTableByName(tableName);
                if (table == null) {
                    write("No such table: " + tableName);
                } else {
                    final List<String> columnNames = table.getColumnNames();
                    write("Columns:");
                    write("--------");
                    for (final String columnName : columnNames) {
                        write(columnName);
                    }
                }
            }
            con.close();
        }
    }
}

19 Source : CliRunner.java
with GNU Lesser General Public License v3.0
from schic

private void printTables(final DataCleanerConfiguration configuration) {
    final String datastoreName = _arguments.getDatastoreName();
    final String schemaName = _arguments.getSchemaName();
    if (datastoreName == null) {
        System.err.println("You need to specify the datastore name!");
    } else {
        final Datastore ds = configuration.getDatastoreCatalog().getDatastore(datastoreName);
        if (ds == null) {
            System.err.println("No such datastore: " + datastoreName);
        } else {
            final DatastoreConnection con = ds.openConnection();
            final DataContext dc = con.getDataContext();
            final Schema schema;
            if (schemaName == null) {
                schema = dc.getDefaultSchema();
            } else {
                schema = dc.getSchemaByName(schemaName);
            }
            if (schema == null) {
                System.err.println("No such schema: " + schemaName);
            } else {
                final List<String> tableNames = schema.getTableNames();
                if (tableNames == null || tableNames.isEmpty()) {
                    System.err.println("No tables in schema!");
                } else {
                    write("Tables:");
                    write("-------");
                    for (final String tableName : tableNames) {
                        write(tableName);
                    }
                }
            }
            con.close();
        }
    }
}

19 Source : SchemaNavigator.java
with GNU Lesser General Public License v3.0
from schic

/**
 * A convenient component used for exploring/navigating the schema of a
 * {@link Datastore}. It is preferred to use this component instead of the
 * {@link DataContext} directly, since it is shared amongst connections while a
 * DataContext may be created per connection (depending on the datastore type).
 */
public final class SchemaNavigator {

    private final DataContext dataContext;

    public SchemaNavigator(final DataContext dataContext) {
        this.dataContext = dataContext;
    }

    public void refreshSchemas() {
        dataContext.refreshSchemas();
    }

    public Schema convertToSchema(final String schemaName) {
        return dataContext.getSchemaByName(schemaName);
    }

    public Schema[] getSchemas() {
        return dataContext.getSchemas().toArray(new Schema[0]);
    }

    public Schema getDefaultSchema() {
        return dataContext.getDefaultSchema();
    }

    public Schema getSchemaByName(final String name) {
        return dataContext.getSchemaByName(name);
    }

    public Table convertToTable(final String schemaName, final String tableName) {
        final Schema schema;
        if (schemaName == null) {
            schema = getDefaultSchema();
        } else {
            schema = getSchemaByName(schemaName);
        }
        if (schema == null) {
            throw new IllegalArgumentException("Schema " + schemaName + " not found. Available schema names are: " + dataContext.getSchemaNames());
        }
        final Table table;
        if (tableName == null) {
            if (schema.getTables().stream().filter(t -> t.getType() == TableType.TABLE).count() == 1) {
                table = schema.getTables().stream().filter(t -> t.getType() == TableType.TABLE).findFirst().get();
            } else {
                throw new IllegalArgumentException("No table name specified, and multiple options exist. Available table names are: " + schema.getTableNames());
            }
        } else {
            table = schema.getTableByName(tableName);
        }
        if (table == null) {
            throw new IllegalArgumentException("Table not found. Available table names are: " + schema.getTableNames());
        }
        return table;
    }

    public Column[] convertToColumns(final String schemaName, final String tableName, final String[] columnNames) {
        if (columnNames == null) {
            return null;
        }
        if (columnNames.length == 0) {
            return new Column[0];
        }
        final Table table = convertToTable(schemaName, tableName);
        final Column[] columns = new Column[columnNames.length];
        for (int i = 0; i < columns.length; i++) {
            columns[i] = table.getColumnByName(columnNames[i]);
        }
        return columns;
    }

    public Schema[] convertToSchemas(final String[] schemaNames) {
        final Schema[] result = new Schema[schemaNames.length];
        for (int i = 0; i < result.length; i++) {
            result[i] = convertToSchema(schemaNames[i]);
        }
        return result;
    }

    public Table[] convertToTables(final String[] tableNames) {
        final Table[] result = new Table[tableNames.length];
        for (int i = 0; i < result.length; i++) {
            result[i] = convertToTable(tableNames[i]);
        }
        return result;
    }

    public Table convertToTable(final String tableName) {
        return dataContext.getTableByQualifiedLabel(tableName);
    }

    public Column[] convertToColumns(final String[] columnNames) {
        final Column[] result = new Column[columnNames.length];
        for (int i = 0; i < result.length; i++) {
            result[i] = convertToColumn(columnNames[i]);
        }
        return result;
    }

    public Column convertToColumn(final String columnName) {
        return dataContext.getColumnByQualifiedLabel(columnName);
    }
}

19 Source : DataContextTraverser.java
with Apache License 2.0
from apache

/**
 * Utility object responsible for traversing the schema/table/column structures
 * of a {@link DataContext} based on String identifiers and names.
 *
 * This class will throw the appropriate exceptions if needed, which is more
 * communicative than the usual NPEs that would otherwise be thrown.
 */
public class DataContextTraverser {

    private final DataContext dataContext;

    public DataContextTraverser(DataContext dataContext) {
        this.dataContext = dataContext;
    }

    public Schema getSchema(String schemaName) {
        final Schema schema = dataContext.getSchemaByName(schemaName);
        if (schema == null) {
            throw new NoSuchSchemaException(schemaName);
        }
        return schema;
    }

    public Table getTable(String schemaName, String tableName) {
        final Table table = getSchema(schemaName).getTableByName(tableName);
        if (table == null) {
            throw new NoSuchTableException(tableName);
        }
        return table;
    }

    public Column getColumn(String schemaName, String tableName, String columnName) {
        final Column column = getTable(schemaName, tableName).getColumnByName(columnName);
        if (column == null) {
            throw new NoSuchColumnException(columnName);
        }
        return column;
    }
}
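
A minimal hypothetical usage of the traverser above; the schema, table and column names are illustrative only, and any DataContext implementation can be passed in.

public Column lookupCustomerEmail(final DataContext dataContext) {
    final DataContextTraverser traverser = new DataContextTraverser(dataContext);
    // Resolves step by step and throws NoSuchSchemaException, NoSuchTableException or
    // NoSuchColumnException when a name does not match, rather than returning null.
    return traverser.getColumn("PUBLIC", "CUSTOMERS", "EMAIL");
}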

18 Source : SampleCustomDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
public DatastoreConnection openConnection() {
    final DataContext dc = new XmlDomDataContext(xmlFile, false);
    return new DatastoreConnectionImpl<>(dc, this);
}

18 Source : DatastoreSynonymCatalog.java
with GNU Lesser General Public License v3.0
from schic

public Column getMasterTermColumn(final DatastoreConnection datastoreConnection) {
    final DataContext dataContext = datastoreConnection.getDataContext();
    final Column masterTermColumn = dataContext.getColumnByQualifiedLabel(_masterTermColumnPath);
    if (masterTermColumn == null) {
        throw new NoSuchColumnException(_masterTermColumnPath);
    }
    return masterTermColumn;
}

18 Source : OutputDataStreamDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dataContext = new OutputDataStreamDataContext(_outputDataStream);
    return new DatastoreConnectionImpl<>(dataContext, this);
}

18 Source : AbstractResourceBasedDatastoreDialog.java
with GNU Lesser General Public License v3.0
from schic

protected Table getPreviewTable(final DataContext dc) {
    final Schema schema = dc.getDefaultSchema();
    if (schema.getTableCount() == 0) {
        return null;
    }
    return schema.getTable(0);
}

18 Source : XmlDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final File file = new File(_filename);
    final DataContext dataContext;
    if (_tableDefs == null || _tableDefs.length == 0) {
        dataContext = new XmlDomDataContext(file, true);
    } else {
        dataContext = new XmlSaxDataContext(file, _tableDefs);
    }
    return new DatastoreConnectionImpl<>(dataContext, this);
}

18 Source : SasDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dataContext = new SasDataContext(_directory);
    return new DatastoreConnectionImpl<>(dataContext, this);
}

18 Source : OdbDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dc = new OpenOfficeDataContext(new File(_filename));
    return new DatastoreConnectionImpl<>(dc, this);
}

18 Source : ElasticSearchDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<UpdateableDataContext> createDatastoreConnection() {
    switch(_clientType) {
        case NODE:
        case TRANSPORT:
            throw new UnsupportedOperationException("Support for ElasticSearch 'node' or 'transport' clients has been dropped. " + "Please reconfigure datastore to use HTTP connection.");
        default:
            final DataContext dataContext;
            if (_tableDefs == null || _tableDefs.length == 0) {
                dataContext = new ElasticSearchRestDataContext(getClientForRestProtocol(), _indexName);
            } else {
                dataContext = new ElasticSearchRestDataContext(getClientForRestProtocol(), _indexName, _tableDefs);
            }
            return new UpdateableDatastoreConnectionImpl<>((ElasticSearchRestDataContext) dataContext, this);
    }
}

18 Source : DbaseDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dc = new DbaseDataContext(_filename);
    return new DatastoreConnectionImpl<>(dc, this);
}

18 Source : ArffDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dataContext = new ArffDataContext(getResource());
    return new DatastoreConnectionImpl<DataContext>(dataContext, this);
}

18 Source : AccessDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dc = new AccessDataContext(_filename);
    return new DatastoreConnectionImpl<>(dc, this);
}

18 Source : SampleDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final DataContext dataContext = new SampleDataContext();
    return new DatastoreConnectionImpl<>(dataContext, this);
}

18 Source : FuseStreamsComponentIntegrationTest.java
with GNU Lesser General Public License v3.0
from schic

@Test
public void testAssumptionsAboutOrderdb() throws Exception {
    try (DatastoreConnection connection = datastore.openConnection()) {
        final DataContext dataContext = connection.getDataContext();
        final Row countCustomers = MetaModelHelper.executeSingleRowQuery(dataContext, dataContext.query().from("customers").selectCount().toQuery());
        Assert.assertEquals(COUNT_CUSTOMERS, countCustomers.getValue(0));
        final Row countEmployees = MetaModelHelper.executeSingleRowQuery(dataContext, dataContext.query().from("employees").selectCount().toQuery());
        Assert.assertEquals(COUNT_EMPLOYEES, countEmployees.getValue(0));
    }
}

17 Source : JaxbPojoDatastoreAdaptor.java
with GNU Lesser General Public License v3.0
from schic

/**
 * Creates a serialized POJO copy of a datastore.
 *
 * @param datastore
 *            the datastore to copy
 * @param columns
 *            the columns to include, or null if all tables/columns should
 *            be included.
 * @param maxRowsToQuery
 *            the maximum number of records to query and include in the
 *            datastore copy. Keep this number reasonably low, or else the
 *            copy might cause out-of-memory issues (Both while reading and
 *            writing).
 * @return
 */
public AbstractDatastoreType createPojoDatastore(final Datastore datastore, final Set<Column> columns, final int maxRowsToQuery) {
    final PojoDatastoreType datastoreType = new PojoDatastoreType();
    datastoreType.setName(datastore.getName());
    datastoreType.setDescription(datastore.getDescription());
    try (DatastoreConnection con = datastore.openConnection()) {
        final DataContext dataContext = con.getDataContext();
        final Schema schema;
        final List<Table> tables;
        if (columns == null || columns.isEmpty()) {
            schema = dataContext.getDefaultSchema();
            tables = schema.getTables();
        } else {
            tables = Arrays.asList(MetaModelHelper.getTables(columns));
            // TODO: There's a possibility that tables span multiple
            // schemas, but we cannot currently support that in a
            // PojoDatastore, so we just pick the first and cross our
            // fingers.
            schema = tables.get(0).getSchema();
        }
        datastoreType.setSchemaName(schema.getName());
        for (final Table table : tables) {
            final List<Column> usedColumns;
            if (columns == null || columns.isEmpty()) {
                usedColumns = table.getColumns();
            } else {
                usedColumns = Arrays.asList(MetaModelHelper.getTableColumns(table, columns));
            }
            final PojoTableType tableType = createPojoTable(dataContext, table, usedColumns, maxRowsToQuery);
            datastoreType.getTable().add(tableType);
        }
    }
    return datastoreType;
}
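
A hypothetical call to the method above, following its javadoc: the configuration, adaptor and datastore instances are assumed to exist elsewhere (see the JaxbConfigurationInterceptor example earlier), and the datastore name is illustrative. Passing null for the columns includes all tables and columns, and the row limit is kept low to avoid out-of-memory issues while reading and writing.

final JaxbPojoDatastoreAdaptor adaptor = new JaxbPojoDatastoreAdaptor(configuration);
final Datastore datastore = configuration.getDatastoreCatalog().getDatastore("orderdb");
// Serialize the whole datastore, but copy at most 20 rows per table.
final AbstractDatastoreType pojoCopy = adaptor.createPojoDatastore(datastore, null, 20);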

17 Source : DefaultBootstrapOptions.java
with GNU Lesser General Public License v3.0
from schic

@Override
public void initializeSingleDatastoreJob(final AnalysisJobBuilder analysisJobBuilder, final DataContext dataContext, final InjectorBuilder injectorBuilder) {
// do nothing
}

17 Source : AccessDatastoreTest.java
with GNU Lesser General Public License v3.0
from schic

public void testGetDatastoreConnection() throws Exception {
    final AccessDatastore ds = new AccessDatastore("foobar", "src/test/resources/developers.mdb");
    replacedertEquals("foobar", ds.getName());
    final DatastoreConnection con = ds.openConnection();
    final DataContext dataContext = con.getDataContext();
    replacedertEquals("[information_schema, developers.mdb]", dataContext.getSchemaNames().toString());
    final List<String> tableNames = dataContext.getDefaultSchema().getTableNames();
    replacedertEquals("[developer, product]", tableNames.toString());
}

16 Source : DatastoreQueryController.java
with GNU Lesser General Public License v3.0
from schic

private Map<String, Object> getJsonResult(String tenant, String datastoreName, String query, HttpServletResponse response) throws IOException {
    datastoreName = datastoreName.replaceAll("\\+", " ");
    final DataCleanerConfiguration configuration = _tenantContextFactory.getContext(tenant).getConfiguration();
    final Datastore ds = configuration.getDatastoreCatalog().getDatastore(datastoreName);
    if (ds == null) {
        response.sendError(HttpServletResponse.SC_NOT_FOUND, "No such datastore: " + datastoreName);
        return null;
    }
    String username = getUsername();
    if (StringUtils.isNullOrEmpty(query)) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, "No query defined");
        return null;
    }
    logger.info("Serving query result of datastore {} to user: {}. Query: {}", new Object[] { datastoreName, username, query });
    try (final DatastoreConnection con = ds.openConnection()) {
        final DataContext dataContext = con.getDataContext();
        try (final DataSet dataSet = dataContext.executeQuery(query)) {
            return getJsonResult(dataSet);
        }
    }
}

16 Source : ForeignKey.java
with GNU Lesser General Public License v3.0
from schic

public Column resolveForeignColumn(final DatastoreCatalog datastoreCatalog) {
    final Datastore datastore = datastoreCatalog.getDatastore(getForeignDatastoreName());
    if (datastore == null) {
        return null;
    }
    try (DatastoreConnection connection = datastore.openConnection()) {
        final DataContext dataContext = connection.getDataContext();
        final Schema schema = dataContext.getSchemaByName(getForeignSchemaName());
        if (schema == null) {
            return null;
        }
        final Table table = schema.getTableByName(getForeignTableName());
        if (table == null) {
            return null;
        }
        return table.getColumnByName(getForeignColumnName());
    }
}

16 Source : DatastoreOutputWriterFactoryTest.java
with GNU Lesser General Public License v3.0
from schic

public void testMultiThreadedWriting() throws Exception {
    final AtomicInteger datastoreCount = new AtomicInteger();
    final OutputWriterScenarioHelper scenarioHelper = new OutputWriterScenarioHelper();
    final DatastoreCreationDelegate creationDelegate = new DatastoreCreationDelegate() {

        @Override
        public synchronized void createDatastore(final Datastore datastore) {
            if (_datastore != null) {
                assertEquals(_datastore, datastore);
            }
            _datastore = datastore;
            datastoreCount.incrementAndGet();
        }
    };
    final InputColumn<?>[] columns = scenarioHelper.getColumns().toArray(new InputColumn[0]);
    // creating 9 similar writers that all write at the same time
    final Thread[] threads = new Thread[9];
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new Thread() {

            @Override
            public void run() {
                try {
                    final OutputWriter writer = DatastoreOutputWriterFactory.getWriter(OUTPUT_DIR, creationDelegate, "ds", "tab", false, columns);
                    scenarioHelper.writeExampleData(writer);
                } catch (final RuntimeException e) {
                    _exception = e;
                }
            }
        };
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }
    if (_exception != null) {
        throw _exception;
    }
    assertEquals(9, datastoreCount.get());
    assertNotNull(_datastore);
    try (DatastoreConnection connection = _datastore.openConnection()) {
        final DataContext dc = connection.getDataContext();
        dc.refreshSchemas();
        final List<String> tableNames = dc.getDefaultSchema().getTableNames();
        replacedertEquals("[TAB_1, TAB_2, TAB_3, TAB_4, TAB_5, TAB_6, TAB_7, TAB_8, TAB_9]", tableNames.stream().sorted().collect(Collectors.toList()).toString());
    }
}

16 Source : AbstractResourceBasedDatastoreDialog.java
with GNU Lesser General Public License v3.0
from schic

private DataSet getPreviewData(final Resource resource) {
    if (!isPreviewDataAvailable()) {
        logger.info("Not displaying preview table because isPreviewDataAvailable() returned false");
        return null;
    }
    logger.info("Attempting to fetch preview data from resource: {}", resource);
    final D datastore = getPreviewDatastore(resource);
    try (DatastoreConnection con = datastore.openConnection()) {
        final DataContext dc = con.getDataContext();
        final Table table = getPreviewTable(dc);
        if (table == null) {
            logger.info("Not displaying preview because getPreviewTable(..) returned null");
            return null;
        }
        List<Column> columns = table.getColumns();
        if (columns.size() > getPreviewColumns()) {
            // include max 10 columns
            columns = columns.stream().limit(getPreviewColumns()).collect(Collectors.toList());
        }
        final Query q = dc.query().from(table).select(columns).toQuery();
        q.setMaxRows(PREVIEW_ROWS);
        return dc.executeQuery(q);
    }
}

16 Source : CompositeDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final List<DataContext> dataContexts = new ArrayList<>(_datastores.size());
    final List<Closeable> closeables = new ArrayList<>(_datastores.size());
    for (final Datastore datastore : _datastores) {
        final DatastoreConnection con = datastore.openConnection();
        final DataContext dc = con.getDataContext();
        closeables.add(con);
        dataContexts.add(dc);
    }
    final Closeable[] closeablesArray = closeables.toArray(new Closeable[closeables.size()]);
    final CompositeDataContext dataContext = new CompositeDataContext(dataContexts);
    return new DatastoreConnectionImpl<>(dataContext, this, closeablesArray);
}

16 Source : SchemaNavigatorTest.java
with GNU Lesser General Public License v3.0
from schic

public void testSchemaWithDot() throws Exception {
    final DataContext dc = new CsvDataContext(new File("src/test/resources/employees.csv"));
    assertEquals(2, dc.getDefaultSchema().getTable(0).getColumnCount());
    final SchemaNavigator sn = new SchemaNavigator(dc);
    final Column column = sn.convertToColumn("resources.employees.csv.email");
    replacedertEquals("Column[name=email,columnNumber=1,type=STRING,nullable=true,nativeType=null,columnSize=null]", column.toString());
}

16 Source : TableDataController.java
with Apache License 2.0
from apache

@RequestMapping(method = RequestMethod.GET)
@ResponseBody
public QueryResponse get(@PathVariable("tenant") String tenantId, @PathVariable("dataContext") String dataSourceName, @PathVariable("schema") String schemaId, @PathVariable("table") String tableId, @RequestParam(value = "offset", required = false) Integer offset, @RequestParam(value = "limit", required = false) Integer limit) {
    final TenantContext tenantContext = tenantRegistry.getTenantContext(tenantId);
    final DataContext dataContext = tenantContext.getDataSourceRegistry().openDataContext(dataSourceName);
    final DataContextTraverser traverser = new DataContextTraverser(dataContext);
    final Table table = traverser.getTable(schemaId, tableId);
    final Query query = dataContext.query().from(table).selectAll().toQuery();
    return QueryController.executeQuery(tenantContext, dataSourceName, dataContext, query, offset, limit);
}

16 Source : TableController.java
with Apache License 2.0
from apache

@RequestMapping(method = RequestMethod.GET)
@ResponseBody
public GetTableResponse get(@PathVariable("tenant") String tenantId, @PathVariable("dataContext") String dataSourceName, @PathVariable("schema") String schemaId, @PathVariable("table") String tableId) {
    final TenantContext tenantContext = tenantRegistry.getTenantContext(tenantId);
    final DataContext dataContext = tenantContext.getDataSourceRegistry().openDataContext(dataSourceName);
    final DataContextTraverser traverser = new DataContextTraverser(dataContext);
    final Table table = traverser.getTable(schemaId, tableId);
    final String tenantName = tenantContext.getTenantName();
    final UriBuilder uriBuilder = UriBuilder.fromPath("/{tenant}/{dataContext}/s/{schema}/t/{table}/c/{column}");
    final String tableName = table.getName();
    final String schemaName = table.getSchema().getName();
    final List<GetTableResponseColumns> columnsLinks = table.getColumnNames().stream().map(c -> {
        final String uri = uriBuilder.build(tenantName, dataSourceName, schemaName, tableName, c).toString();
        return new GetTableResponseColumns().name(c).uri(uri);
    }).collect(Collectors.toList());
    final GetTableResponse resp = new GetTableResponse();
    resp.type("table");
    resp.name(tableName);
    resp.schema(schemaName);
    resp.datasource(dataSourceName);
    resp.tenant(tenantName);
    resp.columns(columnsLinks);
    return resp;
}

16 Source : DataContextSupplier.java
with Apache License 2.0
from apache

@Override
public DataContext get() {
    final DataContextFactoryRegistry registry = getRegistryForTenant();
    final DataContext dataContext = registry.createDataContext(dataContextProperties);
    return dataContext;
}

15 Source : DatastoreQueryController.java
with GNU Lesser General Public License v3.0
from schic

private void queryDatastoreHtml(String tenant, String datastoreName, String query, HttpServletResponse response) throws IOException {
    datastoreName = datastoreName.replaceAll("\\+", " ");
    final DataCleanerConfiguration configuration = _tenantContextFactory.getContext(tenant).getConfiguration();
    final Datastore ds = configuration.getDatastoreCatalog().getDatastore(datastoreName);
    if (ds == null) {
        response.sendError(HttpServletResponse.SC_NOT_FOUND, "No such datastore: " + datastoreName);
        return;
    }
    String username = getUsername();
    if (StringUtils.isNullOrEmpty(query)) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, "No query defined");
        return;
    }
    logger.info("Serving query result of datastore {} to user: {}. Query: {}", new Object[] { datastoreName, username, query });
    try (final DatastoreConnection con = ds.openConnection()) {
        final DataContext dataContext = con.getDataContext();
        try (final DataSet dataSet = dataContext.executeQuery(query)) {
            final Writer writer = response.getWriter();
            writer.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
            writer.write("\n<table xmlns=\"http://www.w3.org/1999/xhtml\">");
            writer.write("\n<thead>\n<tr>");
            final List<SelectItem> selectItems = dataSet.getSelectItems();
            for (SelectItem selectItem : selectItems) {
                final String label = selectItem.getSuperQueryAlias(false);
                writer.write("<th>");
                writer.write(StringEscapeUtils.escapeXml(label));
                writer.write("</th>");
            }
            writer.write("</tr>\n</thead>");
            writer.flush();
            writer.write("\n<tbody>");
            int rowNumber = 1;
            while (dataSet.next()) {
                writer.write("\n<tr>");
                Row row = dataSet.getRow();
                for (int i = 0; i < selectItems.size(); i++) {
                    Object value = row.getValue(i);
                    if (value == null) {
                        writer.write("<td />");
                    } else {
                        writer.write("<td>");
                        writer.write(StringEscapeUtils.escapeXml(ConvertToStringTransformer.transformValue(value)));
                        writer.write("</td>");
                    }
                }
                writer.write("</tr>");
                if (rowNumber % 20 == 0) {
                    writer.flush();
                }
                rowNumber++;
            }
            writer.write("\n</tbody>");
            writer.write("\n</table>");
        }
    } catch (QueryParserException e) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Query parsing failed: " + e.getMessage());
    }
}

15 Source : DatastoreDictionaryConnection.java
with GNU Lesser General Public License v3.0
from schic

@Override
public boolean containsValue(final String value) {
    final DataContext dataContext = _datastoreConnection.getDataContext();
    final Column column = _dictionary.getColumn(_datastoreConnection);
    try (DataSet dataSet = dataContext.query().from(column.getTable()).select(column).where(column).eq(value).maxRows(1).execute()) {
        if (dataSet.next()) {
            return true;
        }
        return false;
    }
}

15 Source : TokenizerAndValueDistributionTest.java
with GNU Lesser General Public License v3.0
from schic

public void testScenario() throws Throwable {
    final TaskRunner taskRunner = new MultiThreadedTaskRunner(5);
    final DataCleanerEnvironment environment = new DataCleanerEnvironmentImpl().withTaskRunner(taskRunner);
    final DataCleanerConfiguration configuration = new DataCleanerConfigurationImpl().withEnvironment(environment);
    final AnalysisRunner runner = new AnalysisRunnerImpl(configuration);
    final Datastore datastore = TestHelper.createSampleDatabaseDatastore("ds");
    final DatastoreConnection con = datastore.openConnection();
    final DataContext dc = con.getDataContext();
    final AnalysisJobBuilder analysisJobBuilder = new AnalysisJobBuilder(configuration);
    analysisJobBuilder.setDatastoreConnection(con);
    final Table table = dc.getDefaultSchema().getTableByName("EMPLOYEES");
    assertNotNull(table);
    final Column jobTitleColumn = table.getColumnByName("JOBTITLE");
    assertNotNull(jobTitleColumn);
    analysisJobBuilder.addSourceColumns(jobTitleColumn);
    final TransformerComponentBuilder<TokenizerTransformer> transformerJobBuilder = analysisJobBuilder.addTransformer(TokenizerTransformer.class);
    transformerJobBuilder.addInputColumn(analysisJobBuilder.getSourceColumns().get(0));
    transformerJobBuilder.setConfiguredProperty("Number of tokens", 4);
    final List<MutableInputColumn<?>> transformerOutput = transformerJobBuilder.getOutputColumns();
    assertEquals(4, transformerOutput.size());
    transformerOutput.get(0).setName("first word");
    transformerOutput.get(1).setName("second word");
    transformerOutput.get(2).setName("third words");
    transformerOutput.get(3).setName("fourth words");
    for (final InputColumn<?> inputColumn : transformerOutput) {
        final AnalyzerComponentBuilder<ValueDistributionAnalyzer> valueDistribuitionJobBuilder = analysisJobBuilder.addAnalyzer(ValueDistributionAnalyzer.class);
        valueDistribuitionJobBuilder.addInputColumn(inputColumn);
        valueDistribuitionJobBuilder.setConfiguredProperty("Record unique values", true);
        valueDistribuitionJobBuilder.setConfiguredProperty("Top n most frequent values", null);
        valueDistribuitionJobBuilder.setConfiguredProperty("Bottom n most frequent values", null);
    }
    final AnalysisJob analysisJob = analysisJobBuilder.toAnalysisJob();
    analysisJobBuilder.close();
    final AnalysisResultFuture resultFuture = runner.run(analysisJob);
    assertFalse(resultFuture.isDone());
    final List<AnalyzerResult> results = resultFuture.getResults();
    assertTrue(resultFuture.isDone());
    if (!resultFuture.isSuccessful()) {
        final List<Throwable> errors = resultFuture.getErrors();
        throw errors.get(0);
    }
    // expect 1 result for each token
    assertEquals(4, results.size());
    for (final AnalyzerResult analyzerResult : results) {
        final ValueDistributionAnalyzerResult result = (ValueDistributionAnalyzerResult) analyzerResult;
        final Collection<String> uniqueValues = new TreeSet<>(result.getUniqueValues());
        if ("first word".equals(result.getName())) {
            replacedertEquals("[[Sales->19], [VP->2], [<unique>->2]]", result.getValueCounts().toString());
            replacedertEquals(0, result.getNullCount());
            replacedertEquals(2, result.getUniqueCount().intValue());
        } else if ("second word".equals(result.getName())) {
            replacedertEquals("[[Rep->17], [Manager->3], [<unique>->2], [<null>->1]]", result.getValueCounts().toString());
            replacedertEquals(1, result.getNullCount());
            replacedertEquals(2, result.getUniqueCount().intValue());
        } else if ("third words".equals(result.getName())) {
            replacedertEquals("[[<null>->20], [<unique>->3]]", result.getValueCounts().toString());
            replacedertEquals(20, result.getNullCount());
            replacedertEquals(3, result.getUniqueCount().intValue());
            replacedertEquals("[(EMEA), (replacedAN,, (NA)]", uniqueValues.toString());
        } else if ("fourth words".equals(result.getName())) {
            replacedertEquals("[[<null>->22], [<unique>->1]]", result.getValueCounts().toString());
            replacedertEquals(22, result.getNullCount());
            replacedertEquals(1, result.getUniqueCount().intValue());
            replacedertEquals("[APAC)]", uniqueValues.toString());
        } else {
            fail("Unexpected columnName: " + result.getName());
        }
    }
    con.close();
    taskRunner.shutdown();
}

15 Source : FixedWidthDatastore.java
with GNU Lesser General Public License v3.0
from schic

@Override
protected UsageAwareDatastoreConnection<DataContext> createDatastoreConnection() {
    final FixedWidthConfiguration configuration = getConfiguration();
    final Resource resource = _resourceRef.get();
    final DataContext dataContext;
    if (resource == null) {
        logger.warn("Resource was not available, a local file reference will be created with path: {}", _filename);
        dataContext = new FixedWidthDataContext(new File(_filename), configuration);
    } else {
        dataContext = new FixedWidthDataContext(resource, configuration);
    }
    return new DatastoreConnectionImpl<>(dataContext, this);
}

15 Source : CreateExcelSpreadsheetAnalyzerTest.java
with GNU Lesser General Public License v3.0
from schic

@Test
public void testSortNumerical() throws Exception {
    final String filename = "target/exceltest-sortnumerical.xlsx";
    final CreateExcelSpreadsheetAnalyzer analyzer = new CreateExcelSpreadsheetAnalyzer();
    final InputColumn<String> testColumn = new MockInputColumn<>("TestColumn");
    // Point of focus: MockInputColumn is of type Integer, so it should be sorted as numbers
    final InputColumn<Integer> idColumn = new MockInputColumn<>("IdToSort", Integer.class);
    generatedFile = new File(filename);
    analyzer.file = generatedFile;
    assertNotNull(analyzer.file);
    analyzer.sheetName = "foo";
    analyzer.columns = new InputColumn<?>[2];
    analyzer.columns[0] = testColumn;
    analyzer.columns[1] = idColumn;
    analyzer.columnToBeSortedOn = idColumn;
    analyzer.validate();
    analyzer.init();
    final InputRow[] rows = new InputRow[13];
    rows[0] = new MockInputRow().put(testColumn, "row00").put(idColumn, 7);
    rows[1] = new MockInputRow().put(testColumn, "row01").put(idColumn, 9);
    rows[2] = new MockInputRow().put(testColumn, "row02").put(idColumn, 2);
    rows[3] = new MockInputRow().put(testColumn, "row03").put(idColumn, 3);
    rows[4] = new MockInputRow().put(testColumn, "row04").put(idColumn, 4);
    rows[5] = new MockInputRow().put(testColumn, "row05").put(idColumn, 12);
    rows[6] = new MockInputRow().put(testColumn, "row06").put(idColumn, 6);
    rows[7] = new MockInputRow().put(testColumn, "row07").put(idColumn, 0);
    rows[8] = new MockInputRow().put(testColumn, "row08").put(idColumn, 8);
    rows[9] = new MockInputRow().put(testColumn, "row09").put(idColumn, 1);
    rows[10] = new MockInputRow().put(testColumn, "row10").put(idColumn, 10);
    rows[11] = new MockInputRow().put(testColumn, "row11").put(idColumn, 11);
    rows[12] = new MockInputRow().put(testColumn, "row12").put(idColumn, 5);
    for (int i = 0; i < rows.length; i++) {
        analyzer.run(rows[i], i);
    }
    analyzer.getResult();
    final List<Integer> resultIds = new ArrayList<>(13);
    final ExcelDatastore outputDatastore = new ExcelDatastore(filename, new FileResource(analyzer.file), analyzer.file.getAbsolutePath());
    try (UpdateableDatastoreConnection outputDatastoreConnection = outputDatastore.openConnection()) {
        final DataContext dataContext = outputDatastoreConnection.getDataContext();
        try (DataSet dataSet = dataContext.query().from("foo").selectAll().execute()) {
            while (dataSet.next()) {
                final Row row = dataSet.getRow();
                final Integer idValue = Integer.parseInt((String) row.getValue(1));
                resultIds.add(idValue);
            }
        }
    }
    replacedertEquals("[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]", resultIds.toString());
}

15 Source : CreateExcelSpreadsheetAnalyzerTest.java
with GNU Lesser General Public License v3.0
from schic

@Test
public void testSortLexicographic() throws Exception {
    final String filename = "target/exceltest-sortlexicographic.xlsx";
    final CreateExcelSpreadsheetAnalyzer analyzer = new CreateExcelSpreadsheetAnalyzer();
    final InputColumn<String> testColumn = new MockInputColumn<>("TestColumn");
    // Point of focus: MockInputColumn is of type String, so it should be sorted alphabetically
    final InputColumn<String> idColumn = new MockInputColumn<>("IdToSort", String.class);
    generatedFile = new File(filename);
    analyzer.file = generatedFile;
    assertNotNull(analyzer.file);
    analyzer.sheetName = "foo";
    analyzer.columns = new InputColumn<?>[2];
    analyzer.columns[0] = testColumn;
    analyzer.columns[1] = idColumn;
    analyzer.columnToBeSortedOn = idColumn;
    analyzer.validate();
    analyzer.init();
    final InputRow[] rows = new InputRow[13];
    rows[0] = new MockInputRow().put(testColumn, "row00").put(idColumn, 7);
    rows[1] = new MockInputRow().put(testColumn, "row01").put(idColumn, 9);
    rows[2] = new MockInputRow().put(testColumn, "row02").put(idColumn, 2);
    rows[3] = new MockInputRow().put(testColumn, "row03").put(idColumn, 3);
    rows[4] = new MockInputRow().put(testColumn, "row04").put(idColumn, 4);
    rows[5] = new MockInputRow().put(testColumn, "row05").put(idColumn, 12);
    rows[6] = new MockInputRow().put(testColumn, "row06").put(idColumn, 6);
    rows[7] = new MockInputRow().put(testColumn, "row07").put(idColumn, 0);
    rows[8] = new MockInputRow().put(testColumn, "row08").put(idColumn, 8);
    rows[9] = new MockInputRow().put(testColumn, "row09").put(idColumn, 1);
    rows[10] = new MockInputRow().put(testColumn, "row10").put(idColumn, 10);
    rows[11] = new MockInputRow().put(testColumn, "row11").put(idColumn, 11);
    rows[12] = new MockInputRow().put(testColumn, "row12").put(idColumn, 5);
    for (int i = 0; i < rows.length; i++) {
        analyzer.run(rows[i], i);
    }
    analyzer.getResult();
    final List<Integer> resultIds = new ArrayList<>(13);
    final ExcelDatastore outputDatastore = new ExcelDatastore(filename, new FileResource(analyzer.file), analyzer.file.getAbsolutePath());
    try (UpdateableDatastoreConnection outputDatastoreConnection = outputDatastore.openConnection()) {
        final DataContext dataContext = outputDatastoreConnection.getDataContext();
        try (DataSet dataSet = dataContext.query().from("foo").selectAll().execute()) {
            while (dataSet.next()) {
                final Row row = dataSet.getRow();
                final Integer idValue = Integer.parseInt((String) row.getValue(1));
                resultIds.add(idValue);
            }
        }
    }
    replacedertEquals("[0, 1, 10, 11, 12, 2, 3, 4, 5, 6, 7, 8, 9]", resultIds.toString());
}

15 Source : CreateExcelSpreadsheetAnalyzerTest.java
with GNU Lesser General Public License v3.0
from schic

@Test
public void testSortLexicographicCaseSensitivity() throws Exception {
    final String filename = "target/exceltest-sortlexicographiccasesensitivity.xlsx";
    final CreateExcelSpreadsheetAnalyzer analyzer = new CreateExcelSpreadsheetAnalyzer();
    final InputColumn<String> sortColumn = new MockInputColumn<>("SortColumn");
    final InputColumn<String> someColumn = new MockInputColumn<>("SomeColumn", String.class);
    generatedFile = new File(filename);
    analyzer.file = generatedFile;
    assertNotNull(analyzer.file);
    analyzer.sheetName = "foo";
    assertNotNull(analyzer.file);
    analyzer.columns = new InputColumn<?>[2];
    analyzer.columns[0] = sortColumn;
    analyzer.columns[1] = someColumn;
    analyzer.columnToBeSortedOn = sortColumn;
    analyzer.validate();
    analyzer.init();
    final InputRow[] rows = new InputRow[8];
    rows[0] = new MockInputRow().put(sortColumn, "Claudia").put(someColumn, 1);
    rows[1] = new MockInputRow().put(sortColumn, "Dennis").put(someColumn, 2);
    rows[2] = new MockInputRow().put(sortColumn, "Kasper").put(someColumn, 3);
    rows[3] = new MockInputRow().put(sortColumn, "Tomasz").put(someColumn, 4);
    rows[4] = new MockInputRow().put(sortColumn, "claudia").put(someColumn, 5);
    rows[5] = new MockInputRow().put(sortColumn, "dennis").put(someColumn, 6);
    rows[6] = new MockInputRow().put(sortColumn, "kasper").put(someColumn, 7);
    rows[7] = new MockInputRow().put(sortColumn, "tomasz").put(someColumn, 8);
    for (int i = 0; i < rows.length; i++) {
        analyzer.run(rows[i], i);
    }
    analyzer.getResult();
    final List<String> resultIds = new ArrayList<>(13);
    final ExcelDatastore outputDatastore = new ExcelDatastore(filename, new FileResource(analyzer.file), analyzer.file.getAbsolutePath());
    try (UpdateableDatastoreConnection outputDatastoreConnection = outputDatastore.openConnection()) {
        final DataContext dataContext = outputDatastoreConnection.getDataContext();
        try (DataSet dataSet = dataContext.query().from("foo").selectAll().execute()) {
            while (dataSet.next()) {
                final Row row = dataSet.getRow();
                final String value = (String) row.getValue(0);
                resultIds.add(value);
            }
        }
    }
    replacedertEquals("[Claudia, claudia, Dennis, dennis, Kasper, kasper, Tomasz, tomasz]", resultIds.toString());
}

15 Source : UpdateTableAnalyzerTest.java
with GNU Lesser General Public License v3.0
from schic

public void testVanillaScenario() throws Exception {
    final InputColumn<Object> col1 = new MockInputColumn<>("in1", Object.class);
    final InputColumn<Object> col2 = new MockInputColumn<>("in2", Object.class);
    final InputColumn<Object> col3 = new MockInputColumn<>("in3", Object.class);
    final UpdateTableAnalyzer updateTableAnalyzer = new UpdateTableAnalyzer();
    updateTableAnalyzer.datastore = jdbcDatastore;
    updateTableAnalyzer.tableName = "test_table";
    updateTableAnalyzer.columnNames = new String[] { "baz", "foo" };
    updateTableAnalyzer.values = new InputColumn<?>[] { col3, col1 };
    updateTableAnalyzer.conditionColumnNames = new String[] { "bar" };
    updateTableAnalyzer.conditionValues = new InputColumn<?>[] { col2 };
    updateTableAnalyzer._componentContext = EasyMock.createMock(ComponentContext.class);
    updateTableAnalyzer.validate();
    updateTableAnalyzer.init();
    updateTableAnalyzer.run(new MockInputRow().put(col1, "aaa").put(col2, 1).put(col3, "hello"), 1);
    updateTableAnalyzer.run(new MockInputRow().put(col1, "bbb").put(col2, 2).put(col3, "world"), 1);
    final WriteDataResult result = updateTableAnalyzer.getResult();
    assertEquals(0, result.getErrorRowCount());
    assertEquals(0, result.getWrittenRowCount());
    assertEquals(2, result.getUpdatesCount());
    final UpdateableDatastoreConnection con = jdbcDatastore.openConnection();
    final DataContext dc = con.getDataContext();
    final DataSet ds = dc.query().from("test_table").select("foo", "bar", "baz").orderBy("bar").execute();
    assertTrue(ds.next());
    assertEquals("Row[values=[aaa, 1, hello]]", ds.getRow().toString());
    assertTrue(ds.next());
    assertEquals("Row[values=[bbb, 2, world]]", ds.getRow().toString());
    assertTrue(ds.next());
    assertEquals("Row[values=[c, 3, dolor]]", ds.getRow().toString());
    assertTrue(ds.next());
    assertEquals("Row[values=[d, 4, sit]]", ds.getRow().toString());
    assertTrue(ds.next());
    assertEquals("Row[values=[e, 5, amet]]", ds.getRow().toString());
    assertFalse(ds.next());
    ds.close();
}

15 Source : DataSourceController.java
with Apache License 2.0
from apache

@RequestMapping(method = RequestMethod.GET)
@ResponseBody
public GetDatasourceResponse get(@PathVariable("tenant") String tenantId, @PathVariable("datasource") String dataSourceName) {
    final TenantContext tenantContext = tenantRegistry.getTenantContext(tenantId);
    final String tenantName = tenantContext.getTenantName();
    final UriBuilder uriBuilder = UriBuilder.fromPath("/{tenant}/{dataContext}/s/{schema}");
    List<GetDatasourceResponseSchemas> schemaLinks;
    Boolean updateable;
    try {
        final DataContext dataContext = tenantContext.getDataSourceRegistry().openDataContext(dataSourceName);
        updateable = dataContext instanceof UpdateableDataContext;
        schemaLinks = dataContext.getSchemaNames().stream().map(s -> {
            final String uri = uriBuilder.build(tenantName, dataSourceName, s).toString();
            return new GetDatasourceResponseSchemas().name(s).uri(uri);
        }).collect(Collectors.toList());
    } catch (Exception e) {
        logger.warn("Failed to open for GET datasource '{}/{}'. No schemas will be listed.", tenantId, dataSourceName, e);
        updateable = null;
        schemaLinks = null;
    }
    final GetDatasourceResponse resp = new GetDatasourceResponse();
    resp.type("datasource");
    resp.name(dataSourceName);
    resp.tenant(tenantName);
    resp.updateable(updateable);
    resp.queryUri(UriBuilder.fromPath("/{tenant}/{dataContext}/query").build(tenantName, dataSourceName).toString());
    resp.schemas(schemaLinks);
    return resp;
}

14 Source : CompoundComponentRequirementRejectionStreetTest.java
with GNU Lesser General Public License v3.0
from schic

public void testScenario() throws Throwable {
    final AnalysisJob job;
    try (DatastoreConnection connection = datastore.openConnection()) {
        final DataContext dataContext = connection.getDataContext();
        final Table table = dataContext.getTableByQualifiedLabel("PUBLIC.CUSTOMERS");
        final Row row = MetaModelHelper.executeSingleRowQuery(dataContext, dataContext.query().from(table).selectCount().toQuery());
        assertEquals(recordsInTable, ((Number) row.getValue(0)).intValue());
        try (AnalysisJobBuilder jobBuilder = new AnalysisJobBuilder(configuration)) {
            jobBuilder.setDatastore(datastore);
            jobBuilder.addSourceColumns("CUSTOMERS.CONTACTFIRSTNAME");
            jobBuilder.addSourceColumns("CUSTOMERS.CONTACTLASTNAME");
            // although not semantically correct, we pretend that EVEN is
            // the
            // success-state in our cleansing street and that ODD is the
            // reject-state.
            final Category valid = org.datacleaner.test.mock.EvenOddFilter.Category.EVEN;
            final Category invalid = org.datacleaner.test.mock.EvenOddFilter.Category.ODD;
            final TransformerComponentBuilder<MockTransformer> trans1 = jobBuilder.addTransformer(MockTransformer.class);
            trans1.setName("trans1");
            trans1.addInputColumn(jobBuilder.getSourceColumns().get(0));
            final FilterComponentBuilder<EvenOddFilter, org.datacleaner.test.mock.EvenOddFilter.Category> filter1 = jobBuilder.addFilter(EvenOddFilter.class);
            filter1.setName("filter1");
            filter1.addInputColumn(trans1.getOutputColumns().get(0));
            final TransformerComponentBuilder<MockTransformer> trans2 = jobBuilder.addTransformer(MockTransformer.class);
            trans2.setName("trans2");
            trans2.addInputColumn(jobBuilder.getSourceColumns().get(1));
            trans2.setRequirement(filter1, valid);
            final FilterComponentBuilder<EvenOddFilter, org.datacleaner.test.mock.EvenOddFilter.Category> filter2 = jobBuilder.addFilter(EvenOddFilter.class);
            filter2.setName("filter2");
            filter2.addInputColumn(trans2.getOutputColumns().get(0));
            final AnalyzerComponentBuilder<MockAnalyzer> analyzer1 = jobBuilder.addAnalyzer(MockAnalyzer.class);
            analyzer1.setName("success");
            analyzer1.addInputColumn(jobBuilder.getSourceColumns().get(0));
            analyzer1.addInputColumn(jobBuilder.getSourceColumns().get(1));
            analyzer1.addInputColumn(trans1.getOutputColumns().get(0));
            analyzer1.addInputColumn(trans2.getOutputColumns().get(0));
            analyzer1.setRequirement(filter2, valid);
            final FilterOutcome invalid1 = filter1.getFilterOutcome(invalid);
            final FilterOutcome invalid2 = filter2.getFilterOutcome(invalid);
            final AnalyzerComponentBuilder<MockAnalyzer> analyzer2 = jobBuilder.addAnalyzer(MockAnalyzer.class);
            analyzer2.setName("rejects");
            analyzer2.addInputColumn(jobBuilder.getSourceColumns().get(0));
            analyzer2.addInputColumn(jobBuilder.getSourceColumns().get(1));
            analyzer2.setComponentRequirement(new CompoundComponentRequirement(invalid1, invalid2));
            job = jobBuilder.toAnalysisJob();
        }
    }
    final AnalysisRunner runner = new AnalysisRunnerImpl(configuration);
    final AnalysisResultFuture resultFuture = runner.run(job);
    resultFuture.await();
    if (resultFuture.isErrornous()) {
        throw resultFuture.getErrors().get(0);
    }
    int recordsInResults = 0;
    final Map<ComponentJob, AnalyzerResult> map = resultFuture.getResultMap();
    for (final Entry<ComponentJob, AnalyzerResult> entry : map.entrySet()) {
        final ComponentJob componentJob = entry.getKey();
        @SuppressWarnings("unchecked")
        final ListResult<InputRow> result = (ListResult<InputRow>) entry.getValue();
        final List<InputRow> values = result.getValues();
        final int recordsInResult = values.size();
        recordsInResults += recordsInResult;
        switch(componentJob.getName()) {
            case "success":
            case "rejects":
                // expected states
                replacedertTrue("Expected records in all buckets of the cleansing street, but did not find any in: " + componentJob, recordsInResult > 0);
                replacedertTrue("Expected records to be distributed across buckets, but found all in: " + componentJob, recordsInResult != recordsInTable);
                break;
            default:
                fail("Unexpected component in result map: " + componentJob);
        }
    }
    assertEquals(recordsInTable, recordsInResults);
}
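
Before building the job, the test above verifies the table's row count with a single-row count query. A minimal sketch of just that MetaModel pattern, assuming an open DataContext and a resolved Table:

final Query countQuery = dataContext.query().from(table).selectCount().toQuery();
final Row countRow = MetaModelHelper.executeSingleRowQuery(dataContext, countQuery);
// COUNT(*) comes back as the first (and only) value of the single result row
final int rowCount = ((Number) countRow.getValue(0)).intValue();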

14 Source : CsvOutputWriterFactoryTest.java
with GNU Lesser General Public License v3.0
from schic

public void testFullScenario() throws Exception {
    final OutputWriterScenarioHelper scenarioHelper = new OutputWriterScenarioHelper();
    final String filename = "target/test-output/csv-file1.txt";
    final OutputWriter writer = CsvOutputWriterFactory.getWriter(filename, scenarioHelper.getColumns());
    scenarioHelper.writeExampleData(writer);
    writer.close();
    final DataContext dc = new CsvDataContext(new File(filename));
    final Table table = dc.getDefaultSchema().getTable(0);
    final Query q = dc.query().from(table).select(table.getColumns()).toQuery();
    final DataSet dataSet = dc.executeQuery(q);
    scenarioHelper.performAssertions(dataSet, false);
}
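
The read-back step above is plain MetaModel: point a CsvDataContext at the written file and query the default schema. A minimal sketch of that pattern on its own, with a hypothetical file name and try-with-resources around the DataSet:

final DataContext dc = new CsvDataContext(new File("target/test-output/example.csv"));
final Table table = dc.getDefaultSchema().getTable(0);
final Query q = dc.query().from(table).select(table.getColumns()).toQuery();
try (DataSet dataSet = dc.executeQuery(q)) {
    while (dataSet.next()) {
        System.out.println(dataSet.getRow());
    }
}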

14 Source : AbstractFileBasedDatastoreDialog.java
with GNU Lesser General Public License v3.0
from schic

private DataSet getPreviewData(final String filename) {
    if (!isPreviewDataAvailable()) {
        logger.info("Not displaying preview table because isPreviewDataAvailable() returned false");
        return null;
    }
    final D datastore = getPreviewDatastore(filename);
    try (DatastoreConnection con = datastore.openConnection()) {
        final DataContext dc = con.getDataContext();
        final Table table = getPreviewTable(dc);
        List<Column> columns = table.getColumns();
        if (columns.size() > getPreviewColumns()) {
            // include at most getPreviewColumns() columns
            columns = columns.stream().limit(getPreviewColumns()).collect(Collectors.toList());
        }
        final Query q = dc.query().from(table).select(columns).toQuery();
        q.setMaxRows(7);
        return dc.executeQuery(q);
    }
}
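
The preview above limits columns by trimming the list and limits rows via Query.setMaxRows(int). A minimal sketch of the equivalent limit expressed through MetaModel's fluent builder, assuming the same dc, table and columns; maxRows(...) on the builder is taken to behave like setMaxRows on the built Query:

try (DataSet preview = dc.query().from(table).select(columns).maxRows(7).execute()) {
    while (preview.next()) {
        System.out.println(preview.getRow());
    }
}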

14 Source : KafkaTest.java
with GNU Lesser General Public License v3.0
from schic

public void testConvertToColumnWithNoSchema() throws Exception {
    String bootstrapServers = "localhost:9092";
    DataContext dataContext = new KafkaDataContext<>(String.class, String.class, bootstrapServers, Arrays.asList("Topic1", "Topic2"));
    DataSet ds = dataContext.query().from("Topic1").select("key", "value").execute();
    while (ds.next()) {
        Row row = ds.getRow();
        System.out.println("key: " + row.getValue(0) + ", value: " + row.getValue(1));
    }
    ds.close();
}
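
DataSet implements Closeable, so the explicit ds.close() above can be replaced with try-with-resources, which also closes the set if iteration throws. A minimal sketch, assuming the same dataContext as in the test:

try (DataSet ds = dataContext.query().from("Topic1").select("key", "value").execute()) {
    while (ds.next()) {
        final Row row = ds.getRow();
        System.out.println("key: " + row.getValue(0) + ", value: " + row.getValue(1));
    }
}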

13 Source : JaxbConfigurationReaderTest.java
with GNU Lesser General Public License v3.0
from schic

public void testReadComplexDataInPojoDatastore() throws Exception {
    final DataCleanerConfiguration configuration = reader.create(new File("src/test/resources/example-configuration-pojo-datastore-with-complex-data.xml"));
    final Datastore datastore = configuration.getDatastoreCatalog().getDatastore("pojo");
    assertNotNull(datastore);
    final DatastoreConnection con = datastore.openConnection();
    final DataContext dc = con.getDataContext();
    final Table table = dc.getDefaultSchema().getTable(0);
    final List<Column> columns = table.getColumns();
    replacedertEquals("[Column[name=Foo,columnNumber=0,type=VARCHAR,nullable=true,nativeType=null,columnSize=null], " + "Column[name=Bar,columnNumber=1,type=MAP,nullable=true,nativeType=null,columnSize=null], " + "Column[name=Baz,columnNumber=2,type=LIST,nullable=true,nativeType=null,columnSize=null], " + "Column[name=bytes,columnNumber=3,type=BINARY,nullable=true,nativeType=null,columnSize=null]]", columns.toString());
    final DataSet ds = dc.query().from(table).select(columns).execute();
    assertTrue(ds.next());
    assertEquals("Hello", ds.getRow().getValue(0).toString());
    assertEquals("{greeting=hello, person=world}", ds.getRow().getValue(1).toString());
    assertEquals("[hello, world]", ds.getRow().getValue(2).toString());
    assertEquals("{1,2,3,4,5}", ArrayUtils.toString(ds.getRow().getValue(3)));
    assertTrue(ds.getRow().getValue(1) instanceof Map);
    assertTrue(ds.getRow().getValue(2) instanceof List);
    assertTrue(ds.getRow().getValue(3) instanceof byte[]);
    assertTrue(ds.next());
    assertEquals("There", ds.getRow().getValue(0).toString());
    assertEquals("{greeting=hi, there you!, person={Firstname=Kasper, Lastname=Sørensen}}", ds.getRow().getValue(1).toString());
    assertEquals(null, ds.getRow().getValue(2));
    assertEquals(null, ds.getRow().getValue(3));
    assertTrue(ds.getRow().getValue(1) instanceof Map);
    assertTrue(ds.next());
    assertEquals("World", ds.getRow().getValue(0).toString());
    assertEquals(null, ds.getRow().getValue(1));
    assertEquals("[Sørensen, Kasper]", ds.getRow().getValue(2).toString());
    assertEquals("{-1,-2,-3,-4,-5}", ArrayUtils.toString(ds.getRow().getValue(3)));
    assertTrue(ds.getRow().getValue(2) instanceof List);
    assertTrue(ds.getRow().getValue(3) instanceof byte[]);
}
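
The test above never closes the DatastoreConnection it opens. Since the connection is AutoCloseable (as the other examples on this page show), a minimal sketch of the same setup with deterministic cleanup:

try (DatastoreConnection con = datastore.openConnection()) {
    final DataContext dc = con.getDataContext();
    final Table table = dc.getDefaultSchema().getTable(0);
    // ... run the same queries and assertions as above ...
    System.out.println(table.getColumns());
}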

13 Source : DatastoreOutputWriterFactoryTest.java
with GNU Lesser General Public License v3.0
from schic

public void testFullScenario() throws Exception {
    final OutputWriterScenarioHelper scenarioHelper = new OutputWriterScenarioHelper();
    final DatastoreCreationDelegate creationDelegate = datastore -> {
        _datastoreCreated = true;
        replacedertEquals("my datastore", datastore.getName());
        try (DatastoreConnection con = datastore.openConnection()) {
            final DataContext dc = con.getDataContext();
            final Table table = dc.getDefaultSchema().getTable(0);
            final Query q = dc.query().from(table).select(table.getColumns()).toQuery();
            final DataSet dataSet = dc.executeQuery(q);
            scenarioHelper.performAssertions(dataSet, true);
        }
    };
    final OutputWriter writer = DatastoreOutputWriterFactory.getWriter(OUTPUT_DIR, creationDelegate, "my datastore", "my dataset", scenarioHelper.getColumns().toArray(new InputColumn[0]));
    scenarioHelper.writeExampleData(writer);
    replacedertEquals("my_dataset", DatastoreOutputWriterFactory.getActualTableName(writer));
    replacedertTrue(_datastoreCreated);
}

13 Source : CreateExcelSpreadsheetAnalyzer.java
with GNU Lesser General Public License v3.0
from schic

@Override
public OutputWriter createOutputWriter() {
    if (file.exists()) {
        final ExcelDatastore datastore = new ExcelDatastore(file.getName(), new FileResource(file), file.getAbsolutePath());
        try (UpdateableDatastoreConnection connection = datastore.openConnection()) {
            final DataContext dataContext = connection.getDataContext();
            final List<String> tableNames = dataContext.getDefaultSchema().getTableNames();
            for (String tableName : tableNames) {
                if (tableName.equals(sheetName)) {
                    if (overwriteSheetIfExists) {
                        final Table tableSheet = dataContext.getTableByQualifiedLabel(sheetName);
                        final UpdateableDataContext updateableDataContext = connection.getUpdateableDataContext();
                        updateableDataContext.executeUpdate(new DropTable(tableSheet));
                    }
                }
            }
        }
    }
    // If the user wants the file sorted by a column, we create a temporary file
    // and return a CSV writer so that a merge sort can be performed on it;
    // otherwise we return a normal Excel writer
    if (columnToBeSortedOn != null) {
        return createTemporaryCsvWriter();
    } else {
        return ExcelOutputWriterFactory.getWriter(file.getPath(), sheetName, fields, columns);
    }
}
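
The loop over table names above only exists to find a sheet with the configured name. A minimal sketch of the same drop-if-exists check using Schema.getTableByName(...), which returns null when the sheet is absent (the same datastore, sheetName and overwriteSheetIfExists fields are assumed):

try (UpdateableDatastoreConnection connection = datastore.openConnection()) {
    final Table existingSheet = connection.getDataContext().getDefaultSchema().getTableByName(sheetName);
    if (existingSheet != null && overwriteSheetIfExists) {
        // DropTable is a MetaModel update script that removes the sheet
        connection.getUpdateableDataContext().executeUpdate(new DropTable(existingSheet));
    }
}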

12 Source : DatastoreQueryController.java
with GNU Lesser General Public License v3.0
from schic

@RolesAllowed(SecurityRoles.TASK_QUERY)
@RequestMapping(value = "/{tenant}/datastores/{datastore}.query", params = { "q", "f", "m" }, method = RequestMethod.GET, headers = "Accept=application/json", produces = { "application/json" })
@ResponseBody
public Map<String, Object> jsonPaginatedGet(@PathVariable("tenant") final String tenant, @PathVariable("datastore") String datastoreName, @RequestParam("q") String query, @RequestParam("f") int firstRow, @RequestParam("m") int maxRows, UriComponentsBuilder uriBuilder, HttpServletResponse response) throws IOException {
    response.setContentType("application/json");
    // should test for a sensible page size
    datastoreName = datastoreName.replaceAll("\\+", " ");
    final DataCleanerConfiguration configuration = _tenantContextFactory.getContext(tenant).getConfiguration();
    final Datastore ds = configuration.getDatastoreCatalog().getDatastore(datastoreName);
    if (ds == null) {
        response.sendError(HttpServletResponse.SC_NOT_FOUND, "No such datastore: " + datastoreName);
        return null;
    }
    String username = getUsername();
    if (StringUtils.isNullOrEmpty(query)) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, "No query defined");
        return null;
    }
    logger.info("Serving query result of datastore {} to user: {}. Query: {}", new Object[] { datastoreName, username, query });
    try (final DatastoreConnection con = ds.openConnection()) {
        final DataContext dataContext = con.getDataContext();
        Query pagedQuery = dataContext.parseQuery(query);
        pagedQuery.setFirstRow(firstRow);
        pagedQuery.setMaxRows(maxRows);
        try (final DataSet dataSet = dataContext.executeQuery(pagedQuery)) {
            return getJsonResult(dataSet);
        }
    }
}
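
The controller above delegates the DataSet-to-JSON conversion to getJsonResult(dataSet), which is not shown in this snippet. A hedged sketch of what such a mapping might look like, using only the DataSet/Row/SelectItem API; the column-label keys and list-of-maps shape are assumptions, not the controller's actual JSON format:

final List<Map<String, Object>> rows = new ArrayList<>();
while (dataSet.next()) {
    final Row row = dataSet.getRow();
    final Map<String, Object> jsonRow = new LinkedHashMap<>();
    for (final SelectItem item : dataSet.getSelectItems()) {
        // SelectItem.toString() yields the column label used in the query
        jsonRow.put(item.toString(), row.getValue(item));
    }
    rows.add(jsonRow);
}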

12 Source : PatternFinderAndStringAnalyzerDrillToDetailTest.java
with GNU Lesser General Public License v3.0
from schic

public void testScenario() throws Throwable {
    final TaskRunner taskRunner = new MultiThreadedTaskRunner(5);
    final DataCleanerEnvironment environment = new DataCleanerEnvironmentImpl().withTaskRunner(taskRunner);
    final DataCleanerConfiguration configuration = new DataCleanerConfigurationImpl().withEnvironment(environment);
    final Datastore datastore = TestHelper.createSampleDatabaseDatastore("ds");
    final DatastoreConnection con = datastore.openConnection();
    final DataContext dc = con.getDataContext();
    try (AnalysisJobBuilder ajb = new AnalysisJobBuilder(configuration)) {
        ajb.setDatastoreConnection(con);
        final Table table = dc.getDefaultSchema().getTableByName("EMPLOYEES");
        assertNotNull(table);
        final Column jobTitleColumn = table.getColumnByName("JOBTITLE");
        assertNotNull(jobTitleColumn);
        final Column emailColumn = table.getColumnByName("EMAIL");
        assertNotNull(emailColumn);
        ajb.addSourceColumns(jobTitleColumn, emailColumn);
        final InputColumn<?> emailInputColumn = ajb.getSourceColumnByName("EMAIL");
        final TransformerComponentBuilder<EmailStandardizerTransformer> emailStd1 = ajb.addTransformer(EmailStandardizerTransformer.class).addInputColumn(emailInputColumn);
        final AnalyzerComponentBuilder<PatternFinderAnalyzer> pf = ajb.addAnalyzer(PatternFinderAnalyzer.class);
        final InputColumn<?> jobTitleInputColumn = ajb.getSourceColumnByName("JOBTITLE");
        pf.addInputColumn(jobTitleInputColumn);
        pf.getComponentInstance().setDiscriminateTextCase(false);
        final AnalyzerComponentBuilder<StringAnalyzer> sa = ajb.addAnalyzer(StringAnalyzer.class);
        sa.addInputColumns(emailInputColumn, emailStd1.getOutputColumnByName("Username"), emailStd1.getOutputColumnByName("Domain"));
        final AnalysisResultFuture resultFuture = new AnalysisRunnerImpl(configuration).run(ajb.toAnalysisJob());
        if (!resultFuture.isSuccessful()) {
            throw resultFuture.getErrors().iterator().next();
        }
        // pattern finder result tests
        {
            final PatternFinderResult result = (PatternFinderResult) resultFuture.getResult(pf.toAnalyzerJob());
            final String resultString = new PatternFinderResultTextRenderer().render(result);
            final String[] resultLines = resultString.split("\n");
            assertEquals(resultString, 5, resultLines.length);
            assertEquals(resultString, "                            Match count Sample      ", resultLines[0]);
            assertTrue(resultString, resultLines[1].startsWith("aaaaa aaaaaaaaa                      19"));
            final ResultProducer resultProducer = result.getSingleCrosstab().where("Pattern", "aaaaa aaaaaaaaa").where("Measures", "Match count").explore();
            assertEquals(DefaultResultProducer.class, resultProducer.getClass());
            final AnalyzerResult result2 = resultProducer.getResult();
            assertEquals(AnnotatedRowsResult.class, result2.getClass());
            final AnnotatedRowsResult annotatedRowsResult = (AnnotatedRowsResult) result2;
            assertEquals(19, annotatedRowsResult.getAnnotatedRowCount());
            final List<InputRow> rows = annotatedRowsResult.getSampleRows();
            assertEquals(19, rows.size());
            final String[] values = new String[19];
            for (int i = 0; i < values.length; i++) {
                values[i] = (String) rows.get(i).getValue(jobTitleInputColumn);
            }
            Arrays.sort(values);
            assertEquals("[Sales Rep, Sales Rep, Sales Rep, Sales Rep, Sales Rep, Sales Rep, Sales Rep, " + "Sales Rep, Sales Rep, Sales Rep, Sales Rep, Sales Rep, Sales Rep, Sales Rep, " + "Sales Rep, Sales Rep, Sales Rep, VP Marketing, VP Sales]", Arrays.toString(values));
        }
        // string analyzer tests
        {
            final CrosstabResult result = (CrosstabResult) resultFuture.getResult(sa.toAnalyzerJob());
            final String[] resultLines = new CrosstabTextRenderer().render(result).split("\n");
            assertEquals("                                         EMAIL Username   Domain ", resultLines[0]);
            assertEquals("Total char count                           655      172      460 ", resultLines[6]);
            assertEquals("Max chars                                   31       10       20 ", resultLines[7]);
            assertEquals("Min chars                                   26        5       20 ", resultLines[8]);
            // Username is a virtual column, but because of the row-annotation
            // system it is still possible to drill to detail on it.
            ResultProducer resultProducer = result.getCrosstab().where("Column", "Username").where("Measures", "Max chars").explore();
            assertNotNull(resultProducer);
            assertEquals(AnnotatedRowsResult.class, resultProducer.getResult().getClass());
            // email is a physical column so it IS queryable
            resultProducer = result.getCrosstab().where("Column", "EMAIL").where("Measures", "Max chars").explore();
            assertNotNull(resultProducer);
            final AnalyzerResult result2 = resultProducer.getResult();
            assertEquals(AnnotatedRowsResult.class, result2.getClass());
            final AnnotatedRowsResult arr = (AnnotatedRowsResult) result2;
            final List<InputRow> rows = arr.getSampleRows();
            assertEquals(1, rows.size());
            assertEquals("[email protected]", rows.get(0).getValue(emailInputColumn).toString());
        }
    }
    con.close();
    taskRunner.shutdown();
}
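
The connection and task runner above are only closed on the success path; if an assertion fails, both leak. A minimal sketch of the same setup with deterministic cleanup, keeping the job-building logic unchanged:

final TaskRunner taskRunner = new MultiThreadedTaskRunner(5);
try (DatastoreConnection con = datastore.openConnection()) {
    final DataContext dc = con.getDataContext();
    // ... build and run the analysis job exactly as above ...
} finally {
    taskRunner.shutdown();
}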
