com.google.api.services.bigquery.model.TableSchema

The following examples of the Java API class com.google.api.services.bigquery.model.TableSchema are taken from open source projects.
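
Before the project examples, here is a minimal standalone sketch of the pattern most of them follow: build a list of TableFieldSchema entries and attach it to a TableSchema with setFields(). The class name and the field names and types in this sketch are illustrative assumptions, not taken from any of the projects below.

import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableSchema;

import java.util.ArrayList;
import java.util.List;

// Minimal sketch with illustrative field names; shows the TableSchema
// construction pattern that recurs in the examples below.
public class TableSchemaSketch {
    public static void main(String[] args) {
        // Each field needs a name and a BigQuery type; the mode is optional
        // and defaults to NULLABLE when not set.
        List<TableFieldSchema> fields = new ArrayList<>();
        fields.add(new TableFieldSchema().setName("user_id").setType("STRING").setMode("REQUIRED"));
        fields.add(new TableFieldSchema().setName("score").setType("FLOAT"));
        fields.add(new TableFieldSchema().setName("created_at").setType("TIMESTAMP"));

        // Attach the field list to the schema object.
        TableSchema schema = new TableSchema().setFields(fields);
        System.out.println(schema);
    }
}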

1. BigQueryIOTest#testBuildSinkDisplayData()

Project: incubator-beam
File: BigQueryIOTest.java
@Test
public void testBuildSinkDisplayData() {
    String tableSpec = "project:dataset.table";
    TableSchema schema = new TableSchema().set("col1", "type1").set("col2", "type2");
    BigQueryIO.Write.Bound write = BigQueryIO.Write.to(tableSpec)
        .withSchema(schema)
        .withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
        .withWriteDisposition(WriteDisposition.WRITE_APPEND)
        .withoutValidation();
    DisplayData displayData = DisplayData.from(write);
    assertThat(displayData, hasDisplayItem("table"));
    assertThat(displayData, hasDisplayItem("schema"));
    assertThat(displayData, hasDisplayItem("createDisposition", CreateDisposition.CREATE_IF_NEEDED.toString()));
    assertThat(displayData, hasDisplayItem("writeDisposition", WriteDisposition.WRITE_APPEND.toString()));
    assertThat(displayData, hasDisplayItem("validation", false));
}

2. TriggerExample#getSchema()

Project: incubator-beam
File: TriggerExample.java
/** Defines the BigQuery schema used for the output. */
private static TableSchema getSchema() {
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("trigger_type").setType("STRING"));
    fields.add(new TableFieldSchema().setName("freeway").setType("STRING"));
    fields.add(new TableFieldSchema().setName("total_flow").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("number_of_records").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("window").setType("STRING"));
    fields.add(new TableFieldSchema().setName("isFirst").setType("BOOLEAN"));
    fields.add(new TableFieldSchema().setName("isLast").setType("BOOLEAN"));
    fields.add(new TableFieldSchema().setName("timing").setType("STRING"));
    fields.add(new TableFieldSchema().setName("event_time").setType("TIMESTAMP"));
    fields.add(new TableFieldSchema().setName("processing_time").setType("TIMESTAMP"));
    TableSchema schema = new TableSchema().setFields(fields);
    return schema;
}

3. MaxPerKeyExamples#main()

Project: incubator-beam
File: MaxPerKeyExamples.java
public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("month").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("max_mean_temp").setType("FLOAT"));
    TableSchema schema = new TableSchema().setFields(fields);
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(new MaxMeanTemp())
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

4. FilterExamples#main()

Project: incubator-beam
File: FilterExamples.java
public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    TableSchema schema = buildWeatherSchemaProjection();
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(ParDo.of(new ProjectionFn()))
        .apply(new BelowGlobalMean(options.getMonthFilter()))
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

5. CombinePerKeyExamples#main()

Project: incubator-beam
File: CombinePerKeyExamples.java
public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("word").setType("STRING"));
    fields.add(new TableFieldSchema().setName("all_plays").setType("STRING"));
    TableSchema schema = new TableSchema().setFields(fields);
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(new PlaysForWord())
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

6. BigQueryTornadoes#main()

Project: incubator-beam
File: BigQueryTornadoes.java
public static void main(String[] args) {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("month").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("tornado_count").setType("INTEGER"));
    TableSchema schema = new TableSchema().setFields(fields);
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(new CountTornadoes())
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

7. BigQueryIOTest#testBuildSinkDisplayData()

Project: DataflowJavaSDK
File: BigQueryIOTest.java
@Test
public void testBuildSinkDisplayData() {
    String tableSpec = "project:dataset.table";
    TableSchema schema = new TableSchema().set("col1", "type1").set("col2", "type2");
    BigQueryIO.Write.Bound write = BigQueryIO.Write.to(tableSpec)
        .withSchema(schema)
        .withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
        .withWriteDisposition(WriteDisposition.WRITE_APPEND)
        .withoutValidation();
    DisplayData displayData = DisplayData.from(write);
    assertThat(displayData, hasDisplayItem("table"));
    assertThat(displayData, hasDisplayItem("schema"));
    assertThat(displayData, hasDisplayItem("createDisposition", CreateDisposition.CREATE_IF_NEEDED.toString()));
    assertThat(displayData, hasDisplayItem("writeDisposition", WriteDisposition.WRITE_APPEND.toString()));
    assertThat(displayData, hasDisplayItem("validation", false));
}

8. TriggerExample#getSchema()

Project: DataflowJavaSDK
File: TriggerExample.java
/** Defines the BigQuery schema used for the output. */
private static TableSchema getSchema() {
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("trigger_type").setType("STRING"));
    fields.add(new TableFieldSchema().setName("freeway").setType("STRING"));
    fields.add(new TableFieldSchema().setName("total_flow").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("number_of_records").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("window").setType("STRING"));
    fields.add(new TableFieldSchema().setName("isFirst").setType("BOOLEAN"));
    fields.add(new TableFieldSchema().setName("isLast").setType("BOOLEAN"));
    fields.add(new TableFieldSchema().setName("timing").setType("STRING"));
    fields.add(new TableFieldSchema().setName("event_time").setType("TIMESTAMP"));
    fields.add(new TableFieldSchema().setName("processing_time").setType("TIMESTAMP"));
    TableSchema schema = new TableSchema().setFields(fields);
    return schema;
}

9. MaxPerKeyExamples#main()

Project: DataflowJavaSDK
File: MaxPerKeyExamples.java
public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("month").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("max_mean_temp").setType("FLOAT"));
    TableSchema schema = new TableSchema().setFields(fields);
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(new MaxMeanTemp())
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

10. FilterExamples#main()

Project: DataflowJavaSDK
File: FilterExamples.java
public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    TableSchema schema = buildWeatherSchemaProjection();
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(ParDo.of(new ProjectionFn()))
        .apply(new BelowGlobalMean(options.getMonthFilter()))
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

11. CombinePerKeyExamples#main()

Project: DataflowJavaSDK
File: CombinePerKeyExamples.java
public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("word").setType("STRING"));
    fields.add(new TableFieldSchema().setName("all_plays").setType("STRING"));
    TableSchema schema = new TableSchema().setFields(fields);
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(new PlaysForWord())
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

12. BigQueryTornadoes#main()

Project: DataflowJavaSDK
File: BigQueryTornadoes.java
public static void main(String[] args) {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline p = Pipeline.create(options);
    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("month").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("tornado_count").setType("INTEGER"));
    TableSchema schema = new TableSchema().setFields(fields);
    p.apply(BigQueryIO.Read.from(options.getInput()))
        .apply(new CountTornadoes())
        .apply(BigQueryIO.Write.to(options.getOutput())
            .withSchema(schema)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run();
}

13. AvroUtilsTest#testConvertGenericRecordToTableRow()

Project: incubator-beam
File: AvroUtilsTest.java
@Test
public void testConvertGenericRecordToTableRow() throws Exception {
    TableSchema tableSchema = new TableSchema();
    List<TableFieldSchema> subFields = Lists.<TableFieldSchema>newArrayList(new TableFieldSchema().setName("species").setType("STRING").setMode("NULLABLE"));
    /*
     * Note that the quality and quantity fields do not have their mode set, so they should default
     * to NULLABLE. This is an important test of BigQuery semantics.
     *
     * All the other fields we set in this function are required on the Schema response.
     *
     * See https://cloud.google.com/bigquery/docs/reference/v2/tables#schema
     */
    List<TableFieldSchema> fields = Lists.<TableFieldSchema>newArrayList(
        new TableFieldSchema().setName("number").setType("INTEGER").setMode("REQUIRED"),
        new TableFieldSchema().setName("species").setType("STRING").setMode("NULLABLE"),
        new TableFieldSchema().setName("quality").setType("FLOAT"), /* default to NULLABLE */
        new TableFieldSchema().setName("quantity").setType("INTEGER"), /* default to NULLABLE */
        new TableFieldSchema().setName("birthday").setType("TIMESTAMP").setMode("NULLABLE"),
        new TableFieldSchema().setName("flighted").setType("BOOLEAN").setMode("NULLABLE"),
        new TableFieldSchema().setName("scion").setType("RECORD").setMode("NULLABLE").setFields(subFields),
        new TableFieldSchema().setName("associates").setType("RECORD").setMode("REPEATED").setFields(subFields));
    tableSchema.setFields(fields);
    Schema avroSchema = AvroCoder.of(Bird.class).getSchema();
    {
        // Test nullable fields.
        GenericRecord record = new GenericData.Record(avroSchema);
        record.put("number", 5L);
        TableRow convertedRow = AvroUtils.convertGenericRecordToTableRow(record, tableSchema);
        TableRow row = new TableRow().set("number", "5").set("associates", new ArrayList<TableRow>());
        assertEquals(row, convertedRow);
    }
    {
        // Test type conversion for TIMESTAMP, INTEGER, BOOLEAN, and FLOAT.
        GenericRecord record = new GenericData.Record(avroSchema);
        record.put("number", 5L);
        record.put("quality", 5.0);
        record.put("birthday", 5L);
        record.put("flighted", Boolean.TRUE);
        TableRow convertedRow = AvroUtils.convertGenericRecordToTableRow(record, tableSchema);
        TableRow row = new TableRow().set("number", "5").set("birthday", "1970-01-01 00:00:00.000005 UTC").set("quality", 5.0).set("associates", new ArrayList<TableRow>()).set("flighted", Boolean.TRUE);
        assertEquals(row, convertedRow);
    }
    {
        // Test repeated fields.
        Schema subBirdSchema = AvroCoder.of(Bird.SubBird.class).getSchema();
        GenericRecord nestedRecord = new GenericData.Record(subBirdSchema);
        nestedRecord.put("species", "other");
        GenericRecord record = new GenericData.Record(avroSchema);
        record.put("number", 5L);
        record.put("associates", Lists.<GenericRecord>newArrayList(nestedRecord));
        TableRow convertedRow = AvroUtils.convertGenericRecordToTableRow(record, tableSchema);
        TableRow row = new TableRow().set("associates", Lists.<TableRow>newArrayList(new TableRow().set("species", "other"))).set("number", "5");
        assertEquals(row, convertedRow);
    }
}

14. AvroUtilsTest#testConvertGenericRecordToTableRow()

Project: DataflowJavaSDK
File: AvroUtilsTest.java
@Test
public void testConvertGenericRecordToTableRow() throws Exception {
    TableSchema tableSchema = new TableSchema();
    List<TableFieldSchema> subFields = Lists.<TableFieldSchema>newArrayList(new TableFieldSchema().setName("species").setType("STRING").setMode("NULLABLE"));
    /*
     * Note that the quality and quantity fields do not have their mode set, so they should default
     * to NULLABLE. This is an important test of BigQuery semantics.
     *
     * All the other fields we set in this function are required on the Schema response.
     *
     * See https://cloud.google.com/bigquery/docs/reference/v2/tables#schema
     */
    List<TableFieldSchema> fields = Lists.<TableFieldSchema>newArrayList(
        new TableFieldSchema().setName("number").setType("INTEGER").setMode("REQUIRED"),
        new TableFieldSchema().setName("species").setType("STRING").setMode("NULLABLE"),
        new TableFieldSchema().setName("quality").setType("FLOAT"), /* default to NULLABLE */
        new TableFieldSchema().setName("quantity").setType("INTEGER"), /* default to NULLABLE */
        new TableFieldSchema().setName("birthday").setType("TIMESTAMP").setMode("NULLABLE"),
        new TableFieldSchema().setName("flighted").setType("BOOLEAN").setMode("NULLABLE"),
        new TableFieldSchema().setName("scion").setType("RECORD").setMode("NULLABLE").setFields(subFields),
        new TableFieldSchema().setName("associates").setType("RECORD").setMode("REPEATED").setFields(subFields));
    tableSchema.setFields(fields);
    Schema avroSchema = AvroCoder.of(Bird.class).getSchema();
    {
        // Test nullable fields.
        GenericRecord record = new GenericData.Record(avroSchema);
        record.put("number", 5L);
        TableRow convertedRow = AvroUtils.convertGenericRecordToTableRow(record, tableSchema);
        TableRow row = new TableRow().set("number", "5").set("associates", new ArrayList<TableRow>());
        assertEquals(row, convertedRow);
    }
    {
        // Test type conversion for TIMESTAMP, INTEGER, BOOLEAN, and FLOAT.
        GenericRecord record = new GenericData.Record(avroSchema);
        record.put("number", 5L);
        record.put("quality", 5.0);
        record.put("birthday", 5L);
        record.put("flighted", Boolean.TRUE);
        TableRow convertedRow = AvroUtils.convertGenericRecordToTableRow(record, tableSchema);
        TableRow row = new TableRow().set("number", "5").set("birthday", "1970-01-01 00:00:00.000005 UTC").set("quality", 5.0).set("associates", new ArrayList<TableRow>()).set("flighted", Boolean.TRUE);
        assertEquals(row, convertedRow);
    }
    {
        // Test repeated fields.
        Schema subBirdSchema = AvroCoder.of(Bird.SubBird.class).getSchema();
        GenericRecord nestedRecord = new GenericData.Record(subBirdSchema);
        nestedRecord.put("species", "other");
        GenericRecord record = new GenericData.Record(avroSchema);
        record.put("number", 5L);
        record.put("associates", Lists.<GenericRecord>newArrayList(nestedRecord));
        TableRow convertedRow = AvroUtils.convertGenericRecordToTableRow(record, tableSchema);
        TableRow row = new TableRow().set("associates", Lists.<TableRow>newArrayList(new TableRow().set("species", "other"))).set("number", "5");
        assertEquals(row, convertedRow);
    }
}

15. BigQueryIOTest#testBuildSinkWithSchema()

Project: incubator-beam
File: BigQueryIOTest.java
@Test
public void testBuildSinkWithSchema() {
    TableSchema schema = new TableSchema();
    BigQueryIO.Write.Bound bound = BigQueryIO.Write.to("foo.com:project:somedataset.sometable").withSchema(schema);
    checkWriteObject(bound, "foo.com:project", "somedataset", "sometable", schema, CreateDisposition.CREATE_IF_NEEDED, WriteDisposition.WRITE_EMPTY);
}

16. WindowedWordCount#getSchema()

Project: incubator-beam
File: WindowedWordCount.java
/**
   * Helper method that defines the BigQuery schema used for the output.
   */
private static TableSchema getSchema() {
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("word").setType("STRING"));
    fields.add(new TableFieldSchema().setName("count").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("window_timestamp").setType("TIMESTAMP"));
    TableSchema schema = new TableSchema().setFields(fields);
    return schema;
}

17. FilterExamples#buildWeatherSchemaProjection()

Project: incubator-beam
File: FilterExamples.java
/**
   * Helper method to build the table schema for the output table.
   */
private static TableSchema buildWeatherSchemaProjection() {
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("year").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("month").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("day").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("mean_temp").setType("FLOAT"));
    TableSchema schema = new TableSchema().setFields(fields);
    return schema;
}

18. BigQueryIOTest#testBuildSinkWithSchema()

Project: DataflowJavaSDK
File: BigQueryIOTest.java
@Test
public void testBuildSinkWithSchema() {
    TableSchema schema = new TableSchema();
    BigQueryIO.Write.Bound bound = BigQueryIO.Write.named("WriteMyTable").to("foo.com:project:somedataset.sometable").withSchema(schema);
    checkWriteObject(bound, "foo.com:project", "somedataset", "sometable", schema, CreateDisposition.CREATE_IF_NEEDED, WriteDisposition.WRITE_EMPTY);
}

19. WindowedWordCount#getSchema()

Project: DataflowJavaSDK
File: WindowedWordCount.java
/**
   * Helper method that defines the BigQuery schema used for the output.
   */
private static TableSchema getSchema() {
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("word").setType("STRING"));
    fields.add(new TableFieldSchema().setName("count").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("window_timestamp").setType("TIMESTAMP"));
    TableSchema schema = new TableSchema().setFields(fields);
    return schema;
}

20. FilterExamples#buildWeatherSchemaProjection()

Project: DataflowJavaSDK
File: FilterExamples.java
/**
   * Helper method to build the table schema for the output table.
   */
private static TableSchema buildWeatherSchemaProjection() {
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("year").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("month").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("day").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("mean_temp").setType("FLOAT"));
    TableSchema schema = new TableSchema().setFields(fields);
    return schema;
}