Here are examples of using the Java API class com.google.api.services.bigquery.model.JobStatistics2, taken from open-source projects.
1. BigQueryIOTest#testBigQueryQuerySourceInitSplit()
Project: incubator-beam
File: BigQueryIOTest.java
/**
 * Verifies that a query-backed BigQuery source initializes and splits correctly:
 * the query is dry-run, results are extracted to files, the reader yields the
 * expected rows, and exactly one bundle (a TransformingSource) is produced.
 */
@Test
public void testBigQueryQuerySourceInitSplit() throws Exception {
  // Fake "query" job whose statistics reference a single table.
  TableReference dryRunRef = new TableReference();
  Job jobWithQueryStats = new Job();
  JobStatistics queryJobStatistics = new JobStatistics();
  JobStatistics2 queryStatistics = new JobStatistics2();
  queryStatistics.setReferencedTables(ImmutableList.of(dryRunRef));
  queryJobStatistics.setQuery(queryStatistics);
  jobWithQueryStats.setStatus(new JobStatus()).setStatistics(queryJobStatistics);

  // Fake "extract" job reporting exactly one destination file.
  Job jobWithExtractStats = new Job();
  JobStatistics extractJobStatistics = new JobStatistics();
  JobStatistics4 extractStatistics = new JobStatistics4();
  extractStatistics.setDestinationUriFileCounts(ImmutableList.of(1L));
  extractJobStatistics.setExtract(extractStatistics);
  jobWithExtractStats.setStatus(new JobStatus()).setStatistics(extractJobStatistics);

  // Stubbed services: the reader will return three canned JSON rows.
  FakeBigQueryServices stubbedServices = new FakeBigQueryServices()
      .withJobService(mockJobService)
      .withDatasetService(mockDatasetService)
      .readerReturns(
          toJsonString(new TableRow().set("name", "a").set("number", "1")),
          toJsonString(new TableRow().set("name", "b").set("number", "2")),
          toJsonString(new TableRow().set("name", "c").set("number", "3")));

  String token = "testJobIdToken";
  String tempDir = "mock://tempLocation";
  TableReference resultTable = BigQueryIO.parseTableSpec("project:data_set.table_name");
  BoundedSource<TableRow> source = BigQueryQuerySource.create(
      token, "query", resultTable, true, /* flattenResults */
      tempDir, stubbedServices);

  List<TableRow> expectedRows = ImmutableList.of(
      new TableRow().set("name", "a").set("number", "1"),
      new TableRow().set("name", "b").set("number", "2"),
      new TableRow().set("name", "c").set("number", "3"));

  PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
  pipelineOptions.setTempLocation(tempDir);

  // NOTE(review): "testProejct" looks like a typo for "testProject", but it is
  // used consistently within this test, so it is preserved as-is.
  TableReference dryRunResultTable = new TableReference()
      .setProjectId("testProejct")
      .setDatasetId("testDataset")
      .setTableId("testTable");
  when(mockJobService.dryRunQuery(anyString(), anyString()))
      .thenReturn(new JobStatistics().setQuery(
          new JobStatistics2()
              .setTotalBytesProcessed(100L)
              .setReferencedTables(ImmutableList.of(dryRunResultTable))));
  when(mockDatasetService.getTable(
          eq(dryRunResultTable.getProjectId()),
          eq(dryRunResultTable.getDatasetId()),
          eq(dryRunResultTable.getTableId())))
      .thenReturn(new Table().setSchema(new TableSchema()));
  when(mockDatasetService.getTable(
          eq(resultTable.getProjectId()),
          eq(resultTable.getDatasetId()),
          eq(resultTable.getTableId())))
      .thenReturn(new Table().setSchema(new TableSchema()));
  IOChannelUtils.setIOFactory("mock", mockIOChannelFactory);
  when(mockIOChannelFactory.resolve(anyString(), anyString()))
      .thenReturn("mock://tempLocation/output");
  when(mockJobService.pollJob(Mockito.<JobReference>any(), Mockito.anyInt()))
      .thenReturn(jobWithExtractStats);

  // Reading the source should yield exactly the canned rows.
  Assert.assertThat(
      SourceTestUtils.readFromSource(source, pipelineOptions),
      CoreMatchers.is(expectedRows));
  SourceTestUtils.assertSplitAtFractionBehavior(
      source, 2, 0.3, ExpectedSplitOutcome.MUST_BE_CONSISTENT_IF_SUCCEEDS, pipelineOptions);

  // Splitting should produce a single TransformingSource bundle.
  List<? extends BoundedSource<TableRow>> bundles = source.splitIntoBundles(100, pipelineOptions);
  assertEquals(1, bundles.size());
  BoundedSource<TableRow> onlySource = bundles.get(0);
  assertThat(onlySource, CoreMatchers.instanceOf(TransformingSource.class));

  // The query and extract jobs must have been started, and the temp dataset created.
  Mockito.verify(mockJobService)
      .startQueryJob(Mockito.<JobReference>any(), Mockito.<JobConfigurationQuery>any());
  Mockito.verify(mockJobService)
      .startExtractJob(Mockito.<JobReference>any(), Mockito.<JobConfigurationExtract>any());
  Mockito.verify(mockDatasetService)
      .createDataset(anyString(), anyString(), anyString(), anyString());
}
2. BigQueryIOTest#testBigQueryQuerySourceInitSplit()
Project: DataflowJavaSDK
File: BigQueryIOTest.java
/**
 * Verifies that a query-backed BigQuery source initializes and splits correctly:
 * the query is dry-run, results are extracted to files, the reader yields the
 * expected rows, and exactly one bundle (a TransformingSource) is produced.
 */
@Test
public void testBigQueryQuerySourceInitSplit() throws Exception {
  // Fake "query" job whose statistics reference a single table.
  TableReference dryRunRef = new TableReference();
  Job jobWithQueryStats = new Job();
  JobStatistics queryJobStatistics = new JobStatistics();
  JobStatistics2 queryStatistics = new JobStatistics2();
  queryStatistics.setReferencedTables(ImmutableList.of(dryRunRef));
  queryJobStatistics.setQuery(queryStatistics);
  jobWithQueryStats.setStatus(new JobStatus()).setStatistics(queryJobStatistics);

  // Fake "extract" job reporting exactly one destination file.
  Job jobWithExtractStats = new Job();
  JobStatistics extractJobStatistics = new JobStatistics();
  JobStatistics4 extractStatistics = new JobStatistics4();
  extractStatistics.setDestinationUriFileCounts(ImmutableList.of(1L));
  extractJobStatistics.setExtract(extractStatistics);
  jobWithExtractStats.setStatus(new JobStatus()).setStatistics(extractJobStatistics);

  // Stubbed services: the reader will return three canned JSON rows.
  FakeBigQueryServices stubbedServices = new FakeBigQueryServices()
      .withJobService(mockJobService)
      .withDatasetService(mockDatasetService)
      .readerReturns(
          toJsonString(new TableRow().set("name", "a").set("number", "1")),
          toJsonString(new TableRow().set("name", "b").set("number", "2")),
          toJsonString(new TableRow().set("name", "c").set("number", "3")));

  String token = "testJobIdToken";
  String tempDir = "mock://tempLocation";
  TableReference resultTable = BigQueryIO.parseTableSpec("project:data_set.table_name");
  BoundedSource<TableRow> source = BigQueryQuerySource.create(
      token, "query", resultTable, true, /* flattenResults */
      tempDir, stubbedServices);

  List<TableRow> expectedRows = ImmutableList.of(
      new TableRow().set("name", "a").set("number", "1"),
      new TableRow().set("name", "b").set("number", "2"),
      new TableRow().set("name", "c").set("number", "3"));

  PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
  pipelineOptions.setTempLocation(tempDir);

  // NOTE(review): "testProejct" looks like a typo for "testProject", but it is
  // used consistently within this test, so it is preserved as-is.
  TableReference dryRunResultTable = new TableReference()
      .setProjectId("testProejct")
      .setDatasetId("testDataset")
      .setTableId("testTable");
  when(mockJobService.dryRunQuery(anyString(), anyString()))
      .thenReturn(new JobStatistics().setQuery(
          new JobStatistics2()
              .setTotalBytesProcessed(100L)
              .setReferencedTables(ImmutableList.of(dryRunResultTable))));
  when(mockDatasetService.getTable(
          eq(dryRunResultTable.getProjectId()),
          eq(dryRunResultTable.getDatasetId()),
          eq(dryRunResultTable.getTableId())))
      .thenReturn(new Table().setSchema(new TableSchema()));
  when(mockDatasetService.getTable(
          eq(resultTable.getProjectId()),
          eq(resultTable.getDatasetId()),
          eq(resultTable.getTableId())))
      .thenReturn(new Table().setSchema(new TableSchema()));
  IOChannelUtils.setIOFactory("mock", mockIOChannelFactory);
  when(mockIOChannelFactory.resolve(anyString(), anyString()))
      .thenReturn("mock://tempLocation/output");
  when(mockJobService.pollJob(Mockito.<JobReference>any(), Mockito.anyInt()))
      .thenReturn(jobWithExtractStats);

  // Reading the source should yield exactly the canned rows.
  Assert.assertThat(
      SourceTestUtils.readFromSource(source, pipelineOptions),
      CoreMatchers.is(expectedRows));
  SourceTestUtils.assertSplitAtFractionBehavior(
      source, 2, 0.3, ExpectedSplitOutcome.MUST_BE_CONSISTENT_IF_SUCCEEDS, pipelineOptions);

  // Splitting should produce a single TransformingSource bundle.
  List<? extends BoundedSource<TableRow>> bundles = source.splitIntoBundles(100, pipelineOptions);
  assertEquals(1, bundles.size());
  BoundedSource<TableRow> onlySource = bundles.get(0);
  assertThat(onlySource, CoreMatchers.instanceOf(TransformingSource.class));

  // The query and extract jobs must have been started, and the temp dataset created.
  Mockito.verify(mockJobService)
      .startQueryJob(Mockito.<JobReference>any(), Mockito.<JobConfigurationQuery>any());
  Mockito.verify(mockJobService)
      .startExtractJob(Mockito.<JobReference>any(), Mockito.<JobConfigurationExtract>any());
  Mockito.verify(mockDatasetService)
      .createDataset(anyString(), anyString(), anyString(), anyString());
}