Here are examples of the Java API class com.google.api.services.dataflow.model.JobMetrics, taken from open-source projects.
1. TestDataflowRunnerTest#generateMockMetricResponse()
Project: incubator-beam
File: TestDataflowRunnerTest.java
private LowLevelHttpResponse generateMockMetricResponse(boolean success, boolean tentative) throws Exception { MetricStructuredName name = new MetricStructuredName(); name.setName(success ? "PAssertSuccess" : "PAssertFailure"); name.setContext(tentative ? ImmutableMap.of("tentative", "") : ImmutableMap.<String, String>of()); MetricUpdate metric = new MetricUpdate(); metric.setName(name); metric.setScalar(BigDecimal.ONE); MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); response.setContentType(Json.MEDIA_TYPE); JobMetrics jobMetrics = new JobMetrics(); jobMetrics.setMetrics(Lists.newArrayList(metric)); // N.B. Setting the factory is necessary in order to get valid JSON. jobMetrics.setFactory(Transport.getJsonFactory()); response.setContent(jobMetrics.toPrettyString()); return response; }
2. TestDataflowPipelineRunnerTest#generateMockMetricResponse()
Project: DataflowJavaSDK
File: TestDataflowPipelineRunnerTest.java
private LowLevelHttpResponse generateMockMetricResponse(boolean success, boolean tentative) throws Exception { MetricStructuredName name = new MetricStructuredName(); name.setName(success ? "DataflowAssertSuccess" : "DataflowAssertFailure"); name.setContext(tentative ? ImmutableMap.of("tentative", "") : ImmutableMap.<String, String>of()); MetricUpdate metric = new MetricUpdate(); metric.setName(name); metric.setScalar(BigDecimal.ONE); MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); response.setContentType(Json.MEDIA_TYPE); JobMetrics jobMetrics = new JobMetrics(); jobMetrics.setMetrics(Lists.newArrayList(metric)); // N.B. Setting the factory is necessary in order to get valid JSON. jobMetrics.setFactory(Transport.getJsonFactory()); response.setContent(jobMetrics.toPrettyString()); return response; }
3. DataflowPipelineJobTest#testGetAggregatorValuesWithUnrelatedMetricUpdateIgnoresUpdate()
Project: incubator-beam
File: DataflowPipelineJobTest.java
/** A metric update whose name matches no registered aggregator must be silently skipped. */
@Test
public void testGetAggregatorValuesWithUnrelatedMetricUpdateIgnoresUpdate()
    throws IOException, AggregatorRetrievalException {
  CombineFn<Long, long[], Long> sumFn = new Sum.SumLongFn();
  String aggName = "agg";
  Aggregator<Long, Long> aggregator = new TestAggregator<>(sumFn, aggName);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);

  // An update attributed to an unrelated counter, with no scalar and no context.
  MetricUpdate ignoredUpdate = new MetricUpdate();
  ignoredUpdate.setScalar(null);
  MetricStructuredName ignoredName = new MetricStructuredName();
  ignoredName.setName("ignoredAggregator.elementCount.out0");
  ignoredName.setContext(null);
  ignoredUpdate.setName(ignoredName);
  jobMetrics.setMetrics(ImmutableList.of(ignoredUpdate));

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<Long> values = job.getAggregatorValues(aggregator);

  assertThat(values.getValuesAtSteps().entrySet(), empty());
  assertThat(values.getValues(), empty());
}
4. DataflowPipelineJobTest#testGetAggregatorValuesWithMultipleMetricUpdatesReturnsCollection()
Project: incubator-beam
File: DataflowPipelineJobTest.java
/** Updates for the same aggregator at two different steps are all surfaced and summed. */
@Test
public void testGetAggregatorValuesWithMultipleMetricUpdatesReturnsCollection()
    throws IOException, AggregatorRetrievalException {
  CombineFn<Long, long[], Long> sumFn = new Sum.SumLongFn();
  String aggName = "agg";
  Aggregator<Long, Long> aggregator = new TestAggregator<>(sumFn, aggName);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> firstTransform = mock(PTransform.class);
  String firstStep = "s1";
  String firstFullName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> firstApplied = appliedPTransform(firstFullName, firstTransform);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> secondTransform = mock(PTransform.class);
  String secondStep = "s88";
  String secondFullName = "Spam/Ham/Eggs";
  AppliedPTransform<?, ?, ?> secondApplied = appliedPTransform(secondFullName, secondTransform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(
                  aggregator, firstTransform, aggregator, secondTransform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(
              firstApplied, firstStep, secondApplied, secondStep));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);

  // One metric update per step, both attributed to the same logical aggregator.
  MetricUpdate firstUpdate = new MetricUpdate();
  long firstValue = 1234L;
  firstUpdate.setScalar(new BigDecimal(firstValue));
  MetricStructuredName firstName = new MetricStructuredName();
  firstName.setName(aggName);
  firstName.setContext(ImmutableMap.of("step", firstStep));
  firstUpdate.setName(firstName);

  MetricUpdate secondUpdate = new MetricUpdate();
  long secondValue = 1024L;
  secondUpdate.setScalar(new BigDecimal(secondValue));
  MetricStructuredName secondName = new MetricStructuredName();
  secondName.setName(aggName);
  secondName.setContext(ImmutableMap.of("step", secondStep));
  secondUpdate.setName(secondName);

  jobMetrics.setMetrics(ImmutableList.of(firstUpdate, secondUpdate));

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<Long> values = job.getAggregatorValues(aggregator);

  assertThat(values.getValuesAtSteps(), hasEntry(firstFullName, firstValue));
  assertThat(values.getValuesAtSteps(), hasEntry(secondFullName, secondValue));
  assertThat(values.getValuesAtSteps().size(), equalTo(2));
  assertThat(values.getValues(), containsInAnyOrder(firstValue, secondValue));
  assertThat(values.getTotalValue(sumFn), equalTo(Long.valueOf(firstValue + secondValue)));
}
5. DataflowPipelineJobTest#testGetAggregatorValuesWithSingleMetricUpdateReturnsSingletonCollection()
Project: incubator-beam
File: DataflowPipelineJobTest.java
/** A single matching metric update yields a singleton value collection. */
@Test
public void testGetAggregatorValuesWithSingleMetricUpdateReturnsSingletonCollection()
    throws IOException, AggregatorRetrievalException {
  CombineFn<Long, long[], Long> sumFn = new Sum.SumLongFn();
  String aggName = "agg";
  Aggregator<Long, Long> aggregator = new TestAggregator<>(sumFn, aggName);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);

  // A single update for the aggregator, attributed to step "s1".
  MetricUpdate update = new MetricUpdate();
  long stepValue = 1234L;
  update.setScalar(new BigDecimal(stepValue));
  MetricStructuredName structuredName = new MetricStructuredName();
  structuredName.setName(aggName);
  structuredName.setContext(ImmutableMap.of("step", step));
  update.setName(structuredName);
  jobMetrics.setMetrics(ImmutableList.of(update));

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<Long> values = job.getAggregatorValues(aggregator);

  assertThat(values.getValuesAtSteps(), hasEntry(userName, stepValue));
  assertThat(values.getValuesAtSteps().size(), equalTo(1));
  assertThat(values.getValues(), contains(stepValue));
  assertThat(values.getTotalValue(sumFn), equalTo(Long.valueOf(stepValue)));
}
6. DataflowPipelineJobTest#testGetAggregatorValuesWithNullMetricUpdatesReturnsEmptyValue()
Project: incubator-beam
File: DataflowPipelineJobTest.java
/** A JobMetrics whose metrics list is null must produce an empty value collection. */
@Test
public void testGetAggregatorValuesWithNullMetricUpdatesReturnsEmptyValue()
    throws IOException, AggregatorRetrievalException {
  Aggregator<?, ?> aggregator = mock(Aggregator.class);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);
  // The service may legitimately return a JobMetrics with no metrics list at all.
  jobMetrics.setMetrics(null);

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<?> values = job.getAggregatorValues(aggregator);
  assertThat(values.getValues(), empty());
}
7. DataflowPipelineJobTest#testGetAggregatorValuesWithNoMetricUpdatesReturnsEmptyValue()
Project: incubator-beam
File: DataflowPipelineJobTest.java
/** An explicitly empty metrics list must produce an empty value collection. */
@Test
public void testGetAggregatorValuesWithNoMetricUpdatesReturnsEmptyValue()
    throws IOException, AggregatorRetrievalException {
  Aggregator<?, ?> aggregator = mock(Aggregator.class);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);
  // Present but empty: distinct from the null-metrics case above in the service contract.
  jobMetrics.setMetrics(ImmutableList.<MetricUpdate>of());

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<?> values = job.getAggregatorValues(aggregator);
  assertThat(values.getValues(), empty());
}
8. DataflowPipelineJobTest#testGetAggregatorValuesWithUnrelatedMetricUpdateIgnoresUpdate()
Project: DataflowJavaSDK
File: DataflowPipelineJobTest.java
/** A metric update whose name matches no registered aggregator must be silently skipped. */
@Test
public void testGetAggregatorValuesWithUnrelatedMetricUpdateIgnoresUpdate()
    throws IOException, AggregatorRetrievalException {
  CombineFn<Long, long[], Long> sumFn = new Sum.SumLongFn();
  String aggName = "agg";
  Aggregator<Long, Long> aggregator = new TestAggregator<>(sumFn, aggName);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);

  // An update attributed to an unrelated counter, with no scalar and no context.
  MetricUpdate ignoredUpdate = new MetricUpdate();
  ignoredUpdate.setScalar(null);
  MetricStructuredName ignoredName = new MetricStructuredName();
  ignoredName.setName("ignoredAggregator.elementCount.out0");
  ignoredName.setContext(null);
  ignoredUpdate.setName(ignoredName);
  jobMetrics.setMetrics(ImmutableList.of(ignoredUpdate));

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<Long> values = job.getAggregatorValues(aggregator);

  assertThat(values.getValuesAtSteps().entrySet(), empty());
  assertThat(values.getValues(), empty());
}
9. DataflowPipelineJobTest#testGetAggregatorValuesWithMultipleMetricUpdatesReturnsCollection()
Project: DataflowJavaSDK
File: DataflowPipelineJobTest.java
/** Updates for the same aggregator at two different steps are all surfaced and summed. */
@Test
public void testGetAggregatorValuesWithMultipleMetricUpdatesReturnsCollection()
    throws IOException, AggregatorRetrievalException {
  CombineFn<Long, long[], Long> sumFn = new Sum.SumLongFn();
  String aggName = "agg";
  Aggregator<Long, Long> aggregator = new TestAggregator<>(sumFn, aggName);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> firstTransform = mock(PTransform.class);
  String firstStep = "s1";
  String firstFullName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> firstApplied = appliedPTransform(firstFullName, firstTransform);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> secondTransform = mock(PTransform.class);
  String secondStep = "s88";
  String secondFullName = "Spam/Ham/Eggs";
  AppliedPTransform<?, ?, ?> secondApplied = appliedPTransform(secondFullName, secondTransform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(
                  aggregator, firstTransform, aggregator, secondTransform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(
              firstApplied, firstStep, secondApplied, secondStep));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);

  // One metric update per step, both attributed to the same logical aggregator.
  MetricUpdate firstUpdate = new MetricUpdate();
  long firstValue = 1234L;
  firstUpdate.setScalar(new BigDecimal(firstValue));
  MetricStructuredName firstName = new MetricStructuredName();
  firstName.setName(aggName);
  firstName.setContext(ImmutableMap.of("step", firstStep));
  firstUpdate.setName(firstName);

  MetricUpdate secondUpdate = new MetricUpdate();
  long secondValue = 1024L;
  secondUpdate.setScalar(new BigDecimal(secondValue));
  MetricStructuredName secondName = new MetricStructuredName();
  secondName.setName(aggName);
  secondName.setContext(ImmutableMap.of("step", secondStep));
  secondUpdate.setName(secondName);

  jobMetrics.setMetrics(ImmutableList.of(firstUpdate, secondUpdate));

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<Long> values = job.getAggregatorValues(aggregator);

  assertThat(values.getValuesAtSteps(), hasEntry(firstFullName, firstValue));
  assertThat(values.getValuesAtSteps(), hasEntry(secondFullName, secondValue));
  assertThat(values.getValuesAtSteps().size(), equalTo(2));
  assertThat(values.getValues(), containsInAnyOrder(firstValue, secondValue));
  assertThat(values.getTotalValue(sumFn), equalTo(Long.valueOf(firstValue + secondValue)));
}
10. DataflowPipelineJobTest#testGetAggregatorValuesWithSingleMetricUpdateReturnsSingletonCollection()
Project: DataflowJavaSDK
File: DataflowPipelineJobTest.java
/** A single matching metric update yields a singleton value collection. */
@Test
public void testGetAggregatorValuesWithSingleMetricUpdateReturnsSingletonCollection()
    throws IOException, AggregatorRetrievalException {
  CombineFn<Long, long[], Long> sumFn = new Sum.SumLongFn();
  String aggName = "agg";
  Aggregator<Long, Long> aggregator = new TestAggregator<>(sumFn, aggName);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);

  // A single update for the aggregator, attributed to step "s1".
  MetricUpdate update = new MetricUpdate();
  long stepValue = 1234L;
  update.setScalar(new BigDecimal(stepValue));
  MetricStructuredName structuredName = new MetricStructuredName();
  structuredName.setName(aggName);
  structuredName.setContext(ImmutableMap.of("step", step));
  update.setName(structuredName);
  jobMetrics.setMetrics(ImmutableList.of(update));

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<Long> values = job.getAggregatorValues(aggregator);

  assertThat(values.getValuesAtSteps(), hasEntry(userName, stepValue));
  assertThat(values.getValuesAtSteps().size(), equalTo(1));
  assertThat(values.getValues(), contains(stepValue));
  assertThat(values.getTotalValue(sumFn), equalTo(Long.valueOf(stepValue)));
}
11. DataflowPipelineJobTest#testGetAggregatorValuesWithNullMetricUpdatesReturnsEmptyValue()
Project: DataflowJavaSDK
File: DataflowPipelineJobTest.java
/** A JobMetrics whose metrics list is null must produce an empty value collection. */
@Test
public void testGetAggregatorValuesWithNullMetricUpdatesReturnsEmptyValue()
    throws IOException, AggregatorRetrievalException {
  Aggregator<?, ?> aggregator = mock(Aggregator.class);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);
  // The service may legitimately return a JobMetrics with no metrics list at all.
  jobMetrics.setMetrics(null);

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<?> values = job.getAggregatorValues(aggregator);
  assertThat(values.getValues(), empty());
}
12. DataflowPipelineJobTest#testGetAggregatorValuesWithNoMetricUpdatesReturnsEmptyValue()
Project: DataflowJavaSDK
File: DataflowPipelineJobTest.java
/** An explicitly empty metrics list must produce an empty value collection. */
@Test
public void testGetAggregatorValuesWithNoMetricUpdatesReturnsEmptyValue()
    throws IOException, AggregatorRetrievalException {
  Aggregator<?, ?> aggregator = mock(Aggregator.class);
  @SuppressWarnings("unchecked")
  PTransform<PInput, POutput> transform = mock(PTransform.class);
  String step = "s1";
  String userName = "Foo/Bar/Baz";
  AppliedPTransform<?, ?, ?> applied = appliedPTransform(userName, transform);
  DataflowAggregatorTransforms aggregatorTransforms =
      new DataflowAggregatorTransforms(
          ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, transform)
              .asMap(),
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(applied, step));

  GetMetrics getMetrics = mock(GetMetrics.class);
  when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
  JobMetrics jobMetrics = new JobMetrics();
  when(getMetrics.execute()).thenReturn(jobMetrics);
  // Present but empty: distinct from the null-metrics case in the service contract.
  jobMetrics.setMetrics(ImmutableList.<MetricUpdate>of());

  Get getState = mock(Get.class);
  when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
  Job modelJob = new Job();
  when(getState.execute()).thenReturn(modelJob);
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job =
      new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
  AggregatorValues<?> values = job.getAggregatorValues(aggregator);
  assertThat(values.getValues(), empty());
}
13. TestDataflowRunner#checkForSuccess()
Project: incubator-beam
File: TestDataflowRunner.java
Optional<Boolean> checkForSuccess(DataflowPipelineJob job) throws IOException { State state = job.getState(); if (state == State.FAILED || state == State.CANCELLED) { LOG.info("The pipeline failed"); return Optional.of(false); } JobMetrics metrics = job.getDataflowClient().projects().jobs().getMetrics(job.getProjectId(), job.getJobId()).execute(); if (metrics == null || metrics.getMetrics() == null) { LOG.warn("Metrics not present for Dataflow job {}.", job.getJobId()); } else { int successes = 0; int failures = 0; for (MetricUpdate metric : metrics.getMetrics()) { if (metric.getName() == null || metric.getName().getContext() == null || !metric.getName().getContext().containsKey(TENTATIVE_COUNTER)) { // Don't double count using the non-tentative version of the metric. continue; } if (PAssert.SUCCESS_COUNTER.equals(metric.getName().getName())) { successes += ((BigDecimal) metric.getScalar()).intValue(); } else if (PAssert.FAILURE_COUNTER.equals(metric.getName().getName())) { failures += ((BigDecimal) metric.getScalar()).intValue(); } } if (failures > 0) { LOG.info("Found result while running Dataflow job {}. Found {} success, {} failures out of " + "{} expected assertions.", job.getJobId(), successes, failures, expectedNumberOfAssertions); return Optional.of(false); } else if (successes >= expectedNumberOfAssertions) { LOG.info("Found result while running Dataflow job {}. Found {} success, {} failures out of " + "{} expected assertions.", job.getJobId(), successes, failures, expectedNumberOfAssertions); return Optional.of(true); } LOG.info("Running Dataflow job {}. Found {} success, {} failures out of {} expected " + "assertions.", job.getJobId(), successes, failures, expectedNumberOfAssertions); } return Optional.<Boolean>absent(); }
14. TestDataflowPipelineRunner#checkForSuccess()
Project: DataflowJavaSDK
File: TestDataflowPipelineRunner.java
Optional<Boolean> checkForSuccess(DataflowPipelineJob job) throws IOException { State state = job.getState(); if (state == State.FAILED || state == State.CANCELLED) { LOG.info("The pipeline failed"); return Optional.of(false); } JobMetrics metrics = job.getDataflowClient().projects().jobs().getMetrics(job.getProjectId(), job.getJobId()).execute(); if (metrics == null || metrics.getMetrics() == null) { LOG.warn("Metrics not present for Dataflow job {}.", job.getJobId()); } else { int successes = 0; int failures = 0; for (MetricUpdate metric : metrics.getMetrics()) { if (metric.getName() == null || metric.getName().getContext() == null || !metric.getName().getContext().containsKey(TENTATIVE_COUNTER)) { // Don't double count using the non-tentative version of the metric. continue; } if (DataflowAssert.SUCCESS_COUNTER.equals(metric.getName().getName())) { successes += ((BigDecimal) metric.getScalar()).intValue(); } else if (DataflowAssert.FAILURE_COUNTER.equals(metric.getName().getName())) { failures += ((BigDecimal) metric.getScalar()).intValue(); } } if (failures > 0) { LOG.info("Found result while running Dataflow job {}. Found {} success, {} failures out of " + "{} expected assertions.", job.getJobId(), successes, failures, expectedNumberOfAssertions); return Optional.of(false); } else if (successes >= expectedNumberOfAssertions) { LOG.info("Found result while running Dataflow job {}. Found {} success, {} failures out of " + "{} expected assertions.", job.getJobId(), successes, failures, expectedNumberOfAssertions); return Optional.of(true); } LOG.info("Running Dataflow job {}. Found {} success, {} failures out of {} expected " + "assertions.", job.getJobId(), successes, failures, expectedNumberOfAssertions); } return Optional.<Boolean>absent(); }