See 
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/1126/display/redirect>

------------------------------------------
[...truncated 345.18 KB...]
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "_equal"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": 
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                  "component_encodings": [
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": []
                    },
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": []
                    }
                  ],
                  "is_pair_like": true
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "assert_that/Match.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s10"
        },
        "serialized_fn": 
"eNq9VOt31EQUT7JbKIEWaRWK+FhQNFXYKPhA5L1tKSwsNdTugNY4SWY3afO6kwntnrN7DsrZnv4NfvCTf6h3ZrfUivDRk5Nk7us3M/f+7n1esXyaUz9krsdoUhecpkUn40lR9zPOzAaNY+rFrM1pnjO+kC2lJmjzv4E+AMMiFU3T3E4KFT+I4rjuyq/p+pxRwdxOmfoiyjCgah2wxxkNXNHLmQkTZBIhGlnAVlGGQ0M47MCk1dSbGr5Gc7Zh7mp97YXe1R9rcKQ1BHOe6BiyDUeHcIwUuLTDLGH2Bks3o7TY+18sYvqM2VsZ3yzwfsyW13NXskI0siSJhLvSE2GWXnbXGI86Pbvgvl0Em4WdK739j6TY+0mxZVLqeQ+m1LmvxTTxAnoDph9OTTQ0OE4M1GI+3hrCiXkBMw7MHrh5lwmXCsFNeFsBeGUUCzwtvEMOo4hmaYWTO3DKgbkDoVGSZ1y4SRaUMSbuNDmDAW8oHbw7hDMOvKf2cRHEF64L7+/ABw58SA5JJYOSxlBr/VfxfIYCnA2rVjgux0RzBsvxl9B2sRwDvXdB6GplCF0WaFDpG/3KZoVfEkagy3XHOIn63/W2lm5UNVHZNPmfuiaf7Vt9bUFbvzKo9qb7+ka1X32h61pbg/voN4F+z0d+EkmWfw/pCXoQfNtaX+d/vGpNDaIFGhLlXIucwhsu0ShmQY0WBePiau08r12/jl/4aAc+tkgVPeKoEHBepaPA9LIAPiGzKNzBjN5WYYvbPssljeFTcgQtkqeLnGccLBXGWZI9YzBPTBTWaFyOrZ8J+HzkQX0h83yBTKHAtnPm4z6u2vkiOf5yZ3fPBHXlOdaOo21FEBazhKUCvhDwJcn/F+6zAgnatUsRxZL4l8Jac6txTtMnJwx9Uj2GPmNM6yb+p/Ex9Dm9il+4rJj38lJfDeFrbIlvHPg2nAtPk7l/03e0UV1uBFeG8J0DV0Ok6/cOXAtrrfDsOly3BnBjlNO0TDzG4WZTVzOowKa5pVbU8+F2OYA7VnizaYQjZcA60CjZEBYcWBzC0gDukqOyA+QQcsMoFQUsH5yDaFD6esCwm6jIeGHeeySLvyzVJtzDIXi/NYCmpaCiNC+FwivgQYscQ1VWin3dw1a5Ay0Pa/7IgZUh/OCAM4THA1i1wuVQgv2IYGtW+KAVKt+2Nzoi5d0CUyinLAlXSgFPHHiqyJDzzGdFAT+FT1+5zc8Kch0hf9mHdL3SW4dfB0DXwXvj2G9HaZBtYTlM8BEnGACzVDm3lAHP0nld/MjDvBtnHo1HOJitLqKEqn8Ej7pdxhEieh3E2MVcYB1axmJ1LMIGgmySE6ql/DIpYyr7Uo5EBjEyYUbCRwkSiSa562eJF6VIkgRNKj9R4QYjSEh3S09AVv8b55Q1aQ==",
        "user_name": "assert_that/Match"
      }
    }
  ],
  "type": "JOB_TYPE_BATCH"
}
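
Note: the "assert_that/Match" step serialized above (the _equal matcher wrapped in a CallableWrapperDoFn) is what Beam's testing utilities generate. A minimal sketch of that pattern, with a placeholder Create in place of the test's BigQuery read, looks like:

import apache_beam as beam
from apache_beam.testing.util import assert_that, equal_to

with beam.Pipeline() as p:
    # Placeholder source; the failing test reads from BigQuery instead.
    result = p | 'read' >> beam.Create([1, 2, 3])
    # assert_that expands into the WindowInto/ToVoidKey/Group/Unkey/Match
    # steps that appear in the fusion messages below; Match applies the
    # _equal matcher seen in the step description above.
    assert_that(result, equal_to([1, 2, 3]))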
root: INFO: Create job: <Job
 createTime: '2019-06-13T18:59:16.799337Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2019-06-13_11_59_15-13502679819294419411'
 location: 'us-central1'
 name: 'beamapp-jenkins-0613185907-136820'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2019-06-13T18:59:16.799337Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2019-06-13_11_59_15-13502679819294419411]
root: INFO: To access the Dataflow monitoring console, please navigate to 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_59_15-13502679819294419411?project=apache-beam-testing
root: INFO: Job 2019-06-13_11_59_15-13502679819294419411 is in state 
JOB_STATE_RUNNING
root: INFO: 2019-06-13T18:59:15.693Z: JOB_MESSAGE_DETAILED: Autoscaling is 
enabled for job 2019-06-13_11_59_15-13502679819294419411. The number of workers 
will be between 1 and 1000.
root: INFO: 2019-06-13T18:59:15.741Z: JOB_MESSAGE_DETAILED: Autoscaling was 
automatically enabled for job 2019-06-13_11_59_15-13502679819294419411.
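
Note: the bounds reported above (1 to 1000 workers) are the Dataflow autoscaling defaults. A hedged sketch of how they could be constrained via standard Beam worker options follows; the values are illustrative, not this job's settings:

from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=DataflowRunner',
    '--project=apache-beam-testing',
    '--region=us-central1',
    '--num_workers=1',                          # initial pool size
    '--max_num_workers=10',                     # caps the autoscaling range (illustrative)
    '--autoscaling_algorithm=THROUGHPUT_BASED',
])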
root: INFO: 2019-06-13T18:59:18.924Z: JOB_MESSAGE_DETAILED: Checking 
permissions granted to controller Service Account.
root: INFO: 2019-06-13T18:59:19.544Z: JOB_MESSAGE_BASIC: Worker configuration: 
n1-standard-1 in us-central1-a.
root: INFO: 2019-06-13T18:59:20.108Z: JOB_MESSAGE_DETAILED: Expanding 
CoGroupByKey operations into optimizable parts.
root: INFO: 2019-06-13T18:59:20.150Z: JOB_MESSAGE_DEBUG: Combiner lifting 
skipped for step assert_that/Group/GroupByKey: GroupByKey not followed by a 
combiner.
root: INFO: 2019-06-13T18:59:20.191Z: JOB_MESSAGE_DETAILED: Expanding 
GroupByKey operations into optimizable parts.
root: INFO: 2019-06-13T18:59:20.232Z: JOB_MESSAGE_DETAILED: Lifting 
ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2019-06-13T18:59:20.325Z: JOB_MESSAGE_DEBUG: Annotating graph with 
Autotuner information.
root: INFO: 2019-06-13T18:59:20.580Z: JOB_MESSAGE_DETAILED: Fusing adjacent 
ParDo, Read, Write, and Flatten operations
root: INFO: 2019-06-13T18:59:20.624Z: JOB_MESSAGE_DETAILED: Unzipping flatten 
s7 for input s5.out
root: INFO: 2019-06-13T18:59:20.669Z: JOB_MESSAGE_DETAILED: Fusing unzipped 
copy of assert_that/Group/GroupByKey/Reify, through flatten 
assert_that/Group/Flatten, into producer assert_that/Group/pair_with_0
root: INFO: 2019-06-13T18:59:20.713Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Group/GroupByKey/Reify into assert_that/Group/pair_with_1
root: INFO: 2019-06-13T18:59:20.751Z: JOB_MESSAGE_DETAILED: Unzipping flatten 
s7-u13 for input s8-reify-value0-c11
root: INFO: 2019-06-13T18:59:20.800Z: JOB_MESSAGE_DETAILED: Fusing unzipped 
copy of assert_that/Group/GroupByKey/Write, through flatten 
assert_that/Group/Flatten/Unzipped-1, into producer 
assert_that/Group/GroupByKey/Reify
root: INFO: 2019-06-13T18:59:20.844Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Group/GroupByKey/Write into assert_that/Group/GroupByKey/Reify
root: INFO: 2019-06-13T18:59:20.883Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Match into assert_that/Unkey
root: INFO: 2019-06-13T18:59:20.912Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/WindowInto(WindowIntoFn) into read
root: INFO: 2019-06-13T18:59:20.948Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Group/pair_with_1 into assert_that/ToVoidKey
root: INFO: 2019-06-13T18:59:20.993Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Unkey into assert_that/Group/Map(_merge_tagged_vals_under_key)
root: INFO: 2019-06-13T18:59:21.030Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Group/Map(_merge_tagged_vals_under_key) into 
assert_that/Group/GroupByKey/GroupByWindow
root: INFO: 2019-06-13T18:59:21.071Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/ToVoidKey into assert_that/WindowInto(WindowIntoFn)
root: INFO: 2019-06-13T18:59:21.118Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Group/GroupByKey/GroupByWindow into 
assert_that/Group/GroupByKey/Read
root: INFO: 2019-06-13T18:59:21.160Z: JOB_MESSAGE_DETAILED: Fusing consumer 
assert_that/Group/pair_with_0 into assert_that/Create/Read
root: INFO: 2019-06-13T18:59:21.209Z: JOB_MESSAGE_DEBUG: Workflow config is 
missing a default resource spec.
root: INFO: 2019-06-13T18:59:21.253Z: JOB_MESSAGE_DEBUG: Adding StepResource 
setup and teardown to workflow graph.
root: INFO: 2019-06-13T18:59:21.300Z: JOB_MESSAGE_DEBUG: Adding workflow start 
and stop steps.
root: INFO: 2019-06-13T18:59:21.344Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-06-13T18:59:21.575Z: JOB_MESSAGE_DEBUG: Executing wait step 
start21
root: INFO: 2019-06-13T18:59:21.800Z: JOB_MESSAGE_BASIC: Executing operation 
assert_that/Group/GroupByKey/Create
root: INFO: 2019-06-13T18:59:21.849Z: JOB_MESSAGE_DEBUG: Starting worker pool 
setup.
root: INFO: 2019-06-13T18:59:21.885Z: JOB_MESSAGE_BASIC: Starting 1 workers in 
us-central1-a...
root: INFO: 2019-06-13T18:59:21.940Z: JOB_MESSAGE_BASIC: Finished operation 
assert_that/Group/GroupByKey/Create
root: INFO: 2019-06-13T18:59:22.040Z: JOB_MESSAGE_DEBUG: Value 
"assert_that/Group/GroupByKey/Session" materialized.
root: INFO: 2019-06-13T18:59:22.109Z: JOB_MESSAGE_BASIC: Executing operation 
read+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/pair_with_1+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
root: INFO: 2019-06-13T18:59:22.156Z: JOB_MESSAGE_BASIC: Executing operation 
assert_that/Create/Read+assert_that/Group/pair_with_0+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
root: INFO: 2019-06-13T18:59:22.183Z: JOB_MESSAGE_BASIC: BigQuery query issued 
as job: "dataflow_job_2784907202880642241". You can check its status with the 
bq tool: "bq show -j --project_id=apache-beam-testing 
dataflow_job_2784907202880642241".
root: INFO: 2019-06-13T19:01:14.111Z: JOB_MESSAGE_BASIC: BigQuery query 
completed, job : "dataflow_job_2784907202880642241"
root: INFO: 2019-06-13T19:01:14.455Z: JOB_MESSAGE_BASIC: BigQuery export job 
"dataflow_job_1516066595430977758" started. You can check its status with the 
bq tool: "bq show -j --project_id=apache-beam-testing 
dataflow_job_1516066595430977758".
root: INFO: 2019-06-13T19:01:44.774Z: JOB_MESSAGE_DETAILED: BigQuery export job 
progress: "dataflow_job_1516066595430977758" observed total of 1 exported files 
thus far.
root: INFO: 2019-06-13T19:01:44.815Z: JOB_MESSAGE_BASIC: BigQuery export job 
finished: "dataflow_job_1516066595430977758"
root: INFO: 2019-06-13T19:16:01.545Z: JOB_MESSAGE_ERROR: Startup of the worker 
pool in zone us-central1-a failed to bring up any of the desired 1 workers. 
INTERNAL_ERROR: Internal error. Please try again or contact Google Support. 
(Code: '6776894920780038994')
root: INFO: 2019-06-13T19:16:01.712Z: JOB_MESSAGE_ERROR: Workflow failed.
root: INFO: 2019-06-13T19:16:01.779Z: JOB_MESSAGE_BASIC: Finished operation 
read+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/pair_with_1+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
root: INFO: 2019-06-13T19:16:01.801Z: JOB_MESSAGE_BASIC: Finished operation 
assert_that/Create/Read+assert_that/Group/pair_with_0+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
root: INFO: 2019-06-13T19:16:01.944Z: JOB_MESSAGE_DETAILED: Cleaning up.
root: INFO: 2019-06-13T19:16:02.185Z: JOB_MESSAGE_DEBUG: Starting worker pool 
teardown.
root: INFO: 2019-06-13T19:16:02.218Z: JOB_MESSAGE_BASIC: Stopping worker pool...
root: INFO: 2019-06-13T19:16:20.660Z: JOB_MESSAGE_BASIC: Worker pool stopped.
root: INFO: 2019-06-13T19:16:20.708Z: JOB_MESSAGE_DEBUG: Tearing down pending 
resources...
root: INFO: Job 2019-06-13_11_59_15-13502679819294419411 is in state 
JOB_STATE_FAILED
root: INFO: Deleting dataset python_read_table_15604523455493 in project 
apache-beam-testing
--------------------- >> end captured logging << ---------------------
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1137:
 BeamDeprecationWarning: options is deprecated since First stable release. 
References to <pipeline>.options will not be supported
  method_to_use = self._compute_method(p, p.options)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_59-16364970736077324908?project=apache-beam-testing.
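
Note: the BeamDeprecationWarning above is raised inside the SDK itself, but the guidance it encodes for user code is to keep a handle on the PipelineOptions that were passed in rather than reading them back through the deprecated <pipeline>.options attribute. A minimal sketch, with placeholder option values:

import apache_beam as beam
from apache_beam.options.pipeline_options import GoogleCloudOptions, PipelineOptions

options = PipelineOptions([
    '--project=apache-beam-testing',
    '--temp_location=gs://some-bucket/tmp',   # placeholder bucket
])
with beam.Pipeline(options=options) as p:
    _ = p | beam.Create([None])
    # Consult the options object you constructed, not p.options.
    temp_location = options.view_as(GoogleCloudOptions).temp_location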
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_59_15-13502679819294419411?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_16_42-871369653416432618?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_55-2504108007836455440?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_05_17-18426043707792705623?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:687:
 BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use 
WriteToBigQuery instead.
  kms_key=transform.kms_key))
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_13_42-7849352977642171075?project=apache-beam-testing.
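
Note: as a reference for the BigQuerySink deprecation above, a minimal sketch of the recommended WriteToBigQuery transform follows; the table, schema and rows are placeholders, and the kms_key seen in the quoted source line can also be passed as a keyword in SDKs that support it:

import apache_beam as beam

rows = [{'name': 'a', 'value': 1}]   # placeholder rows
with beam.Pipeline() as p:
    _ = (p
         | beam.Create(rows)
         | beam.io.WriteToBigQuery(
             'apache-beam-testing:some_dataset.some_table',   # placeholder table
             schema='name:STRING,value:INTEGER',
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))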
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_57-3116076711338048500?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1137:
 BeamDeprecationWarning: options is deprecated since First stable release. 
References to <pipeline>.options will not be supported
  method_to_use = self._compute_method(p, p.options)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_57_04-2379261609617930565?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_04_29-7873545670290559640?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:687:
 BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use 
WriteToBigQuery instead.
  kms_key=transform.kms_key))
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_13_28-12839220612430454248?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_20_57-2310782423160554178?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_53-15486744418608649967?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/fileio_test.py>:232:
 FutureWarning: MatchAll is experimental.
  | 'GetPath' >> beam.Map(lambda metadata: metadata.path))
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_08_56-9662886162960268515?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_18_52-9967468626120092378?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/fileio_test.py>:243:
 FutureWarning: MatchAll is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_27_52-7134890599637993606?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/fileio_test.py>:243:
 FutureWarning: ReadMatches is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
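
Note: the FutureWarnings above come from the experimental fileio transforms exercised by fileio_test.py. A hedged sketch of that usage, with a placeholder file pattern and a plain read in place of the test's compute_hash helper:

import apache_beam as beam
from apache_beam.io import fileio

with beam.Pipeline() as p:
    matches = (p
               | beam.Create(['/tmp/some_dir/*.txt'])   # placeholder glob
               | fileio.MatchAll())                      # experimental: glob -> file metadata
    _ = matches | 'GetPath' >> beam.Map(lambda metadata: metadata.path)
    _ = (matches
         | fileio.ReadMatches()                          # experimental: metadata -> readable files
         | 'Contents' >> beam.Map(lambda f: f.read()))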
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_53-7995424492233023600?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_53_21-14025036681842978677?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_02_07-10649549094607396983?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_13_23-12607831820243958363?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_21_43-438844323420589386?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_31_04-15138318081195443507?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_52-13195923421306876614?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_52_21-7121570418680863808?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_01_48-6706586218029410816?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_11_33-7303377283729418990?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_20_06-3210337629052883143?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:687:
 BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use 
WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1137:
 BeamDeprecationWarning: options is deprecated since First stable release. 
References to <pipeline>.options will not be supported
  method_to_use = self._compute_method(p, p.options)
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:545:
 BeamDeprecationWarning: options is deprecated since First stable release. 
References to <pipeline>.options will not be supported
  or p.options.view_as(GoogleCloudOptions).temp_location)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_55-9937530507829938822?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:687:
 BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use 
WriteToBigQuery instead.
  kms_key=transform.kms_key))
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_54_21-17060478202948206955?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_02_13-6459040200144792245?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/big_query_query_to_table_pipeline.py>:73:
 BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use 
WriteToBigQuery instead.
  kms_key=kms_key))
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_09_55-5878821088245327710?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_18_06-11469836639039290059?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_43_54-17407449781844798863?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_11_55_03-5107753182024514094?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1137:
 BeamDeprecationWarning: options is deprecated since First stable release. 
References to <pipeline>.options will not be supported
  method_to_use = self._compute_method(p, p.options)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_06_00-17974390365975178847?project=apache-beam-testing.
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:545:
 BeamDeprecationWarning: options is deprecated since First stable release. 
References to <pipeline>.options will not be supported
  or p.options.view_as(GoogleCloudOptions).temp_location)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-06-13_12_16_13-6548315903228987484?project=apache-beam-testing.

----------------------------------------------------------------------
XML: 
<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 42 tests in 3490.532s

FAILED (SKIP=5, errors=1)

> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/test-suites/dataflow/py35/build.gradle>'
 line: 48

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py35:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/test-suites/dataflow/py37/build.gradle>'
 line: 78

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python3_Verify/ws/src/sdks/python/test-suites/dataflow/py36/build.gradle>'
 line: 48

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 59m 7s
77 actionable tasks: 60 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/2mpqpc4mhgftm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
