See 
<https://builds.apache.org/job/beam_PostCommit_Python2/1133/display/redirect>

Changes:


------------------------------------------
[...truncated 1.64 MB...]
            "name": "dataflow_python_sdk.tar"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1204070151-897789.1575442911.897929/dataflow-worker.jar",
 
            "name": "dataflow-worker.jar"
          }
        ], 
        "taskrunnerSettings": {
          "parallelWorkerSettings": {
            "baseUrl": "https://dataflow.googleapis.com";, 
            "servicePath": "https://dataflow.googleapis.com";
          }
        }, 
        "workerHarnessContainerImage": 
"gcr.io/cloud-dataflow/v1beta3/python:beam-master-20191112"
      }
    ]
  }, 
  "name": "beamapp-jenkins-1204070151-897789", 
  "steps": [
    {
      "kind": "ParallelRead", 
      "name": "s1", 
      "properties": {
        "bigquery_export_format": "FORMAT_AVRO", 
        "bigquery_flatten_results": true, 
        "bigquery_query": "SELECT bytes, date, time FROM 
[python_query_to_table_15754429102349.python_new_types_table]", 
        "bigquery_use_legacy_sql": true, 
        "display_data": [
          {
            "key": "source", 
            "label": "Read Source", 
            "namespace": "apache_beam.io.iobase.Read", 
            "shortValue": "BigQuerySource", 
            "type": "STRING", 
            "value": "apache_beam.io.gcp.bigquery.BigQuerySource"
          }, 
          {
            "key": "query", 
            "label": "Query", 
            "namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
            "type": "STRING", 
            "value": "SELECT bytes, date, time FROM 
[python_query_to_table_15754429102349.python_new_types_table]"
          }, 
          {
            "key": "validation", 
            "label": "Validation Enabled", 
            "namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
            "type": "BOOLEAN", 
            "value": false
          }
        ], 
        "format": "bigquery", 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                  "component_encodings": [
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": []
                    }, 
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "read.out"
          }
        ], 
        "user_name": "read"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s2", 
      "properties": {
        "create_disposition": "CREATE_IF_NEEDED", 
        "dataset": "python_query_to_table_15754429102349", 
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": 
"RowAsDictJsonCoder$eNprYEpOLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLaqML8nPzynmCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwBKpRfo",
 
              "component_encodings": []
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "bigquery", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s1"
        }, 
        "schema": "{\"fields\": [{\"type\": \"BYTES\", \"name\": \"bytes\", 
\"mode\": \"NULLABLE\"}, {\"type\": \"DATE\", \"name\": \"date\", \"mode\": 
\"NULLABLE\"}, {\"type\": \"TIME\", \"name\": \"time\", \"mode\": 
\"NULLABLE\"}]}", 
        "table": "output_table", 
        "user_name": "write/WriteToBigQuery/NativeWrite", 
        "write_disposition": "WRITE_EMPTY"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: u'2019-12-04T07:02:02.922366Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2019-12-03_23_02_01-13470319390034103051'
 location: u'us-central1'
 name: u'beamapp-jenkins-1204070151-897789'
 projectId: u'apache-beam-testing'
 stageStates: []
 startTime: u'2019-12-04T07:02:02.922366Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: 
[2019-12-03_23_02_01-13470319390034103051]
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow 
monitoring console, please navigate to 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_02_01-13470319390034103051?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 
2019-12-03_23_02_01-13470319390034103051 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:01.646Z: 
JOB_MESSAGE_DETAILED: Autoscaling is enabled for job 
2019-12-03_23_02_01-13470319390034103051. The number of workers will be between 
1 and 1000.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:01.646Z: 
JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job 
2019-12-03_23_02_01-13470319390034103051.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:05.098Z: 
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service 
Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:06.104Z: 
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:06.841Z: 
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:06.884Z: 
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:06.925Z: 
JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into 
MergeBucketsMappingFns
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:06.962Z: 
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.179Z: 
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.216Z: 
JOB_MESSAGE_DETAILED: Fusing consumer write/WriteToBigQuery/NativeWrite into 
read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.252Z: 
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.284Z: 
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.312Z: 
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.347Z: 
JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.543Z: 
JOB_MESSAGE_DEBUG: Executing wait step start3
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.610Z: 
JOB_MESSAGE_BASIC: Executing operation read+write/WriteToBigQuery/NativeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.661Z: 
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:07.687Z: 
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-a...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:10.034Z: 
JOB_MESSAGE_BASIC: BigQuery query issued as job: 
"dataflow_job_15591605729062557778". You can check its status with the bq tool: 
"bq show -j --project_id=apache-beam-testing dataflow_job_15591605729062557778".
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:02:33.600Z: 
JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric 
descriptors and Stackdriver will not create new Dataflow custom metrics for 
this job. Each unique user-defined metric name (independent of the DoFn in 
which it is defined) produces a new metric descriptor. To delete old / unused 
metric descriptors see 
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
 and 
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:03:22.162Z: 
JOB_MESSAGE_BASIC: BigQuery query completed, job : 
"dataflow_job_15591605729062557778"
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:03:22.994Z: 
JOB_MESSAGE_BASIC: BigQuery export job "dataflow_job_12035580157443981119" 
started. You can check its status with the bq tool: "bq show -j 
--project_id=apache-beam-testing dataflow_job_12035580157443981119".
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:03:53.575Z: 
JOB_MESSAGE_DETAILED: BigQuery export job progress: 
"dataflow_job_12035580157443981119" observed total of 1 exported files thus far.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:03:53.610Z: 
JOB_MESSAGE_BASIC: BigQuery export job finished: 
"dataflow_job_12035580157443981119"
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:05:54.201Z: 
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on 
the rate of progress in the currently running step(s).
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:08:07.516Z: 
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service 
Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:09:23.377Z: 
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:09:23.416Z: 
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:14:07.516Z: 
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service 
Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:24.391Z: 
JOB_MESSAGE_BASIC: Executing BigQuery import job 
"dataflow_job_15591605729062557668". You can check its status with the bq tool: 
"bq show -j --project_id=apache-beam-testing dataflow_job_15591605729062557668".
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:35.096Z: 
JOB_MESSAGE_BASIC: BigQuery import job "dataflow_job_15591605729062557668" done.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:35.912Z: 
JOB_MESSAGE_BASIC: Finished operation read+write/WriteToBigQuery/NativeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:36.048Z: 
JOB_MESSAGE_DEBUG: Executing success step success1
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:36.180Z: 
JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:36.272Z: 
JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2019-12-04T07:15:36.300Z: 
JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting 
for job 2019-12-03_23_02_01-13470319390034103051 after 903 seconds
--------------------- >> end captured logging << ---------------------
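
The failure above appears to be a timeout: the runner stopped waiting for job 2019-12-03_23_02_01-13470319390034103051 after 903 seconds while it was still in JOB_STATE_RUNNING. One way to check what that job did after the test gave up is the gcloud CLI (a sketch only, assuming gcloud is installed and authorized against the apache-beam-testing project; the job id and region are taken from the log above):

  gcloud dataflow jobs describe 2019-12-03_23_02_01-13470319390034103051 --region=us-central1 --project=apache-beam-testing

The BigQuery jobs it launched can likewise be inspected with the bq commands already quoted in the log (bq show -j --project_id=apache-beam-testing ...).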

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df.xml
----------------------------------------------------------------------
XML: 
<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 45 tests in 3815.690s

FAILED (SKIP=5, failures=1)
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_37-1437689533205745553?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_01_47-11909671375276399653?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_09_49-12064854498549684189?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_18_11-13933751367028216080?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_26_18-7614073875704279693?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_34_30-11094362106361533743?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_44-17844996470492712355?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_55_26-11482375866466097774?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_05_04-6865052614795920999?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_14_19-6537885105266466023?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_22_04-12538356843994250671?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_43-17505937709111549403?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_54_26-12137136457314355452?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_02_01-13470319390034103051?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_19_40-7447249464445992114?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_26_59-6563300292191116124?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_37-17842996789480061316?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_57_21-8792086651219592091?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_15_57-2827154399567968010?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_38-12738977726207882441?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_47_22-3321692091305697648?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_56_32-9110864445308347575?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_05_37-3242659723270943369?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_14_01-15439746279054752425?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_37-9738462014949419437?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_47_04-3365572095836602371?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_57_06-10320717304792994268?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_06_07-11778901042296379923?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_14_27-5895867005058727283?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_39-5213655990202198813?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_48_22-7998081344345349404?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_56_52-7375479468716049264?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_05_10-15768323067666409823?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_13_03-913827071797839679?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_39_37-247906133756001053?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_49_45-17826960762099534518?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_22_59_48-1310550418307551051?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_06_52-6889373903719029680?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-03_23_15_33-7005650674260834965?project=apache-beam-testing

> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle>' line: 85

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
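
Concretely, re-running just the failing task with those flags would look something like the following from the repository root (a sketch; it assumes the Gradle wrapper checked into the Beam source tree, and uses the task path reported above):

  ./gradlew :sdks:python:test-suites:dataflow:py2:postCommitIT --stacktrace --info --scan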

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
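
Likewise, the individual deprecation warnings can be surfaced by adding the suggested flag to the same invocation (again a sketch using the hypothetical local wrapper call):

  ./gradlew :sdks:python:test-suites:dataflow:py2:postCommitIT --warning-mode all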

BUILD FAILED in 1h 4m 37s
120 actionable tasks: 94 executed, 23 from cache, 3 up-to-date

Publishing build scan...
https://scans.gradle.com/s/5q6ckyft32om6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org
