See 
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/7874/display/redirect>

------------------------------------------
[...truncated 717.79 KB...]
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0411121603-819105.1554984963.819238/pbr-5.1.3.tar.gz",
 
            "name": "pbr-5.1.3.tar.gz"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0411121603-819105.1554984963.819238/setuptools-41.0.0.zip",
 
            "name": "setuptools-41.0.0.zip"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0411121603-819105.1554984963.819238/mock-2.0.0.tar.gz",
 
            "name": "mock-2.0.0.tar.gz"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0411121603-819105.1554984963.819238/six-1.12.0.tar.gz",
 
            "name": "six-1.12.0.tar.gz"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0411121603-819105.1554984963.819238/funcsigs-1.0.2.tar.gz",
 
            "name": "funcsigs-1.0.2.tar.gz"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0411121603-819105.1554984963.819238/dataflow_python_sdk.tar",
 
            "name": "dataflow_python_sdk.tar"
          }
        ], 
        "taskrunnerSettings": {
          "parallelWorkerSettings": {
            "baseUrl": "https://dataflow.googleapis.com", 
            "servicePath": "https://dataflow.googleapis.com"
          }
        }, 
        "workerHarnessContainerImage": 
"gcr.io/cloud-dataflow/v1beta3/python:beam-master-20190226"
      }
    ]
  }, 
  "name": "beamapp-jenkins-0411121603-819105", 
  "steps": [
    {
      "kind": "ParallelRead", 
      "name": "s1", 
      "properties": {
        "bigquery_export_format": "FORMAT_AVRO", 
        "bigquery_flatten_results": true, 
        "bigquery_query": "SELECT * FROM (SELECT \"apple\" as fruit), (SELECT 
\"orange\" as fruit),", 
        "bigquery_use_legacy_sql": true, 
        "display_data": [
          {
            "key": "source", 
            "label": "Read Source", 
            "namespace": "apache_beam.io.iobase.Read", 
            "shortValue": "BigQuerySource", 
            "type": "STRING", 
            "value": "apache_beam.io.gcp.bigquery.BigQuerySource"
          }, 
          {
            "key": "query", 
            "label": "Query", 
            "namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
            "type": "STRING", 
            "value": "SELECT * FROM (SELECT \"apple\" as fruit), (SELECT 
\"orange\" as fruit),"
          }, 
          {
            "key": "validation", 
            "label": "Validation Enabled", 
            "namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
            "type": "BOOLEAN", 
            "value": false
          }
        ], 
        "format": "bigquery", 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                  "component_encodings": [
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": []
                    }, 
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "read.out"
          }
        ], 
        "user_name": "read"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s2", 
      "properties": {
        "create_disposition": "CREATE_IF_NEEDED", 
        "dataset": "python_query_to_table_15549849637488", 
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": 
"RowAsDictJsonCoder$eNprYEpOLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLaqML8nPzynmCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwBKpRfo",
 
              "component_encodings": []
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "bigquery", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s1"
        }, 
        "schema": "{\"fields\": [{\"type\": \"STRING\", \"name\": \"fruit\", 
\"mode\": \"NULLABLE\"}]}", 
        "table": "output_table", 
        "user_name": "write/WriteToBigQuery/NativeWrite", 
        "write_disposition": "WRITE_EMPTY"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
 createTime: u'2019-04-11T12:16:09.529406Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2019-04-11_05_16_08-14492185392985023273'
 location: u'us-central1'
 name: u'beamapp-jenkins-0411121603-819105'
 projectId: u'apache-beam-testing'
 stageStates: []
 startTime: u'2019-04-11T12:16:09.529406Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2019-04-11_05_16_08-14492185392985023273]
root: INFO: To access the Dataflow monitoring console, please navigate to 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_16_08-14492185392985023273?project=apache-beam-testing
root: INFO: Job 2019-04-11_05_16_08-14492185392985023273 is in state 
JOB_STATE_RUNNING
root: INFO: 2019-04-11T12:16:08.766Z: JOB_MESSAGE_DETAILED: Autoscaling is 
enabled for job 2019-04-11_05_16_08-14492185392985023273. The number of workers 
will be between 1 and 1000.
root: INFO: 2019-04-11T12:16:08.813Z: JOB_MESSAGE_DETAILED: Autoscaling was 
automatically enabled for job 2019-04-11_05_16_08-14492185392985023273.
root: INFO: 2019-04-11T12:16:11.430Z: JOB_MESSAGE_DETAILED: Checking 
permissions granted to controller Service Account.
root: INFO: 2019-04-11T12:16:12.016Z: JOB_MESSAGE_BASIC: Worker configuration: 
n1-standard-1 in us-central1-a.
root: INFO: 2019-04-11T12:16:12.610Z: JOB_MESSAGE_DETAILED: Expanding 
CoGroupByKey operations into optimizable parts.
root: INFO: 2019-04-11T12:16:12.646Z: JOB_MESSAGE_DETAILED: Expanding 
GroupByKey operations into optimizable parts.
root: INFO: 2019-04-11T12:16:12.694Z: JOB_MESSAGE_DETAILED: Lifting 
ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2019-04-11T12:16:12.739Z: JOB_MESSAGE_DEBUG: Annotating graph with 
Autotuner information.
root: INFO: 2019-04-11T12:16:12.952Z: JOB_MESSAGE_DETAILED: Fusing adjacent 
ParDo, Read, Write, and Flatten operations
root: INFO: 2019-04-11T12:16:13.014Z: JOB_MESSAGE_DETAILED: Fusing consumer 
write/WriteToBigQuery/NativeWrite into read
root: INFO: 2019-04-11T12:16:13.055Z: JOB_MESSAGE_DEBUG: Workflow config is 
missing a default resource spec.
root: INFO: 2019-04-11T12:16:13.097Z: JOB_MESSAGE_DEBUG: Adding StepResource 
setup and teardown to workflow graph.
root: INFO: 2019-04-11T12:16:13.143Z: JOB_MESSAGE_DEBUG: Adding workflow start 
and stop steps.
root: INFO: 2019-04-11T12:16:13.187Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-04-11T12:16:13.346Z: JOB_MESSAGE_DEBUG: Executing wait step 
start3
root: INFO: 2019-04-11T12:16:13.434Z: JOB_MESSAGE_BASIC: Executing operation 
read+write/WriteToBigQuery/NativeWrite
root: INFO: 2019-04-11T12:16:13.495Z: JOB_MESSAGE_DEBUG: Starting worker pool 
setup.
root: INFO: 2019-04-11T12:16:13.539Z: JOB_MESSAGE_BASIC: Starting 1 workers in 
us-central1-a...
root: INFO: 2019-04-11T12:16:17.126Z: JOB_MESSAGE_BASIC: BigQuery query issued 
as job: "dataflow_job_15522661840329699159". You can check its status with the 
bq tool: "bq show -j --project_id=apache-beam-testing 
dataflow_job_15522661840329699159".
root: INFO: 2019-04-11T12:16:24.383Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised 
the number of workers to 0 based on the rate of progress in the currently 
running step(s).
root: INFO: 2019-04-11T12:16:32.707Z: JOB_MESSAGE_ERROR: Startup of the worker 
pool in zone us-central1-a failed to bring up any of the desired 1 workers. 
Please check for errors in your job parameters, check quota and retry later, or 
please try in a different zone/region.
root: INFO: 2019-04-11T12:16:32.753Z: JOB_MESSAGE_ERROR: Workflow failed. 
Causes: Internal Issue (51122d3b4de2c922): 82159483:17
root: INFO: 2019-04-11T12:16:36.161Z: JOB_MESSAGE_DETAILED: Cleaning up.
root: INFO: 2019-04-11T12:16:36.229Z: JOB_MESSAGE_DEBUG: Starting worker pool 
teardown.
root: INFO: 2019-04-11T12:16:36.267Z: JOB_MESSAGE_BASIC: Stopping worker pool...
root: INFO: 2019-04-11T12:16:46.127Z: JOB_MESSAGE_BASIC: Worker pool stopped.
root: INFO: 2019-04-11T12:16:46.170Z: JOB_MESSAGE_DEBUG: Tearing down pending 
resources...
root: INFO: Job 2019-04-11_05_16_08-14492185392985023273 is in state 
JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: 
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 34 tests in 3173.404s

FAILED (SKIP=1, errors=3)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_55-7736259112370380929?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_19_58-14597096579663661830?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_27_42-6724522557744014615?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_56-10591242976167564322?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_20_00-1235373375856028270?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_30_30-7733579645112616851?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_53-15925742527703903305?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_13_38-3398087972181197242?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_21_20-1889411654435037382?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_29_10-10746700241023393487?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_54-11004161225184986696?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_31_33-1238676200858930215?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_38_30-6698449833910008006?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_54-1977205075709278617?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_13_37-12115645682678357081?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_23_29-756144318218386296?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_31_38-8539406099610303517?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_54-9656723893253213395?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_25_27-8261441462815758706?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_54-17134601813342667479?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_13_11-15859316303220388613?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_21_30-12962081667810376431?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_28_03-5722833253222122325?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_35_07-17227550388531857504?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_43_15-14657079522949594932?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_49_43-4638067423236232778?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_04_54-13610188094963273501?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_15_12-7105694153526409117?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_16_08-14492185392985023273?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_17_08-1163284977267170100?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_24_07-10852318797318474645?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-11_05_31_42-11362949436815759195?project=apache-beam-testing.

> Task :beam-sdks-python:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle>'
 line: 229

* What went wrong:
Execution failed for task ':beam-sdks-python:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 57m 14s
6 actionable tasks: 6 executed

Publishing build scan...
https://gradle.com/s/7ooapek476gai

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to