See 
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/7953/display/redirect?page=changes>

Changes:

[boyuanz] Add a new sdf E2E test without defer_remainder

------------------------------------------
[...truncated 437.77 KB...]
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0418193332-936392.1555616012.936524/dataflow_python_sdk.tar",
 
            "name": "dataflow_python_sdk.tar"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0418193332-936392.1555616012.936524/dataflow-worker.jar",
 
            "name": "dataflow-worker.jar"
          }
        ], 
        "taskrunnerSettings": {
          "parallelWorkerSettings": {
            "baseUrl": "https://dataflow.googleapis.com";, 
            "servicePath": "https://dataflow.googleapis.com";
          }
        }, 
        "workerHarnessContainerImage": 
"gcr.io/cloud-dataflow/v1beta3/python:beam-master-20190226"
      }
    ]
  }, 
  "name": "beamapp-jenkins-0418193332-936392", 
  "steps": [
    {
      "kind": "ParallelRead", 
      "name": "s1", 
      "properties": {
        "bigquery_export_format": "FORMAT_AVRO", 
        "bigquery_flatten_results": true, 
        "bigquery_query": "SELECT * FROM (SELECT \"apple\" as fruit), (SELECT 
\"orange\" as fruit),", 
        "bigquery_use_legacy_sql": true, 
        "display_data": [
          {
            "key": "source", 
            "label": "Read Source", 
            "namespace": "apache_beam.io.iobase.Read", 
            "shortValue": "BigQuerySource", 
            "type": "STRING", 
            "value": "apache_beam.io.gcp.bigquery.BigQuerySource"
          }, 
          {
            "key": "query", 
            "label": "Query", 
            "namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
            "type": "STRING", 
            "value": "SELECT * FROM (SELECT \"apple\" as fruit), (SELECT 
\"orange\" as fruit),"
          }, 
          {
            "key": "validation", 
            "label": "Validation Enabled", 
            "namespace": "apache_beam.io.gcp.bigquery.BigQuerySource", 
            "type": "BOOLEAN", 
            "value": false
          }
        ], 
        "format": "bigquery", 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                  "component_encodings": [
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": []
                    }, 
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "read.out"
          }
        ], 
        "user_name": "read"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s2", 
      "properties": {
        "create_disposition": "CREATE_IF_NEEDED", 
        "dataset": "python_query_to_table_15556160126431", 
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": 
"RowAsDictJsonCoder$eNprYEpOLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLaqML8nPzynmCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwBKpRfo",
 
              "component_encodings": []
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "bigquery", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s1"
        }, 
        "schema": "{\"fields\": [{\"type\": \"STRING\", \"name\": \"fruit\", 
\"mode\": \"NULLABLE\"}]}", 
        "table": "output_table", 
        "user_name": "write/WriteToBigQuery/NativeWrite", 
        "write_disposition": "WRITE_EMPTY"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
 createTime: u'2019-04-18T19:33:44.945454Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2019-04-18_12_33_43-7248736252865323799'
 location: u'us-central1'
 name: u'beamapp-jenkins-0418193332-936392'
 projectId: u'apache-beam-testing'
 stageStates: []
 startTime: u'2019-04-18T19:33:44.945454Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2019-04-18_12_33_43-7248736252865323799]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_33_43-7248736252865323799?project=apache-beam-testing
root: INFO: Job 2019-04-18_12_33_43-7248736252865323799 is in state JOB_STATE_RUNNING
root: INFO: 2019-04-18T19:33:43.973Z: JOB_MESSAGE_DETAILED: Autoscaling is enabled for job 2019-04-18_12_33_43-7248736252865323799. The number of workers will be between 1 and 1000.
root: INFO: 2019-04-18T19:33:44.070Z: JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job 2019-04-18_12_33_43-7248736252865323799.
root: INFO: 2019-04-18T19:33:47.343Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2019-04-18T19:33:48.607Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
root: INFO: 2019-04-18T19:33:49.423Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2019-04-18T19:33:49.529Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
root: INFO: 2019-04-18T19:33:49.570Z: JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2019-04-18T19:33:49.616Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2019-04-18T19:33:49.835Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2019-04-18T19:33:49.911Z: JOB_MESSAGE_DETAILED: Fusing consumer write/WriteToBigQuery/NativeWrite into read
root: INFO: 2019-04-18T19:33:49.979Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
root: INFO: 2019-04-18T19:33:50.033Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2019-04-18T19:33:50.097Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2019-04-18T19:33:50.164Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-04-18T19:33:50.465Z: JOB_MESSAGE_DEBUG: Executing wait step start3
root: INFO: 2019-04-18T19:33:50.641Z: JOB_MESSAGE_BASIC: Executing operation read+write/WriteToBigQuery/NativeWrite
root: INFO: 2019-04-18T19:33:50.712Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2019-04-18T19:33:50.776Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-a...
root: INFO: 2019-04-18T19:33:54.566Z: JOB_MESSAGE_BASIC: BigQuery query issued as job: "dataflow_job_13870926008283644006". You can check its status with the bq tool: "bq show -j --project_id=apache-beam-testing dataflow_job_13870926008283644006".
root: INFO: 2019-04-18T19:34:09.806Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 0 based on the rate of progress in the currently running step(s).
root: INFO: 2019-04-18T19:34:17.587Z: JOB_MESSAGE_ERROR: Startup of the worker pool in zone us-central1-a failed to bring up any of the desired 1 workers. Please check for errors in your job parameters, check quota and retry later, or please try in a different zone/region.
root: INFO: 2019-04-18T19:34:17.649Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: Internal Issue (de748b099cda0996): 82159483:17
root: INFO: 2019-04-18T19:34:19.574Z: JOB_MESSAGE_DETAILED: Cleaning up.
root: INFO: 2019-04-18T19:34:19.707Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
root: INFO: 2019-04-18T19:34:19.761Z: JOB_MESSAGE_BASIC: Stopping worker pool...
root: INFO: 2019-04-18T19:34:30.815Z: JOB_MESSAGE_BASIC: Worker pool stopped.
root: INFO: 2019-04-18T19:34:30.887Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
root: INFO: Job 2019-04-18_12_33_43-7248736252865323799 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: 
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 34 tests in 3207.442s

FAILED (SKIP=1, errors=2)
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_17-15418363709197356795?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_28_02-17286266282766725107?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_36_56-16517856860447631757?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_43_29-5904889684559853035?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_50_48-15335077328381253726?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_58_46-9720935088068166240?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_13_06_41-10494046538893023892?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_19-6626949665079178415?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_35_03-3939691818219149331?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_43_38-15018083042345567661?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_14-1976899339282384384?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_43_18-4969998506843450095?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_19-8107229233717658699?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_33_43-7248736252865323799?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_34_54-8444092369238231782?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_43_42-9661805665173715288?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_15-15582827090877535219?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_41_31-4846744947215467526?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_16-873107822240031011?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_28_03-7455333854206981073?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_36_34-1127276936737911664?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_39_18-121484150625158676?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_47_02-5879131756132669167?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_16-14623287056267467898?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_28_18-8766369559252524110?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_36_19-13024109164293709174?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_44_04-8596837557764462954?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_50_49-11829179706715010788?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_20_14-10630895534320938043?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_28_39-3263965698095307790?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_39_35-8515603978468657666?project=apache-beam-testing.
Found: 
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-04-18_12_46_12-17292864512608617435?project=apache-beam-testing.

> Task :beam-sdks-python:postCommitIT FAILED

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle>' line: 127

* What went wrong:
Execution failed for task ':beam-sdks-python:directRunnerIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle>' line: 240

* What went wrong:
Execution failed for task ':beam-sdks-python:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 57m 33s
62 actionable tasks: 45 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/bbdr6s6cayj3g

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
