See 
<https://builds.apache.org/job/beam_PostCommit_Python2/2128/display/redirect>

Changes:


------------------------------------------
[...truncated 11.13 MB...]
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0403065235-489098.1585896755.489219/pickled_main_session",
 
            "name": "pickled_main_session"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0403065235-489098.1585896755.489219/dataflow_python_sdk.tar",
 
            "name": "dataflow_python_sdk.tar"
          }, 
          {
            "location": 
"storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0403065235-489098.1585896755.489219/dataflow-worker.jar",
 
            "name": "dataflow-worker.jar"
          }
        ], 
        "taskrunnerSettings": {
          "parallelWorkerSettings": {
            "baseUrl": "https://dataflow.googleapis.com";, 
            "servicePath": "https://dataflow.googleapis.com";
          }
        }, 
        "workerHarnessContainerImage": 
"gcr.io/cloud-dataflow/v1beta3/python-fnapi:beam-master-20200317"
      }
    ]
  }, 
  "name": "beamapp-jenkins-0403065235-489098", 
  "steps": [
    {
      "kind": "ParallelRead", 
      "name": "s1", 
      "properties": {
        "display_data": [
          {
            "key": "source", 
            "label": "Read Source", 
            "namespace": "apache_beam.io.iobase.Read", 
            "shortValue": "_PubSubSource", 
            "type": "STRING", 
            "value": "apache_beam.io.gcp.pubsub._PubSubSource"
          }, 
          {
            "key": "with_attributes", 
            "label": "With Attributes", 
            "namespace": "apache_beam.io.gcp.pubsub._PubSubSource", 
            "type": "BOOLEAN", 
            "value": false
          }, 
          {
            "key": "subscription", 
            "label": "Pubsub Subscription", 
            "namespace": "apache_beam.io.gcp.pubsub._PubSubSource", 
            "type": "STRING", 
            "value": 
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_inpute76e3374-ae6c-4c92-8194-1c03f262328c"
          }
        ], 
        "format": "pubsub", 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "ReadFromPubSub/Read.out"
          }
        ], 
        "pubsub_subscription": 
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_inpute76e3374-ae6c-4c92-8194-1c03f262328c",
 
        "user_name": "ReadFromPubSub/Read"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s2", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "StreamingUserMetricsDoFn", 
            "type": "STRING", 
            "value": 
"apache_beam.runners.dataflow.dataflow_exercise_streaming_metrics_pipeline.StreamingUserMetricsDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "None", 
            "user_name": "generate_metrics.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s1"
        }, 
        "serialized_fn": "ref_AppliedPTransform_generate_metrics_4", 
        "user_name": "generate_metrics"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s3", 
      "properties": {
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": "kind:bytes"
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "pubsub", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "None", 
          "step_name": "s2"
        }, 
        "pubsub_topic": 
"projects/apache-beam-testing/topics/exercise_streaming_metrics_topic_outpute76e3374-ae6c-4c92-8194-1c03f262328c",
 
        "user_name": "dump_to_pub/Write/NativeWrite"
      }
    }
  ], 
  "type": "JOB_TYPE_STREAMING"
}
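
For reference, the three steps above (s1 ParallelRead, s2 ParallelDo, s3
ParallelWrite) describe a streaming pipeline of roughly the shape sketched
below. This is only a minimal, hypothetical sketch, not the actual
dataflow_exercise_streaming_metrics_pipeline source: the DoFn body, the
counter name, and the placeholder subscription/topic strings are
illustrative, while the step labels match the job graph.

    import apache_beam as beam
    from apache_beam.metrics import Metrics
    from apache_beam.options.pipeline_options import PipelineOptions


    class StreamingUserMetricsDoFn(beam.DoFn):
        # Illustrative stand-in for the DoFn named in step s2; the real test
        # DoFn lives in dataflow_exercise_streaming_metrics_pipeline.py.
        def __init__(self):
            self.bytes_counter = Metrics.counter(self.__class__, 'total_bytes')

        def process(self, element):
            self.bytes_counter.inc(len(element))
            yield element


    def run(argv=None):
        # streaming=True matches "type": "JOB_TYPE_STREAMING" in the job JSON.
        options = PipelineOptions(argv, streaming=True)
        with beam.Pipeline(options=options) as pipeline:
            _ = (
                pipeline
                | 'ReadFromPubSub' >> beam.io.ReadFromPubSub(
                    subscription='projects/<project>/subscriptions/<input-sub>')
                | 'generate_metrics' >> beam.ParDo(StreamingUserMetricsDoFn())
                | 'dump_to_pub' >> beam.io.WriteToPubSub(
                    topic='projects/<project>/topics/<output-topic>'))
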
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: u'2020-04-03T06:52:49.956897Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2020-04-02_23_52_48-8704811492948967056'
 location: u'us-central1'
 name: u'beamapp-jenkins-0403065235-489098'
 projectId: u'apache-beam-testing'
 stageStates: []
 startTime: u'2020-04-03T06:52:49.956897Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: 
[2020-04-02_23_52_48-8704811492948967056]
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow 
monitoring console, please navigate to 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-02_23_52_48-8704811492948967056?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 
2020-04-02_23_52_48-8704811492948967056 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:52.262Z: 
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service 
Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.013Z: 
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-a.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.566Z: 
JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable 
parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.568Z: 
JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into 
optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.577Z: 
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.588Z: 
JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into 
optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.590Z: 
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write 
steps
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.594Z: 
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.621Z: 
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.624Z: 
JOB_MESSAGE_DETAILED: Fusing consumer generate_metrics into ReadFromPubSub/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.628Z: 
JOB_MESSAGE_DETAILED: Fusing consumer dump_to_pub/Write/NativeWrite into 
generate_metrics
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.641Z: 
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.661Z: 
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.676Z: 
JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.834Z: 
JOB_MESSAGE_DEBUG: Executing wait step start2
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.853Z: 
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:53.860Z: 
JOB_MESSAGE_BASIC: Starting 1 workers...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:52:57.393Z: 
JOB_MESSAGE_BASIC: Executing operation 
ReadFromPubSub/Read+generate_metrics+dump_to_pub/Write/NativeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:53:23.716Z: 
JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric 
descriptors and Stackdriver will not create new Dataflow custom metrics for 
this job. Each unique user-defined metric name (independent of the DoFn in 
which it is defined) produces a new metric descriptor. To delete old / unused 
metric descriptors see 
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
 and 
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
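
If the 100-descriptor limit needs clearing, the existing custom metric
descriptors can be listed and pruned through the Cloud Monitoring API. The
sketch below is an assumption-laden illustration only: it assumes the pre-2.0
google-cloud-monitoring client (method signatures differ in later releases)
and that Dataflow-created user metrics are registered under the
custom.googleapis.com/dataflow prefix, neither of which is confirmed by this
log.

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    project_name = 'projects/apache-beam-testing'

    # Assumed filter for Dataflow-created user metric descriptors; inspect the
    # printed list before enabling the delete call.
    descriptor_filter = (
        'metric.type = starts_with("custom.googleapis.com/dataflow")')
    for descriptor in client.list_metric_descriptors(
            project_name, filter_=descriptor_filter):
        print(descriptor.type)
        # client.delete_metric_descriptor(descriptor.name)  # after review
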
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:53:26.883Z: 
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service 
Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:53:26.884Z: 
JOB_MESSAGE_DEBUG: Executing input step topology_init_attach_disk_input_step
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:53:27.587Z: 
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-a.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-03T06:53:43.536Z: 
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting 
for job 2020-04-02_23_52_48-8704811492948967056 after 61 seconds
google.auth.transport._http_client: DEBUG: Making request: GET 
http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET 
http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET 
http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): 
metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET 
/computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 
200 144
google.auth.transport.requests: DEBUG: Making request: GET 
http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET 
/computeMetadata/v1/instance/service-accounts/[email protected]/token
 HTTP/1.1" 200 192
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df.xml
----------------------------------------------------------------------
XML: 
<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 59 tests in 4540.951s

FAILED (SKIP=8, errors=2)

> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/direct/py2/build.gradle'>
 line: 81

* What went wrong:
Execution failed for task 
':sdks:python:test-suites:direct:py2:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 255

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* Where:
Build file 
'<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle'>
 line: 85

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 17m 15s
127 actionable tasks: 103 executed, 21 from cache, 3 up-to-date

Publishing build scan...
https://gradle.com/s/iqljht3frucnq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
