See <https://ci-beam.apache.org/job/beam_PostCommit_Python37/3028/display/redirect>

Changes:


------------------------------------------
[...truncated 36.73 MB...]
            "label": "Pubsub Subscription",
            "namespace": "apache_beam.io.gcp.pubsub._PubSubSource",
            "type": "STRING",
            "value": 
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_input8426f285-4872-499b-bdb1-f1873716bfb3"
          },
          {
            "key": "with_attributes",
            "label": "With Attributes",
            "namespace": "apache_beam.io.gcp.pubsub._PubSubSource",
            "type": "BOOLEAN",
            "value": false
          }
        ],
        "format": "pubsub",
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "ReadFromPubSub/Read.out"
          }
        ],
        "pubsub_subscription": 
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_input8426f285-4872-499b-bdb1-f1873716bfb3",
        "user_name": "ReadFromPubSub/Read"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s2",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "StreamingUserMetricsDoFn",
            "type": "STRING",
            "value": 
"apache_beam.runners.dataflow.dataflow_exercise_streaming_metrics_pipeline.StreamingUserMetricsDoFn"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "generate_metrics.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s1"
        },
        "serialized_fn": "ref_AppliedPTransform_generate_metrics_4",
        "user_name": "generate_metrics"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s3",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "bytes_to_proto_str"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "dump_to_pub/ToProtobuf.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s2"
        },
        "serialized_fn": "ref_AppliedPTransform_dump_to_pub/ToProtobuf_6",
        "user_name": "dump_to_pub/ToProtobuf"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s4",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s3"
        },
        "pubsub_serialized_attributes_fn": "",
        "pubsub_topic": 
"projects/apache-beam-testing/topics/exercise_streaming_metrics_topic_output8426f285-4872-499b-bdb1-f1873716bfb3",
        "user_name": "dump_to_pub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
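
For reference, a minimal sketch of the pipeline the job graph above describes, reconstructed from the step names and display_data (the module and helper names are taken from the namespaces in the log; the exact wiring, labels, and the location of bytes_to_proto_str are assumptions):

    import apache_beam as beam
    from apache_beam.io.gcp.pubsub import ReadFromPubSub, WriteToPubSub
    from apache_beam.options.pipeline_options import PipelineOptions
    # Assumption: both names live in the module shown in the display_data above.
    from apache_beam.runners.dataflow.dataflow_exercise_streaming_metrics_pipeline import (
        StreamingUserMetricsDoFn, bytes_to_proto_str)

    SUBSCRIPTION = ('projects/apache-beam-testing/subscriptions/'
                    'exercise_streaming_metrics_subscription_input8426f285-4872-499b-bdb1-f1873716bfb3')
    TOPIC = ('projects/apache-beam-testing/topics/'
             'exercise_streaming_metrics_topic_output8426f285-4872-499b-bdb1-f1873716bfb3')

    with beam.Pipeline(options=PipelineOptions(streaming=True)) as p:
        (p
         # s1: read raw bytes from the input subscription (with_attributes=False).
         | 'ReadFromPubSub' >> ReadFromPubSub(subscription=SUBSCRIPTION,
                                              with_attributes=False)
         # s2: the DoFn that exercises user counters and distributions.
         | 'generate_metrics' >> beam.ParDo(StreamingUserMetricsDoFn())
         # s3: the CallableWrapperDoFn around bytes_to_proto_str seen in step s3.
         | 'dump_to_pub/ToProtobuf' >> beam.Map(bytes_to_proto_str)
         # s4: write the serialized payloads back out to the output topic.
         | 'dump_to_pub/Write' >> WriteToPubSub(topic=TOPIC))
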
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: '2020-10-31T18:59:47.685128Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2020-10-31_11_59_46-16359965557247608278'
 location: 'us-central1'
 name: 'beamapp-jenkins-1031185940-223302'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2020-10-31T18:59:47.685128Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2020-10-31_11_59_46-16359965557247608278]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job: 2020-10-31_11_59_46-16359965557247608278
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2020-10-31_11_59_46-16359965557247608278?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-10-31_11_59_46-16359965557247608278 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:50.345Z: 
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-f.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:51.970Z: 
JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable 
parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.003Z: 
JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into 
optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.211Z: 
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.269Z: 
JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into 
optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.288Z: 
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write 
steps
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.336Z: 
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.380Z: 
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.416Z: 
JOB_MESSAGE_DETAILED: Fusing consumer generate_metrics into ReadFromPubSub/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.458Z: 
JOB_MESSAGE_DETAILED: Fusing consumer dump_to_pub/ToProtobuf into 
generate_metrics
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.504Z: 
JOB_MESSAGE_DETAILED: Fusing consumer dump_to_pub/Write/NativeWrite into 
dump_to_pub/ToProtobuf
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.540Z: 
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.578Z: 
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.630Z: 
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.663Z: 
JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.829Z: 
JOB_MESSAGE_DEBUG: Executing wait step start17
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.926Z: JOB_MESSAGE_BASIC: Executing operation ReadFromPubSub/Read+generate_metrics+dump_to_pub/ToProtobuf+dump_to_pub/Write/NativeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:52.978Z: 
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T18:59:53.030Z: 
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T19:00:11.326Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
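
The JOB_MESSAGE above points at the Monitoring API for listing and deleting stale custom metric descriptors. As a hedged sketch (not part of the test), the same cleanup can be done programmatically with the google-cloud-monitoring client; the filter string and the decision of which descriptors are safe to delete are assumptions:

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    project_name = 'projects/apache-beam-testing'

    # List the custom metric descriptors that count against the quota.
    for descriptor in client.list_metric_descriptors(request={
            'name': project_name,
            'filter': 'metric.type = starts_with("custom.googleapis.com/")'}):
        print(descriptor.type)
        # Uncomment to delete a descriptor you know is unused (irreversible):
        # client.delete_metric_descriptor(
        #     name=f'{project_name}/metricDescriptors/{descriptor.type}')
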
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T19:00:17.296Z: 
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 so that 
the pipeline can catch up with its backlog and keep up with its input rate.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T19:00:50.105Z: 
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-10-31T19:00:50.141Z: 
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting for job 2020-10-31_11_59_46-16359965557247608278 after 60 seconds
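
The WARNING above is what the Dataflow runner logs when the caller waits on the job with a bounded duration rather than blocking until a terminal state. A minimal sketch of that pattern, assuming the test harness does roughly the following (the cancel step is an assumption; streaming jobs never reach a terminal state on their own):

    # 'pipeline' is a beam.Pipeline built as in the sketch after the job graph above.
    result = pipeline.run()                    # DataflowPipelineResult
    result.wait_until_finish(duration=60_000)  # duration is in milliseconds
    result.cancel()                            # assumed cleanup for a streaming job
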
google.auth._default: DEBUG: Checking None for explicit credentials as part of 
auth process...
google.auth._default: DEBUG: Checking Cloud SDK credentials as part of auth 
process...
google.auth._default: DEBUG: Cloud SDK credentials not found on disk; not using 
them
google.auth._default: DEBUG: Checking for App Engine runtime as part of auth 
process...
google.auth._default: DEBUG: No App Engine library was found, so cannot authenticate via App Engine Identity Credentials.
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpubsub
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/[email protected]/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpubsub HTTP/1.1" 200 241
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py37.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 67 tests in 4309.528s

FAILED (SKIP=7, errors=1)

> Task :sdks:python:test-suites:dataflow:py37:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 118

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/6.6.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 14m 52s
175 actionable tasks: 131 executed, 40 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/bzal2cprxmi3k

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
