See <https://builds.apache.org/job/beam_PostCommit_Python2/2232/display/redirect?page=changes>

Changes:

[github] [BEAM-9642] Add SDF execution units. (#11327)


------------------------------------------
[...truncated 11.61 MB...]
          "output_name": "out", 
          "step_name": "s8"
        }, 
        "serialized_fn": "<string of 1456 bytes>", 
        "user_name": "assert_that/Group/Map(_merge_tagged_vals_under_key)"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s10", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "<lambda>"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                  "component_encodings": [
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": [], 
                      "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_1"
                    }, 
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": [], 
                      "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_1"
                    }
                  ], 
                  "is_pair_like": true, 
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_1"
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "None", 
            "user_name": "assert_that/Unkey.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "None", 
          "step_name": "s9"
        }, 
        "serialized_fn": "<string of 1056 bytes>", 
        "user_name": "assert_that/Unkey"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s11", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "_equal"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                  "component_encodings": [
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": [], 
                      "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_1"
                    }, 
                    {
                      "@type": 
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
 
                      "component_encodings": [], 
                      "pipeline_proto_coder_id": 
"ref_Coder_FastPrimitivesCoder_1"
                    }
                  ], 
                  "is_pair_like": true, 
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_1"
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "None", 
            "user_name": "assert_that/Match.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "None", 
          "step_name": "s10"
        }, 
        "serialized_fn": "<string of 2104 bytes>", 
        "user_name": "assert_that/Match"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: DEBUG: Response returned status 503, retrying
root: DEBUG: Retrying request to url https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json after exception HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'status': '503', 'content-length': '122', 'x-xss-protection': '0', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'vary': 'Origin, X-Origin, Referer', 'server': 'ESF', '-content-encoding': 'gzip', 'cache-control': 'private', 'date': 'Tue, 14 Apr 2020 20:31:54 GMT', 'x-frame-options': 'SAMEORIGIN', 'content-type': 'application/json; charset=UTF-8'}>, content <{
  "error": {
    "code": 503,
    "message": "The service is currently unavailable.",
    "status": "UNAVAILABLE"
  }
}
>
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 58 tests in 3797.498s

FAILED (SKIP=7, errors=1)
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_31-4319799099963722332?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_24_06-4005932171709600693?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_32_04-9317258690886588652?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_40_03-9570170781287014181?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_47_17-1482722421157959476?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_54_32-5891329147729689786?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_14_01_57-12098158056880839973?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_36-13374115788661751574?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_20_26-3304649762784607443?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_28_05-5927043606660674607?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_36_03-4533745295026563017?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_43_49-998169861256025400?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_51_37-4062828234513645432?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_30-1943446517302669129?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_28_42-16705494712511761488?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_36_47-14457931814732237026?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_43_50-2439773907034240117?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_51_56-12889894982068745325?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_33-8189612211769873504?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_18_18-17238684805329827045?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_25_37-12527683146726560367?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_33_36-11463527726382911790?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_41_43-17382224179828489494?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_49_06-12309179829682755292?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_31-11231991615148179390?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_14_34-3227547978723818244?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_23_34-64953570597638647?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_32_16-16046719752083595233?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_40_53-17504638165995492897?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_49_26-2489527639157619254?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_31-13915706072626587802?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_13_57-465160836079522948?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_22_58-11080724090186826070?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_33_03-17829922869232706932?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_41_18-13471289079184562480?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_49_31-2304401746153162654?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_56_34-10291478601191606853?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_31-12312469423728948635?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_14_34-6695877923224737303?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_23_27-17239372742848718467?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_31_31-16172280809251904546?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_40_05-230028619488769968?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_47_48-10204966612494375535?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_55_13-15124863118133889348?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_06_32-9420876475734870900?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_15_21-3041178938141556485?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_25_35-2766481278707642896?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_34_04-8223609187892687267?project=apache-beam-testing
Worker logs: 
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-14_13_51_00-645170596212861752?project=apache-beam-testing

> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/direct/py2/build.gradle'> line: 81

* What went wrong:
Execution failed for task ':sdks:python:test-suites:direct:py2:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 255

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle'> line: 85

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 4m 52s
129 actionable tasks: 102 executed, 24 from cache, 3 up-to-date

Publishing build scan...
https://gradle.com/s/wamh64bsqy2gg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
