See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/359/display/redirect?page=changes>

Changes:

[noreply] Add 2.30 release date

[noreply] Remove placeholder entries from 2.30

[heejong] Update Dataflow Python container version to 20210622

[Robert Burke] [BEAM-12528] Discard failed plans.


------------------------------------------
[...truncated 1.01 MB...]
>         test_pipeline or self.create_pipeline())

apache_beam/transforms/validate_runner_xlang_test.py:290: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/transforms/validate_runner_xlang_test.py:235: in run_partition
    TEST_PARTITION_URN, None, self.expansion_service))
apache_beam/pvalue.py:136: in __or__
    return self.pipeline.apply(ptransform, self)
apache_beam/pipeline.py:694: in apply
    pvalueish_result = self.runner.apply(transform, pvalueish, self._options)
apache_beam/runners/runner.py:185: in apply
    return m(transform, input, options)
apache_beam/runners/runner.py:215: in apply_PTransform
    return transform.expand(input)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ExternalTransform(PTransform) label=[ExternalTransform(beam:transforms:xlang:test:partition)] at 0x7f25446b09e8>
pvalueish = <PCollection[Create/Map(decode).None] at 0x7f254602f470>

    def expand(self, pvalueish):
      # type: (pvalue.PCollection) -> pvalue.PCollection
      if isinstance(pvalueish, pvalue.PBegin):
        self._inputs = {}
      elif isinstance(pvalueish, (list, tuple)):
        self._inputs = {str(ix): pvalue for ix, pvalue in enumerate(pvalueish)}
      elif isinstance(pvalueish, dict):
        self._inputs = pvalueish
      else:
        self._inputs = {'input': pvalueish}
      pipeline = (
          next(iter(self._inputs.values())).pipeline
          if self._inputs else pvalueish.pipeline)
      context = pipeline_context.PipelineContext(
          component_id_map=pipeline.component_id_map)
      transform_proto = beam_runner_api_pb2.PTransform(
          unique_name=pipeline._current_transform().full_label,
          spec=beam_runner_api_pb2.FunctionSpec(
              urn=self._urn, payload=self._payload))
      for tag, pcoll in self._inputs.items():
        transform_proto.inputs[tag] = context.pcollections.get_id(pcoll)
        # Conversion to/from proto assumes producers.
        # TODO: Possibly loosen this.
        context.transforms.put_proto(
            '%s_%s' % (self._IMPULSE_PREFIX, tag),
            beam_runner_api_pb2.PTransform(
                unique_name='%s_%s' % (self._IMPULSE_PREFIX, tag),
                spec=beam_runner_api_pb2.FunctionSpec(
                    urn=common_urns.primitives.IMPULSE.urn),
                outputs={'out': transform_proto.inputs[tag]}))
      components = context.to_runner_api()
      request = beam_expansion_api_pb2.ExpansionRequest(
          components=components,
          namespace=self._external_namespace,  # type: ignore  # mypy thinks self._namespace is threading.local
          transform=transform_proto)
    
      with self._service() as service:
        response = service.Expand(request)
        if response.error:
>         raise RuntimeError(response.error)
E         RuntimeError: java.lang.UnsupportedOperationException: Unknown urn: beam:transforms:xlang:test:partition
E               at org.apache.beam.sdk.expansion.service.ExpansionService.expand(ExpansionService.java:436)
E               at org.apache.beam.sdk.expansion.service.ExpansionService.expand(ExpansionService.java:494)
E               at org.apache.beam.model.expansion.v1.ExpansionServiceGrpc$MethodHandlers.invoke(ExpansionServiceGrpc.java:219)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:182)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:331)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:797)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
E               at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
E               at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
E               at java.lang.Thread.run(Thread.java:748)

apache_beam/transforms/external.py:297: RuntimeError
_____________________ ValidateRunnerXlangTest.test_prefix ______________________

self = <apache_beam.transforms.validate_runner_xlang_test.ValidateRunnerXlangTest testMethod=test_prefix>
test_pipeline = None

    @pytest.mark.xlang_transforms
    def test_prefix(self, test_pipeline=None):
      CrossLanguageTestPipelines().run_prefix(
>         test_pipeline or self.create_pipeline())

apache_beam/transforms/validate_runner_xlang_test.py:254: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/transforms/validate_runner_xlang_test.py:98: in run_prefix
    self.expansion_service))
apache_beam/pvalue.py:136: in __or__
    return self.pipeline.apply(ptransform, self)
apache_beam/pipeline.py:694: in apply
    pvalueish_result = self.runner.apply(transform, pvalueish, self._options)
apache_beam/runners/runner.py:185: in apply
    return m(transform, input, options)
apache_beam/runners/runner.py:215: in apply_PTransform
    return transform.expand(input)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ExternalTransform(PTransform) label=[ExternalTransform(beam:transforms:xlang:test:prefix)] at 0x7f2546099128>
pvalueish = <PCollection[Create/Map(decode).None] at 0x7f25457e1550>

    def expand(self, pvalueish):
      # type: (pvalue.PCollection) -> pvalue.PCollection
      if isinstance(pvalueish, pvalue.PBegin):
        self._inputs = {}
      elif isinstance(pvalueish, (list, tuple)):
        self._inputs = {str(ix): pvalue for ix, pvalue in enumerate(pvalueish)}
      elif isinstance(pvalueish, dict):
        self._inputs = pvalueish
      else:
        self._inputs = {'input': pvalueish}
      pipeline = (
          next(iter(self._inputs.values())).pipeline
          if self._inputs else pvalueish.pipeline)
      context = pipeline_context.PipelineContext(
          component_id_map=pipeline.component_id_map)
      transform_proto = beam_runner_api_pb2.PTransform(
          unique_name=pipeline._current_transform().full_label,
          spec=beam_runner_api_pb2.FunctionSpec(
              urn=self._urn, payload=self._payload))
      for tag, pcoll in self._inputs.items():
        transform_proto.inputs[tag] = context.pcollections.get_id(pcoll)
        # Conversion to/from proto assumes producers.
        # TODO: Possibly loosen this.
        context.transforms.put_proto(
            '%s_%s' % (self._IMPULSE_PREFIX, tag),
            beam_runner_api_pb2.PTransform(
                unique_name='%s_%s' % (self._IMPULSE_PREFIX, tag),
                spec=beam_runner_api_pb2.FunctionSpec(
                    urn=common_urns.primitives.IMPULSE.urn),
                outputs={'out': transform_proto.inputs[tag]}))
      components = context.to_runner_api()
      request = beam_expansion_api_pb2.ExpansionRequest(
          components=components,
          namespace=self._external_namespace,  # type: ignore  # mypy thinks self._namespace is threading.local
          transform=transform_proto)
    
      with self._service() as service:
        response = service.Expand(request)
        if response.error:
>         raise RuntimeError(response.error)
E         RuntimeError: java.lang.UnsupportedOperationException: Unknown urn: beam:transforms:xlang:test:prefix
E               at org.apache.beam.sdk.expansion.service.ExpansionService.expand(ExpansionService.java:436)
E               at org.apache.beam.sdk.expansion.service.ExpansionService.expand(ExpansionService.java:494)
E               at org.apache.beam.model.expansion.v1.ExpansionServiceGrpc$MethodHandlers.invoke(ExpansionServiceGrpc.java:219)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:182)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:331)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:797)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
E               at org.apache.beam.vendor.grpc.v1p36p0.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
E               at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
E               at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
E               at java.lang.Thread.run(Thread.java:748)

apache_beam/transforms/external.py:297: RuntimeError
=============================== warnings summary ===============================
apache_beam/io/filesystems_test.py:54
  <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:54: DeprecationWarning: invalid escape sequence \c
    self.assertIsNone(FileSystems.get_scheme('c:\\abc\cdf'))  # pylint: disable=anomalous-backslash-in-string

apache_beam/io/filesystems_test.py:62
  <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:62: DeprecationWarning: invalid escape sequence \d
    self.assertTrue(isinstance(FileSystems.get_filesystem('c:\\abc\def'),  # pylint: disable=anomalous-backslash-in-string

-- Docs: https://docs.pytest.org/en/latest/warnings.html
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python/pytest_xlangValidateRunner.xml> -
======= 7 failed, 1 passed, 4758 deselected, 2 warnings in 59.60 seconds =======

> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerPythonUsingJava FAILED

> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerPythonUsingPython
>>> RUNNING integration tests with pipeline options: --runner=PortableRunner --job_endpoint=localhost:39991 --environment_cache_millis=10000 --experiments=beam_fn_api
>>>   pytest options: 
>>>   collect markers: -m=xlang_transforms
============================= test session starts ==============================
platform linux -- Python 3.6.8, pytest-4.6.11, py-1.10.0, pluggy-0.13.1
rootdir: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python>, inifile: pytest.ini
plugins: xdist-1.34.0, timeout-1.4.2, forked-1.3.0, requests-mock-1.9.3
timeout: 600.0s
timeout method: signal
timeout func_only: False
collected 4766 items / 4758 deselected / 8 selected

apache_beam/io/external/generate_sequence_test.py .                      [ 12%]
apache_beam/transforms/validate_runner_xlang_test.py .......             [100%]

- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python/pytest_xlangValidateRunner.xml> -
================== 8 passed, 4758 deselected in 96.99 seconds ==================

> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerPythonUsingSql
>>> RUNNING integration tests with pipeline options: --runner=PortableRunner --job_endpoint=localhost:39991 --environment_cache_millis=10000 --experiments=beam_fn_api
>>>   pytest options: 
>>>   collect markers: -m=xlang_sql_expansion_service
============================= test session starts ==============================
platform linux -- Python 3.6.8, pytest-4.6.11, py-1.10.0, pluggy-0.13.1
rootdir: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python>, inifile: pytest.ini
plugins: xdist-1.34.0, timeout-1.4.2, forked-1.3.0, requests-mock-1.9.3
timeout: 600.0s
timeout method: signal
timeout func_only: False
collected 4766 items / 4757 deselected / 9 selected

apache_beam/transforms/sql_test.py .........                             [100%]

- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/python/pytest_xlangSqlValidateRunner.xml> -
================= 9 passed, 4757 deselected in 183.74 seconds ==================

> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerCleanup
Skipping invalid pid: 9430.
Stopping expansion service pid: 9438.

> Task :runners:spark:3:job-server:sparkJobServerCleanup
Stopping job server pid: 2984.

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:3:job-server:validatesCrossLanguageRunnerJavaUsingJava'.
> There were failing tests. See the report at: file://<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/runners/spark/3/job-server/build/reports/tests/validatesCrossLanguageRunnerJavaUsingJava/index.html>

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:3:job-server:validatesCrossLanguageRunnerPythonUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 18m 46s
181 actionable tasks: 130 executed, 47 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/jnu5woreb5al2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
