See 
<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/4631/display/redirect?page=changes>

Changes:

[noreply] [Playground] [Backend] Removing unused snippets manually and using the

[noreply] Implement PubsubSchemaTransformWriteConfiguration (#22262)

[noreply] Add support for FLOAT to Python RowCoder (#22626)

[noreply] Bump up python container versions (#22697)


------------------------------------------
[...truncated 44.67 KB...]

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1:-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/2022703441"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    if [ "x(2022703441) " != x ] ; then
        PS1="(2022703441) ${PS1:-}"
    else
    if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
        # special case for Aspen magic directories
        # see http://www.zetadev.com/software/aspen/
        PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
    fi
    fi
    export PS1
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r
fi
pip install --retries 10 --upgrade pip setuptools wheel
Collecting pip
  Using cached pip-22.2.2-py3-none-any.whl (2.0 MB)
Collecting setuptools
  Using cached setuptools-64.0.3-py3-none-any.whl (1.2 MB)
Collecting wheel
  Using cached wheel-0.37.1-py2.py3-none-any.whl (35 kB)
Installing collected packages: pip, setuptools, wheel
  Attempting uninstall: pip
    Found existing installation: pip 20.1.1
    Uninstalling pip-20.1.1:
      Successfully uninstalled pip-20.1.1
  Attempting uninstall: setuptools
    Found existing installation: setuptools 47.1.0
    Uninstalling setuptools-47.1.0:
      Successfully uninstalled setuptools-47.1.0
Successfully installed pip-22.2.2 setuptools-64.0.3 wheel-0.37.1
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining 
file://https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python
  Preparing metadata (setup.py): started
  Preparing metadata (setup.py): finished with status 'done'
Collecting crcmod<2.0,>=1.7
  Using cached crcmod-1.7-cp37-cp37m-linux_x86_64.whl
Collecting orjson<4.0
  Using cached orjson-3.7.11-cp37-cp37m-manylinux_2_28_x86_64.whl (148 kB)
Collecting dill<0.3.2,>=0.3.1.1
  Using cached dill-0.3.1.1-py3-none-any.whl
Collecting cloudpickle<3,>=2.1.0
  Using cached cloudpickle-2.1.0-py3-none-any.whl (25 kB)
Collecting fastavro<2,>=0.23.6
  Using cached 
fastavro-1.5.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.4 
MB)
Collecting grpcio!=1.48.0,<2,>=1.33.1
  Using cached 
grpcio-1.47.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5 MB)
Collecting hdfs<3.0.0,>=2.1.0
  Using cached hdfs-2.7.0-py3-none-any.whl (34 kB)
Collecting httplib2<0.21.0,>=0.8
  Using cached httplib2-0.20.4-py3-none-any.whl (96 kB)
Collecting numpy<1.23.0,>=1.14.3
  Using cached 
numpy-1.21.6-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (15.7 MB)
Collecting pymongo<4.0.0,>=3.8.0
  Using cached 
pymongo-3.12.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (508 
kB)
Collecting protobuf<4,>=3.12.2
  Using cached 
protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (1.0 MB)
Collecting proto-plus<2,>=1.7.1
  Using cached proto_plus-1.22.0-py3-none-any.whl (47 kB)
Collecting pydot<2,>=1.2.0
  Using cached pydot-1.4.2-py2.py3-none-any.whl (21 kB)
Collecting python-dateutil<3,>=2.8.0
  Using cached python_dateutil-2.8.2-py2.py3-none-any.whl (247 kB)
Collecting pytz>=2018.3
  Downloading pytz-2022.2.1-py2.py3-none-any.whl (500 kB)
     ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 500.6/500.6 kB 11.7 MB/s eta 0:00:00
Collecting regex>=2020.6.8
  Using cached 
regex-2022.7.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (751 
kB)
Collecting requests<3.0.0,>=2.24.0
  Using cached requests-2.28.1-py3-none-any.whl (62 kB)
Collecting typing-extensions>=3.7.0
  Using cached typing_extensions-4.3.0-py3-none-any.whl (25 kB)
Collecting zstandard<1,>=0.18.0
  Using cached 
zstandard-0.18.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.5 
MB)
Collecting pyarrow<8.0.0,>=0.15.1
  Using cached 
pyarrow-7.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (26.7 
MB)
Collecting six>=1.5.2
  Using cached six-1.16.0-py2.py3-none-any.whl (11 kB)
Collecting docopt
  Using cached docopt-0.6.2-py2.py3-none-any.whl
Collecting pyparsing!=3.0.0,!=3.0.1,!=3.0.2,!=3.0.3,<4,>=2.4.2
  Using cached pyparsing-3.0.9-py3-none-any.whl (98 kB)
Collecting certifi>=2017.4.17
  Using cached certifi-2022.6.15-py3-none-any.whl (160 kB)
Collecting idna<4,>=2.5
  Using cached idna-3.3-py3-none-any.whl (61 kB)
Collecting urllib3<1.27,>=1.21.1
  Using cached urllib3-1.26.11-py2.py3-none-any.whl (139 kB)
Collecting charset-normalizer<3,>=2
  Using cached charset_normalizer-2.1.0-py3-none-any.whl (39 kB)
Installing collected packages: pytz, docopt, crcmod, zstandard, urllib3, 
typing-extensions, six, regex, pyparsing, pymongo, protobuf, orjson, numpy, 
idna, fastavro, dill, cloudpickle, charset-normalizer, certifi, requests, 
python-dateutil, pydot, pyarrow, proto-plus, httplib2, grpcio, hdfs, apache-beam
  Running setup.py develop for apache-beam
Successfully installed apache-beam-2.42.0.dev0 certifi-2022.6.15 
charset-normalizer-2.1.0 cloudpickle-2.1.0 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 
fastavro-1.5.4 grpcio-1.47.0 hdfs-2.7.0 httplib2-0.20.4 idna-3.3 numpy-1.21.6 
orjson-3.7.11 proto-plus-1.22.0 protobuf-3.20.1 pyarrow-7.0.0 pydot-1.4.2 
pymongo-3.12.3 pyparsing-3.0.9 python-dateutil-2.8.2 pytz-2022.2.1 
regex-2022.7.25 requests-2.28.1 six-1.16.0 typing-extensions-4.3.0 
urllib3-1.26.11 zstandard-0.18.0

PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map

# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1

pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))

result = pipeline.run()
result.wait_until_finish()
"

if [[ "$RUNNER" = "FlinkRunner" ]]; then
  INPUT_JAR_ARG="flink_job_server_jar"
else
  INPUT_JAR_ARG="spark_job_server_jar"
fi

# Create the jar
OUTPUT_JAR="test-pipeline-${RUNNER}-$(date +%Y%m%d-%H%M%S).jar"
(python -c "$PIPELINE_PY" \
  --runner "$RUNNER" \
  --"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
  --output_executable_path $OUTPUT_JAR \
  --parallelism 1 \
  --sdk_worker_parallelism 1 \
  --environment_type DOCKER \
  --environment_options "docker_container_image=$PYTHON_CONTAINER_IMAGE" \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
ERROR:apache_beam.utils.subprocess_server:Starting job service with ['java', 
'-jar', 
'https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/2/job-server/build/libs/beam-runners-spark-job-server-2.42.0-SNAPSHOT.jar',
 '--spark-master-url', 'local[4]', '--artifacts-dir', 
'/tmp/beam-tempeh86ykvg/artifacts6f1m9dd9', '--job-port', '45839', 
'--artifact-port', '0', '--expansion-port', '0']
ERROR:apache_beam.utils.subprocess_server:Error bringing up service
Traceback (most recent call last):
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/utils/subprocess_server.py",
 line 89, in start
    'Service failed to start up with error %s' % self._process.poll())
RuntimeError: Service failed to start up with error 0
Traceback (most recent call last):
  File "<string>", line 23, in <module>
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/pipeline.py",
 line 574, in run
    return self.runner.run_pipeline(self, self._options)
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/spark_runner.py",
 line 47, in run_pipeline
    return super().run_pipeline(pipeline, options)
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",
 line 439, in run_pipeline
    job_service_handle = self.create_job_service(options)
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",
 line 318, in create_job_service
    return self.create_job_service_handle(server.start(), options)
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/job_server.py",
 line 81, in start
    self._endpoint = self._job_server.start()
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/job_server.py",
 line 110, in start
    return self._server.start()
  File 
"https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/utils/subprocess_server.py",
 line 89, in start
    'Service failed to start up with error %s' % self._process.poll())
RuntimeError: Service failed to start up with error 0

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  # Execute the jar
  java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi

rm -rf $ENV_DIR
rm -f $OUTPUT_JAR

if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
  echo ">>> SUCCESS"
else
  echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
>>> FAILURE

> Task :sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner 
> FAILED

FAILURE: Build failed with an exception.

* Where:
Script 
'https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/test-suites/portable/common.gradle'
 line: 364

* What went wrong:
Execution failed for task 
':sdks:python:test-suites:portable:py37:testJavaJarCreatorSparkRunner'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 8.0.

You can use '--warning-mode all' to show the individual deprecation warnings 
and determine if they come from your own scripts or plugins.

See 
https://docs.gradle.org/7.4/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 33s
89 actionable tasks: 59 executed, 26 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/4v2yep6zoa7xk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to