See <https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/1700/display/redirect>
Changes:
------------------------------------------
[...truncated 68.55 KB...]
        hash -r 2>/dev/null
    fi
    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi
    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/build/gradleenv/-2047437407">
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1-}"
if [ "x" != x ] ; then
PS1="${PS1-}"
else
PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
fi
export PS1
fi
basename "$VIRTUAL_ENV"
# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true
pydoc () {
python -m pydoc "$@"
}
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
pip install --retries 10 -e $PYTHON_ROOT_DIR
Obtaining file://<https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python>
Collecting crcmod<2.0,>=1.7
Using cached crcmod-1.7.tar.gz (89 kB)
Collecting dill<0.3.2,>=0.3.1.1
Using cached dill-0.3.1.1.tar.gz (151 kB)
Collecting fastavro<0.24,>=0.21.4
Downloading fastavro-0.23.6-cp37-cp37m-manylinux2010_x86_64.whl (1.4 MB)
Collecting future<1.0.0,>=0.18.2
Using cached future-0.18.2.tar.gz (829 kB)
Collecting grpcio<2,>=1.29.0
Downloading grpcio-1.30.0-cp37-cp37m-manylinux2010_x86_64.whl (3.0 MB)
Collecting hdfs<3.0.0,>=2.1.0
Using cached hdfs-2.5.8.tar.gz (41 kB)
Collecting httplib2<0.18.0,>=0.8
Using cached httplib2-0.17.4-py3-none-any.whl (95 kB)
Collecting mock<3.0.0,>=1.0.1
Using cached mock-2.0.0-py2.py3-none-any.whl (56 kB)
Collecting numpy<2,>=1.14.3
Downloading numpy-1.19.1-cp37-cp37m-manylinux2010_x86_64.whl (14.5 MB)
Collecting pymongo<4.0.0,>=3.8.0
Downloading pymongo-3.10.1-cp37-cp37m-manylinux2014_x86_64.whl (462 kB)
Collecting oauth2client<4,>=2.0.1
Using cached oauth2client-3.0.0.tar.gz (77 kB)
Collecting protobuf<4,>=3.12.2
Downloading protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl (1.3 MB)
Collecting pydot<2,>=1.2.0
Using cached pydot-1.4.1-py2.py3-none-any.whl (19 kB)
Collecting python-dateutil<3,>=2.8.0
Using cached python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
Collecting pytz>=2018.3
Using cached pytz-2020.1-py2.py3-none-any.whl (510 kB)
Collecting requests<3.0.0,>=2.24.0
Using cached requests-2.24.0-py2.py3-none-any.whl (61 kB)
Collecting typing-extensions<3.8.0,>=3.7.0
Using cached typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Collecting avro-python3!=1.9.2,<1.10.0,>=1.8.1
Using cached avro-python3-1.9.2.1.tar.gz (37 kB)
Collecting pyarrow<0.18.0,>=0.15.1
Downloading pyarrow-0.17.1-cp37-cp37m-manylinux2014_x86_64.whl (63.8 MB)
Collecting six>=1.5.2
Using cached six-1.15.0-py2.py3-none-any.whl (10 kB)
Processing /home/jenkins/.cache/pip/wheels/9b/04/dd/7daf4150b6d9b12949298737de9431a324d4b797ffd63f526e/docopt-0.6.2-py2.py3-none-any.whl
Collecting pbr>=0.11
Using cached pbr-5.4.5-py2.py3-none-any.whl (110 kB)
Collecting pyasn1>=0.1.7
Using cached pyasn1-0.4.8-py2.py3-none-any.whl (77 kB)
Collecting pyasn1-modules>=0.0.5
Using cached pyasn1_modules-0.2.8-py2.py3-none-any.whl (155 kB)
Collecting rsa>=3.1.4
Using cached rsa-4.6-py3-none-any.whl (47 kB)
Requirement already satisfied: setuptools in ./build/gradleenv/-2047437407/lib/python3.7/site-packages (from protobuf<4,>=3.12.2->apache-beam==2.24.0.dev0) (49.2.0)
Collecting pyparsing>=2.1.4
Using cached pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
Collecting chardet<4,>=3.0.2
Using cached chardet-3.0.4-py2.py3-none-any.whl (133 kB)
Collecting certifi>=2017.4.17
Using cached certifi-2020.6.20-py2.py3-none-any.whl (156 kB)
Collecting urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1
Using cached urllib3-1.25.10-py2.py3-none-any.whl (127 kB)
Collecting idna<3,>=2.5
Using cached idna-2.10-py2.py3-none-any.whl (58 kB)
Building wheels for collected packages: crcmod, dill, future, hdfs, oauth2client, avro-python3
Building wheel for crcmod (setup.py): started
Building wheel for crcmod (setup.py): finished with status 'done'
Created wheel for crcmod: filename=crcmod-1.7-cp37-cp37m-linux_x86_64.whl size=30513 sha256=d5601861333522a1620dd631b1f159c585bffe4ff6418613d0db179ba2bf71de
Stored in directory: /home/jenkins/.cache/pip/wheels/dc/9a/e9/49e627353476cec8484343c4ab656f1e0d783ee77b9dde2d1f
Building wheel for dill (setup.py): started
Building wheel for dill (setup.py): finished with status 'done'
Created wheel for dill: filename=dill-0.3.1.1-py3-none-any.whl size=78530 sha256=d27f11ba3a3abc05b95cefd087d8dee20da088c880ab248298039f493d2ce1c0
Stored in directory: /home/jenkins/.cache/pip/wheels/a4/61/fd/c57e374e580aa78a45ed78d5859b3a44436af17e22ca53284f
Building wheel for future (setup.py): started
Building wheel for future (setup.py): finished with status 'done'
Created wheel for future: filename=future-0.18.2-py3-none-any.whl size=491058 sha256=c50e3f2e5f83fd07ec64db221b240db9c26ec515922655d8fad2ee0906417df9
Stored in directory: /home/jenkins/.cache/pip/wheels/56/b0/fe/4410d17b32f1f0c3cf54cdfb2bc04d7b4b8f4ae377e2229ba0
Building wheel for hdfs (setup.py): started
Building wheel for hdfs (setup.py): finished with status 'done'
Created wheel for hdfs: filename=hdfs-2.5.8-py3-none-any.whl size=33213 sha256=25fd32ee101effad357a7658cfab6e5d365538a55200ede761bd07799e964e24
Stored in directory: /home/jenkins/.cache/pip/wheels/0a/7d/38/ea4eaf831518e6cd867b515b88919a9785eb66f11def5ab859
Building wheel for oauth2client (setup.py): started
Building wheel for oauth2client (setup.py): finished with status 'done'
Created wheel for oauth2client: filename=oauth2client-3.0.0-py3-none-any.whl size=106383 sha256=242d0dfb46a7537bb585a0135a56d26b075473edcaec62d5a56dad594acea9f2
Stored in directory: /home/jenkins/.cache/pip/wheels/86/73/7a/3b3f76a2142176605ff38fbca574327962c71e25a43197a4c1
Building wheel for avro-python3 (setup.py): started
Building wheel for avro-python3 (setup.py): finished with status 'done'
Created wheel for avro-python3: filename=avro_python3-1.9.2.1-py3-none-any.whl size=43513 sha256=1904cf1ea17bb982f53eec270a3294effb6e58c440d0fd1e77ba5048c5106369
Stored in directory: /home/jenkins/.cache/pip/wheels/bc/49/5f/fdb5b9d85055c478213e0158ac122b596816149a02d82e0ab1
Successfully built crcmod dill future hdfs oauth2client avro-python3
Installing collected packages: crcmod, dill, pytz, fastavro, future, six, grpcio, docopt, chardet, certifi, urllib3, idna, requests, hdfs, httplib2, pbr, mock, numpy, pymongo, pyasn1, pyasn1-modules, rsa, oauth2client, protobuf, pyparsing, pydot, python-dateutil, typing-extensions, avro-python3, pyarrow, apache-beam
Running setup.py develop for apache-beam
Successfully installed apache-beam avro-python3-1.9.2.1 certifi-2020.6.20 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 fastavro-0.23.6 future-0.18.2 grpcio-1.30.0 hdfs-2.5.8 httplib2-0.17.4 idna-2.10 mock-2.0.0 numpy-1.19.1 oauth2client-3.0.0 pbr-5.4.5 protobuf-3.12.2 pyarrow-0.17.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pymongo-3.10.1 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 requests-2.24.0 rsa-4.6 six-1.15.0 typing-extensions-3.7.4.2 urllib3-1.25.10
PIPELINE_PY="
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from apache_beam.transforms import Map
# To test that our main session is getting plumbed through artifact staging
# correctly, create a global variable. If the main session is not plumbed
# through properly, global_var will be undefined and the pipeline will fail.
global_var = 1
pipeline_options = PipelineOptions()
pipeline_options.view_as(SetupOptions).save_main_session = True
pipeline = beam.Pipeline(options=pipeline_options)
pcoll = (pipeline
         | Create([0, 1, 2])
         | Map(lambda x: x + global_var))
assert_that(pcoll, equal_to([1, 2, 3]))
result = pipeline.run()
result.wait_until_finish()
"
if [[ "$RUNNER" = "FlinkRunner" ]]; then
INPUT_JAR_ARG="flink_job_server_jar"
else
INPUT_JAR_ARG="spark_job_server_jar"
fi
# Create the jar
OUTPUT_JAR=flink-test-$(date +%Y%m%d-%H%M%S).jar
date +%Y%m%d-%H%M%S
(python -c "$PIPELINE_PY" \
--runner "$RUNNER" \
--"$INPUT_JAR_ARG" "$JOB_SERVER_JAR" \
--output_executable_path $OUTPUT_JAR \
--parallelism 1 \
--sdk_worker_parallelism 1 \
--environment_type DOCKER \
--environment_config=$PYTHON_CONTAINER_IMAGE \
) || TEST_EXIT_CODE=$? # don't fail fast here; clean up before exiting
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:root:Waiting for grpc channel to be ready at localhost:42753.
WARNING:root:Waiting for grpc channel to be ready at localhost:42753.
WARNING:root:Waiting for grpc channel to be ready at localhost:42753.
WARNING:root:Waiting for grpc channel to be ready at localhost:42753.
WARNING:apache_beam.options.pipeline_options:Discarding unparseable args: ['--parallelism', '1']
ERROR:root:java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
Traceback (most recent call last):
  File "<string>", line 24, in <module>
  File "https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py", line 547, in wait_until_finish
    raise self._runtime_exception
RuntimeError: Pipeline BeamApp-jenkins-0729182100-264e4495_ef6f9ca8-3e00-4e6b-a523-45a86cca59dd failed in state FAILED: java.lang.IllegalArgumentException: Expected exactly one jar on sun.misc.Launcher$AppClassLoader@1b6d3586
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
# Execute the jar
java -jar $OUTPUT_JAR || TEST_EXIT_CODE=$?
fi
rm -rf $ENV_DIR
rm -f $OUTPUT_JAR
>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
echo ">>> SUCCESS"
else
echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
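For reference, the pipeline-to-jar flow that the test script above exercises can be reproduced outside Jenkins roughly as follows. This is only a sketch: the runner name, jar paths, and container image below are placeholders rather than values from this build; the option names (spark_job_server_jar, output_executable_path, environment_type, environment_config) simply mirror the flags passed to python -c "$PIPELINE_PY" above.

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions

# Placeholder values -- substitute a real Spark job server jar, output path,
# and SDK container image for your environment.
options = PipelineOptions([
    '--runner=SparkRunner',
    '--spark_job_server_jar=/path/to/beam-runners-spark-job-server.jar',
    '--output_executable_path=/tmp/beam-test-pipeline.jar',
    '--environment_type=DOCKER',
    '--environment_config=apache/beam_python3.7_sdk:2.24.0',
])
options.view_as(SetupOptions).save_main_session = True

# Running the pipeline with --output_executable_path set packages it into a
# self-contained jar instead of executing it directly.
with beam.Pipeline(options=options) as p:
    p | beam.Create([0, 1, 2]) | beam.Map(lambda x: x + 1)

# If jar creation succeeds, the jar is then executed the same way the script
# above does it:  java -jar /tmp/beam-test-pipeline.jar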
> Task :runners:spark:job-server:testJavaJarCreatorPy37 FAILED
FAILURE: Build failed with an exception.
* Where:
Build file 'https://ci-beam.apache.org/job/beam_PostCommit_PortableJar_Spark/ws/src/runners/spark/job-server/build.gradle' line: 166
* What went wrong:
Execution failed for task ':runners:spark:job-server:testJavaJarCreatorPy37'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 20m 44s
69 actionable tasks: 54 executed, 14 from cache, 1 up-to-date
Publishing build scan...
https://gradle.com/s/aszwxssyu2tko
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]