See 
<https://ci-beam.apache.org/job/beam_PostCommit_Py_ValCont/10986/display/redirect>

Changes:


------------------------------------------
[...truncated 866.38 KB...]
#13 1.605   Downloading prettytable-3.8.0-py3-none-any.whl (27 kB)
#13 1.666 Collecting wcwidth
#13 1.681   Downloading wcwidth-0.2.6-py2.py3-none-any.whl (29 kB)
#13 2.946 Installing collected packages: wcwidth, prettytable, pip-licenses
#13 3.119 Successfully installed pip-licenses-4.3.2 prettytable-3.8.0 
wcwidth-0.2.6
#13 3.120 WARNING: Running pip as the 'root' user can result in broken 
permissions and conflicting behaviour with the system package manager. It is 
recommended to use a virtual environment instead: 
https://pip.pypa.io/warnings/venv
#13 3.286 
#13 3.286 [notice] A new release of pip is available: 23.0.1 -> 23.2.1
#13 3.286 [notice] To update, run: pip install --upgrade pip
#13 13.59 INFO:root:Successfully pulled licenses for 137 dependencies
#13 13.59 Skip pulling license for  bs4
#13 DONE 40.3s

#14 [stage-2 1/2] COPY --from=third_party_licenses 
/opt/apache/beam/third_party_licenses /opt/apache/beam/third_party_licenses
#14 DONE 3.1s

#15 [stage-2 2/2] RUN if [ "true" != "true" ] ; then       rm -rf 
/opt/apache/beam/third_party_licenses ;     fi
#15 DONE 2.0s

#16 exporting to image
#16 exporting layers
#16 exporting layers 34.6s done
#16 writing image 
sha256:1259de1bfe5798d1375367d4b2ae64c50f10a28c24ea5a69d4f9cde698b43362
#16 writing image 
sha256:1259de1bfe5798d1375367d4b2ae64c50f10a28c24ea5a69d4f9cde698b43362 done
#16 naming to docker.io/apache/beam_python3.8_sdk:2.50.0.dev done
#16 DONE 35.0s

> Task :sdks:python:test-suites:dataflow:py38:validatesContainer

# Where to store integration test outputs.
GCS_LOCATION=${GCS_LOCATION:-gs://temp-storage-for-end-to-end-tests}

# Project for the container and integration test
PROJECT=${PROJECT:-apache-beam-testing}
REGION=${REGION:-us-central1}
# Pull the image repo prefix and SDK version from gradle.properties; the script
# assumes it is invoked from the repository root (verified below).
IMAGE_PREFIX="$(grep 'docker_image_default_repo_prefix' gradle.properties | cut -d'=' -f2)"
SDK_VERSION="$(grep 'sdk_version' gradle.properties | cut -d'=' -f2)"
PY_VERSION=$1
ARCH=${3:-"x86"}
IMAGE_NAME="${IMAGE_PREFIX}python${PY_VERSION}_sdk"
CONTAINER_PROJECT="sdks:python:container:py${PY_VERSION//.}"  # Note: we substitute away the dot in the version.
PY_INTERPRETER="python${PY_VERSION}"
TEST_SUITE_TAG="it_validatescontainer"

XUNIT_FILE="pytest-$IMAGE_NAME.xml"

# Verify in the root of the repository
test -d sdks/python/container

# Verify docker and gcloud commands exist
command -v docker
command -v gcloud
docker -v
gcloud -v

TAG=$(date +%Y%m%d-%H%M%S%N)
CONTAINER="us.gcr.io/$PROJECT/$USER/$IMAGE_NAME"
PREBUILD_SDK_CONTAINER_REGISTRY_PATH="us.gcr.io/$PROJECT/$USER/prebuild_python${PY_VERSION//.}_sdk"
echo "Using container $CONTAINER"

if [[ "$ARCH" == "x86" ]]; then
  # Verify docker image has been built.
  docker images | grep "apache/$IMAGE_NAME" | grep "$SDK_VERSION"

  # Tag the docker container.
  docker tag "apache/$IMAGE_NAME:$SDK_VERSION" "$CONTAINER:$TAG"

  # Push the container.
  # NOTE(review): `gcloud docker` is deprecated for Docker clients > 18.03 (the
  # CI log prints the deprecation warning); consider migrating to
  # `gcloud auth configure-docker` + `docker push`.
  gcloud docker -- push "$CONTAINER:$TAG"
elif [[ "$ARCH" == "ARM" ]]; then
  # Note: ARM test suites only run on github actions, where multi-arch Python SDK containers are already pushed during build.
  # Reset the test suite tag to run ARM pipelines.
  TEST_SUITE_TAG="it_dataflow_arm"

  # Reset the multi-arch Python SDK container image tag.
  TAG=$MULTIARCH_TAG
else
  # Error message goes to stderr with a trailing newline so it is not swallowed
  # or glued onto subsequent output.
  printf 'Please give a valid CPU architecture, either x86 or ARM.\n' >&2
  exit 1
fi
WARNING: `gcloud docker` will not be supported for Docker client versions above 
18.03.

As an alternative, use `gcloud auth configure-docker` to configure `docker` to
use `gcloud` as a credential helper, then use `docker` as you would for non-GCR
registries, e.g. `docker pull gcr.io/project-id/my-image`. Add
`--verbosity=error` to silence this warning: `gcloud docker
--verbosity=error -- pull gcr.io/project-id/my-image`.

See: 
https://cloud.google.com/container-registry/docs/support/deprecation-notices#gcloud-docker


function cleanup_container {
  # Best-effort teardown: delete the pushed container image and any prebuilt
  # SDK images, both locally and in the remote registry. Each step logs and
  # continues on failure so one missing image does not abort the rest of the
  # cleanup (this runs from an EXIT trap).
  docker rmi "$CONTAINER:$TAG" || echo "Built container image was not removed. Possibly, it was not saved locally."
  # Remove any locally cached prebuilt SDK images matching the registry path.
  for image in $(docker images --format '{{.Repository}}:{{.Tag}}' | grep "$PREBUILD_SDK_CONTAINER_REGISTRY_PATH")
    do docker rmi "$image" || echo "Failed to remove prebuilt sdk container image"
  done
  gcloud --quiet container images delete "$CONTAINER:$TAG" || echo "Failed to delete container"
  # Delete every remote prebuilt SDK image by digest; --force-delete-tags also
  # removes any tags still pointing at the digest.
  for digest in $(gcloud container images list-tags "$PREBUILD_SDK_CONTAINER_REGISTRY_PATH/beam_python_prebuilt_sdk" --format="get(digest)")
    do gcloud container images delete "$PREBUILD_SDK_CONTAINER_REGISTRY_PATH/beam_python_prebuilt_sdk@$digest" --force-delete-tags --quiet || echo "Failed to remove prebuilt sdk container image"
  done

  echo "Removed the container"
}
trap cleanup_container EXIT

echo ">>> Successfully built and pushed container $CONTAINER"

# Guard the cd: without it, a failed cd would run pytest from the wrong
# directory (this script does not run under `set -e`).
cd sdks/python || exit 1
SDK_LOCATION=$2

# Run the Dataflow ValidatesContainer suite against the freshly pushed image.
echo ">>> RUNNING DATAFLOW RUNNER VALIDATESCONTAINER TEST"
pytest -o "junit_suite_name=$IMAGE_NAME" \
  -m="$TEST_SUITE_TAG" \
  --show-capture=no \
  --numprocesses=1 \
  --timeout=1800 \
  --junitxml="$XUNIT_FILE" \
  --ignore-glob '.*py3\d?\.py$' \
  --log-cli-level=INFO \
  --test-pipeline-options=" \
    --runner=TestDataflowRunner \
    --project=$PROJECT \
    --region=$REGION \
    --sdk_container_image=$CONTAINER:$TAG \
    --staging_location=$GCS_LOCATION/staging-validatesrunner-test \
    --temp_location=$GCS_LOCATION/temp-validatesrunner-test \
    --output=$GCS_LOCATION/output \
    --sdk_location=$SDK_LOCATION \
    --num_workers=1 \
    --docker_registry_push_url=$PREBUILD_SDK_CONTAINER_REGISTRY_PATH"

echo ">>> SUCCESS DATAFLOW RUNNER VALIDATESCONTAINER TEST"
# Explicit cleanup on the success path; the EXIT trap covers failure paths.
cleanup_container
Error response from daemon: invalid reference format
WARNING: Successfully resolved tag to sha256, but it is recommended to use 
sha256 directly.
Digests:
- 
us.gcr.io/apache-beam-testing/jenkins/beam_python3.8_sdk@sha256:b49a8464eed94bda4a70133c23aa66792071e2346cad72033796c4cd775a133e
  Associated tags:
 - 20230803-170240701737194
Tags:
- 
us.gcr.io/apache-beam-testing/jenkins/beam_python3.8_sdk:20230803-170240701737194
Deleted 
[us.gcr.io/apache-beam-testing/jenkins/beam_python3.8_sdk:20230803-170240701737194].
Deleted 
[us.gcr.io/apache-beam-testing/jenkins/beam_python3.8_sdk@sha256:b49a8464eed94bda4a70133c23aa66792071e2346cad72033796c4cd775a133e].
Digests:
- 
us.gcr.io/apache-beam-testing/jenkins/prebuild_python38_sdk/beam_python_prebuilt_sdk@sha256:b99b141e3a254391750eb1a8fcc76821dd2a1f1762684d224b2df682f1666317
  Associated tags:
 - 31b73dde-d904-444c-bde0-1f165590c746
Deleted 
[us.gcr.io/apache-beam-testing/jenkins/prebuild_python38_sdk/beam_python_prebuilt_sdk:31b73dde-d904-444c-bde0-1f165590c746].
Deleted 
[us.gcr.io/apache-beam-testing/jenkins/prebuild_python38_sdk/beam_python_prebuilt_sdk@sha256:b99b141e3a254391750eb1a8fcc76821dd2a1f1762684d224b2df682f1666317].
Digests:
- 
us.gcr.io/apache-beam-testing/jenkins/prebuild_python38_sdk/beam_python_prebuilt_sdk@sha256:ed621f2655ef71dfe4943720b7ae167ee387d260f1cbadee579cba8ea717d626
  Associated tags:
 - 2fe02dea-781a-4ce6-9b94-43244602148a
Deleted 
[us.gcr.io/apache-beam-testing/jenkins/prebuild_python38_sdk/beam_python_prebuilt_sdk:2fe02dea-781a-4ce6-9b94-43244602148a].
Deleted 
[us.gcr.io/apache-beam-testing/jenkins/prebuild_python38_sdk/beam_python_prebuilt_sdk@sha256:ed621f2655ef71dfe4943720b7ae167ee387d260f1cbadee579cba8ea717d626].

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:container:py310:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:container:py311:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
==============================================================================

3: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:container:py39:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 8.0.

You can use '--warning-mode all' to show the individual deprecation warnings 
and determine if they come from your own scripts or plugins.

See 
https://docs.gradle.org/7.6.2/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 54m 38s
59 actionable tasks: 48 executed, 4 from cache, 7 up-to-date

A build scan cannot be produced as an error occurred gathering build data.
Please report this problem via https://gradle.com/help/plugin and include the 
following via copy/paste:

----------
Gradle version: 7.6.2
Plugin version: 3.13.2

com.gradle.scan.plugin.internal.m.a: Build operation dispatch of progress 
notification [ERROR] [system.err] <Normal>#9 337.8 future by running the 
following command:
</Normal> failed.
Operation context: 
        org.gradle.launcher.exec.RunAsBuildOperationBuildActionExecutor$1 (1): 
{}
        org.gradle.execution.RunRootBuildWorkBuildOperationType$Details 
(10887): {getBuildStartTime=1691080839483}
        
org.gradle.api.internal.tasks.execution.ExecuteTaskBuildOperationDetails 
(11201): {buildPath=:, taskPath=:sdks:python:container:py39:docker, 
taskClass=org.gradle.api.tasks.Exec, taskId=2248}
        org.gradle.internal.execution.steps.ExecuteStep$Operation$Details$1 
(11203): {}
        
org.gradle.api.internal.tasks.execution.ExecuteTaskActionBuildOperationType$1 
(11204): {}
Caused by: java.lang.RuntimeException: Could not serialize event class 
'com.gradle.scan.agent.serialization.scan.a.b'.
        at com.gradle.scan.plugin.internal.h.c.c.a(SourceFile:112)
        at com.gradle.scan.plugin.internal.h.a.a(SourceFile:42)
        at com.gradle.scan.plugin.internal.h.a.b(SourceFile:34)
        at com.gradle.scan.plugin.internal.h.b.b(SourceFile:34)
        at com.gradle.scan.plugin.internal.h.d.c(SourceFile:39)
        at com.gradle.scan.plugin.internal.c.t.b.a(SourceFile:16)
        at com.gradle.scan.plugin.internal.c.t.d.a(SourceFile:32)
        at com.gradle.scan.plugin.internal.m.b$b.a(SourceFile:108)
        at com.gradle.scan.plugin.internal.m.b$b.progress(SourceFile:98)
        at com.gradle.scan.plugin.internal.m.b.a(SourceFile:60)
        at com.gradle.scan.plugin.internal.m.n.a(SourceFile:42)
        at com.gradle.scan.plugin.internal.m.d.a(SourceFile:93)
        at com.gradle.scan.plugin.internal.m.h.a(SourceFile:45)
        at com.gradle.scan.plugin.internal.r.a$a.a(SourceFile:31)
        at com.gradle.scan.plugin.internal.r.a$a.a(SourceFile:20)
        at com.gradle.scan.plugin.internal.r.a.c(SourceFile:67)
Caused by: java.lang.RuntimeException: java.io.IOException: No space left on 
device
        at 
com.gradle.scan.agent.serialization.scan.serializer.d.flush(SourceFile:245)
        at 
com.gradle.scan.agent.serialization.scan.serializer.c.a(SourceFile:116)
        at 
com.gradle.scan.agent.serialization.scan.serializer.c.a(SourceFile:106)
        at 
com.gradle.scan.agent.serialization.scan.serializer.c.a(SourceFile:75)
        at com.gradle.scan.plugin.internal.h.c.c.a(SourceFile:110)
        ... 15 more
Caused by: java.io.IOException: No space left on device
        at 
com.gradle.scan.agent.serialization.scan.serializer.d.flush(SourceFile:243)
        ... 19 more
----------

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to