kou commented on a change in pull request #9210:
URL: https://github.com/apache/arrow/pull/9210#discussion_r569110478



##########
File path: docker-compose.yml
##########
@@ -1308,3 +1308,34 @@ services:
         /arrow/ci/scripts/python_build.sh /arrow /build &&
         /arrow/ci/scripts/java_build.sh /arrow /build &&
         /arrow/ci/scripts/integration_spark.sh /arrow /spark"]
+
+  conda-python-spark-pyarrow-only:
+    # Usage:
+    #   docker-compose build conda-cpp
+    #   docker-compose build conda-python
+    #   docker-compose build conda-python-spark-pyarrow-only
+    #   docker-compose run conda-python-spark-pyarrow-only
+    image: ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}-pyarrow-only
+    build:
+      context: .
+      dockerfile: ci/docker/conda-python-spark.dockerfile
+      cache_from:
+        - ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}-pyarrow-only
+      args:
+        repo: ${REPO}
+        arch: ${ARCH}
+        python: ${PYTHON}
+        jdk: ${JDK}
+        # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
+        # be set to ${MAVEN}
+        maven: 3.5
+        spark: ${SPARK}
+    shm_size: *shm-size
+    environment:
+      <<: *ccache
+    volumes: *conda-maven-volumes
+    command:
+      ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
+        /arrow/ci/scripts/python_build.sh /arrow /build &&
+        /arrow/ci/scripts/java_build.sh /arrow /build &&
+        /arrow/ci/scripts/integration_spark.sh /arrow /spark true"]

Review comment:
       How about the following?
   
   ```diff
   diff --git a/dev/tasks/tasks.yml b/dev/tasks/tasks.yml
   index 63f38ab8f..af112c036 100644
   --- a/dev/tasks/tasks.yml
   +++ b/dev/tasks/tasks.yml
   @@ -1945,8 +1945,9 @@ tasks:
          env:
            PYTHON: 3.7
            SPARK: "branch-3.0"
   +        TEST_PYARROW_ONLY: "true"
          # use the branch-3.0 of spark, so prevent reusing any layers
   -      run: --no-leaf-cache conda-python-spark-pyarrow-only
   +      run: --no-leaf-cache conda-python-spark
    
      test-conda-python-3.8-spark-master:
        ci: github
   diff --git a/docker-compose.yml b/docker-compose.yml
   index aa688de01..ca8989796 100644
   --- a/docker-compose.yml
   +++ b/docker-compose.yml
   @@ -91,7 +91,6 @@ x-hierarchy:
          - conda-python-turbodbc
          - conda-python-kartothek
          - conda-python-spark
   -      - conda-python-spark-pyarrow-only
      - debian-cpp:
        - debian-c-glib:
          - debian-ruby
   @@ -1340,35 +1339,4 @@ services:
          ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
            /arrow/ci/scripts/python_build.sh /arrow /build &&
            /arrow/ci/scripts/java_build.sh /arrow /build &&
   -        /arrow/ci/scripts/integration_spark.sh /arrow /spark"]
   -
   -  conda-python-spark-pyarrow-only:
   -    # Usage:
   -    #   docker-compose build conda-cpp
   -    #   docker-compose build conda-python
   -    #   docker-compose build conda-python-spark-pyarrow-only
   -    #   docker-compose run conda-python-spark-pyarrow-only
   -    image: ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}-pyarrow-only
   -    build:
   -      context: .
   -      dockerfile: ci/docker/conda-python-spark.dockerfile
   -      cache_from:
   -        - ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}-pyarrow-only
   -      args:
   -        repo: ${REPO}
   -        arch: ${ARCH}
   -        python: ${PYTHON}
   -        jdk: ${JDK}
   -        # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
   -        # be set to ${MAVEN}
   -        maven: 3.5
   -        spark: ${SPARK}
   -    shm_size: *shm-size
   -    environment:
   -      <<: *ccache
   -    volumes: *conda-maven-volumes
   -    command:
   -      ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
   -        /arrow/ci/scripts/python_build.sh /arrow /build &&
   -        /arrow/ci/scripts/java_build.sh /arrow /build &&
   -        /arrow/ci/scripts/integration_spark.sh /arrow /spark true"]
   +        /arrow/ci/scripts/integration_spark.sh /arrow /spark ${TEST_PYARROW_ONLY:-false}"]
   ```
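
   For reference, this leans on Compose's `${VARIABLE:-default}` substitution: with `TEST_PYARROW_ONLY` unset, the command falls back to `false`, so the existing `conda-python-spark` service covers both modes. A rough usage sketch (the exact invocations are illustrative):

   ```sh
   # Full Spark integration tests: TEST_PYARROW_ONLY is unset, so
   # ${TEST_PYARROW_ONLY:-false} expands to "false".
   docker-compose run conda-python-spark

   # PyArrow-only mode: set the variable so the same service runs
   # integration_spark.sh with "true" as its third argument.
   TEST_PYARROW_ONLY=true docker-compose run conda-python-spark
   ```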



