This is an automated email from the ASF dual-hosted git repository.

damccorm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new ad73e89987e Fix PostCommit XVR GoUsingJava Dataflow job (#36757)
ad73e89987e is described below

commit ad73e89987eb859d492a2f5057d2d741816f3da1
Author: Abdelrahman Ibrahim <[email protected]>
AuthorDate: Mon Nov 10 20:09:04 2025 +0200

    Fix PostCommit XVR GoUsingJava Dataflow job (#36757)
    
    * Fix buildx image loading for Python container
    
    * Added missing auth
    
    * Fix gcloud config dir issue
    
    * Fixed image push
    
    * Use docker push command
    
    * Refactoring
    
    * Eliminate duplicated if conditions
---
 .../beam_PostCommit_XVR_GoUsingJava_Dataflow.yml   |   4 -
 runners/google-cloud-dataflow-java/build.gradle    | 117 ++++++++++++---------
 2 files changed, 70 insertions(+), 51 deletions(-)

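The substance of the change is in build.gradle below: the Python container push task now resolves a writable CLOUDSDK_CONFIG directory itself (falling back to /tmp/gcloud when the inherited one is unset or not writable) and runs gcloud auth configure-docker for both us.gcr.io and gcr.io before pushing, so the workflow no longer needs its own credential-helper step. A minimal standalone sketch of that pattern follows; the helper and task names here are illustrative, not part of the commit:

    // Sketch only: resolve a writable gcloud config dir, then register the
    // Docker credential helper for the registries the push targets.
    def resolveCloudsdkConfig = {
      def configured = System.getenv("CLOUDSDK_CONFIG")
      if (configured != null && new File(configured).canWrite()) {
        return configured
      }
      def fallback = new File("/tmp/gcloud")
      fallback.mkdirs()   // same fallback dir the commit uses
      return fallback.absolutePath
    }

    tasks.register("configureGcrDockerAuth") {   // hypothetical task name
      doFirst {
        def cloudsdkConfig = resolveCloudsdkConfig()
        ["us.gcr.io", "gcr.io"].each { registry ->
          exec {
            environment "CLOUDSDK_CONFIG", cloudsdkConfig
            commandLine "gcloud", "--quiet", "auth", "configure-docker", registry
          }
        }
      }
    }
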
diff --git a/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml
index c22bec84760..cc52476ca30 100644
--- a/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml
+++ b/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml
@@ -77,13 +77,9 @@ jobs:
           python-version: default
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
-      - name: GCloud Docker credential helper
-        run: |
-          gcloud auth configure-docker us.gcr.io
       - name: run XVR GoUsingJava Dataflow script
         env:
           USER: github-actions
-          CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}}
         uses: ./.github/actions/gradle-command-self-hosted-action
         with:
          gradle-command: :runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerGoUsingJava
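
The two removals above are the workflow-side counterpart of the Gradle-side auth: the job no longer configures the Docker credential helper or overrides CLOUDSDK_CONFIG itself, since the build script now does both, as the build.gradle diff below shows.
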
diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle
index 38b996354a2..463049cad44 100644
--- a/runners/google-cloud-dataflow-java/build.gradle
+++ b/runners/google-cloud-dataflow-java/build.gradle
@@ -212,17 +212,17 @@ def commonLegacyExcludeCategories = [
 ]
 
 def commonRunnerV2ExcludeCategories = [
-        'org.apache.beam.sdk.testing.UsesExternalService',
-        'org.apache.beam.sdk.testing.UsesGaugeMetrics',
-        'org.apache.beam.sdk.testing.UsesSetState',
-        'org.apache.beam.sdk.testing.UsesMapState',
-        'org.apache.beam.sdk.testing.UsesMultimapState',
-        'org.apache.beam.sdk.testing.UsesMetricsPusher',
-        'org.apache.beam.sdk.testing.UsesOrderedListState',
-        'org.apache.beam.sdk.testing.UsesTestStream',
-        'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime',
-        'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
-        'org.apache.beam.sdk.testing.UsesBoundedTrieMetrics', // Dataflow QM as of now does not support returning back BoundedTrie in metric result.
+  'org.apache.beam.sdk.testing.UsesExternalService',
+  'org.apache.beam.sdk.testing.UsesGaugeMetrics',
+  'org.apache.beam.sdk.testing.UsesSetState',
+  'org.apache.beam.sdk.testing.UsesMapState',
+  'org.apache.beam.sdk.testing.UsesMultimapState',
+  'org.apache.beam.sdk.testing.UsesMetricsPusher',
+  'org.apache.beam.sdk.testing.UsesOrderedListState',
+  'org.apache.beam.sdk.testing.UsesTestStream',
+  'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime',
+  'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
+  'org.apache.beam.sdk.testing.UsesBoundedTrieMetrics', // Dataflow QM as of now does not support returning back BoundedTrie in metric result.
 ]
 
 def createLegacyWorkerValidatesRunnerTest = { Map args ->
@@ -241,7 +241,7 @@ def createLegacyWorkerValidatesRunnerTest = { Map args ->
     maxParallelForks Integer.MAX_VALUE
     classpath = configurations.validatesRunner
     testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
-            files(project(project.path).sourceSets.test.output.classesDirs)
+      files(project(project.path).sourceSets.test.output.classesDirs)
     useJUnit {
       includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
       commonLegacyExcludeCategories.each {
@@ -274,7 +274,7 @@ def createRunnerV2ValidatesRunnerTest = { Map args ->
     maxParallelForks Integer.MAX_VALUE
     classpath = configurations.validatesRunner
     testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
-            files(project(project.path).sourceSets.test.output.classesDirs)
+      files(project(project.path).sourceSets.test.output.classesDirs)
     useJUnit {
       includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
       commonRunnerV2ExcludeCategories.each {
@@ -306,9 +306,9 @@ def buildAndPushDockerJavaContainer = tasks.register("buildAndPushDockerJavaCont
 
   dependsOn ":sdks:java:container:${javaVer}:docker"
   def defaultDockerImageName = containerImageName(
-          name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk",
-          root: "apache",
-          tag: project.sdk_version)
+    name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk",
+    root: "apache",
+    tag: project.sdk_version)
   doLast {
     exec {
       commandLine "docker", "tag", "${defaultDockerImageName}", 
"${dockerJavaImageName}"
@@ -368,14 +368,37 @@ def buildAndPushDockerPythonContainer = tasks.create("buildAndPushDockerPythonCo
   def pythonVer = project.project(':sdks:python').pythonVersion
   dependsOn ":sdks:python:container:py"+pythonVer.replace('.', '')+":docker"
   def defaultDockerImageName = containerImageName(
-          name: "${project.docker_image_default_repo_prefix}python${pythonVer}_sdk",
-          root: "apache",
-          tag: project.sdk_version)
+    name: "${project.docker_image_default_repo_prefix}python${pythonVer}_sdk",
+    root: "apache",
+    tag: project.sdk_version)
+  doFirst {
+    def cloudsdkConfig = System.getenv("CLOUDSDK_CONFIG")
+    if (cloudsdkConfig == null || !new File(cloudsdkConfig).canWrite()) {
+      cloudsdkConfig = "/tmp/gcloud"
+    }
+    if (cloudsdkConfig == "/tmp/gcloud") {
+      def tmpGcloudDir = new File(cloudsdkConfig)
+      tmpGcloudDir.mkdirs()
+      System.setProperty("CLOUDSDK_CONFIG", cloudsdkConfig)
+    }
+    exec {
+      environment "CLOUDSDK_CONFIG", cloudsdkConfig
+      commandLine "gcloud", "--quiet", "auth", "configure-docker", "us.gcr.io"
+      ignoreExitValue = false
+    }
+    exec {
+      environment "CLOUDSDK_CONFIG", cloudsdkConfig
+      commandLine "gcloud", "--quiet", "auth", "configure-docker", "gcr.io"
+      ignoreExitValue = false
+    }
+  }
   doLast {
     exec {
       commandLine "docker", "tag", "${defaultDockerImageName}", 
"${dockerPythonImageName}"
     }
+    def cloudsdkConfig = System.getenv("CLOUDSDK_CONFIG") ?: System.getProperty("CLOUDSDK_CONFIG") ?: "/tmp/gcloud"
     exec {
+      environment "CLOUDSDK_CONFIG", cloudsdkConfig
       commandLine "gcloud", "docker", "--", "push", "${dockerPythonImageName}"
     }
   }
@@ -594,13 +617,13 @@ task googleCloudPlatformLegacyWorkerIntegrationTest(type: Test, dependsOn: copyG
   group = "Verification"
   dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
   systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
-          "--runner=TestDataflowRunner",
-          "--project=${gcpProject}",
-          "--region=${gcpRegion}",
-          "--tempRoot=${dataflowPostCommitTempRoot}",
-          "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
-          "--workerHarnessContainerImage=",
-          "--firestoreDb=${firestoreDb}",
+    "--runner=TestDataflowRunner",
+    "--project=${gcpProject}",
+    "--region=${gcpRegion}",
+    "--tempRoot=${dataflowPostCommitTempRoot}",
+    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
+    "--workerHarnessContainerImage=",
+    "--firestoreDb=${firestoreDb}",
   ])
 
   include '**/*IT.class'
@@ -633,14 +656,14 @@ task googleCloudPlatformLegacyWorkerKmsIntegrationTest(type: Test) {
   group = "Verification"
   dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
   systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
-          "--runner=TestDataflowRunner",
-          "--project=${gcpProject}",
-          "--region=${gcpRegion}",
-          "--tempRoot=${dataflowPostCommitTempRootKms}",
-          "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
-          "--workerHarnessContainerImage=",
-          "--dataflowKmsKey=${dataflowKmsKey}",
-          "--firestoreDb=${firestoreDb}",
+    "--runner=TestDataflowRunner",
+    "--project=${gcpProject}",
+    "--region=${gcpRegion}",
+    "--tempRoot=${dataflowPostCommitTempRootKms}",
+    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
+    "--workerHarnessContainerImage=",
+    "--dataflowKmsKey=${dataflowKmsKey}",
+    "--firestoreDb=${firestoreDb}",
   ])
 
   include '**/*IT.class'
@@ -738,12 +761,12 @@ task coreSDKJavaLegacyWorkerIntegrationTest(type: Test) {
   dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
 
   systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
-          "--runner=TestDataflowRunner",
-          "--project=${gcpProject}",
-          "--region=${gcpRegion}",
-          "--tempRoot=${dataflowPostCommitTempRoot}",
-          "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
-          "--workerHarnessContainerImage=",
+    "--runner=TestDataflowRunner",
+    "--project=${gcpProject}",
+    "--region=${gcpRegion}",
+    "--tempRoot=${dataflowPostCommitTempRoot}",
+    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
+    "--workerHarnessContainerImage=",
   ])
 
   include '**/*IT.class'
@@ -843,17 +866,17 @@ createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
 
 // Generates :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflowBom
 createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
-        runner: 'DataflowBom',
-        gcpProject: gcpProject,
-        gcpRegion: gcpRegion,
-        gcsBucket: gcsBucket,
-        bqDataset: bqDataset,
-        pubsubTopic: pubsubTopic)
+  runner: 'DataflowBom',
+  gcpProject: gcpProject,
+  gcpRegion: gcpRegion,
+  gcsBucket: gcsBucket,
+  bqDataset: bqDataset,
+  pubsubTopic: pubsubTopic)
 
 // Standalone task for testing GCS upload, use with -PfilesToStage and -PgcpTempRoot.
 task GCSUpload(type: JavaExec) {
   mainClass = 'org.apache.beam.runners.dataflow.util.GCSUploadMain'
   classpath = sourceSets.test.runtimeClasspath
   args "--stagingLocation=${dataflowUploadTemp}/staging",
-       "--filesToStage=${testFilesToStage}"
+    "--filesToStage=${testFilesToStage}"
 }
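
For local verification, the suite can be invoked the same way the workflow does, via the Gradle task named in the workflow above (assuming Docker and an authenticated gcloud are available on the machine):

    ./gradlew :runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerGoUsingJava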
