This is an automated email from the ASF dual-hosted git repository.

gaborgsomogyi pushed a commit to branch release-1.20
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.20 by this push:
     new ca2b082114e [FLINK-38487][build] bump vmImage version to ubuntu-24.04
ca2b082114e is described below

commit ca2b082114e08e468b41eb0c2a9589e3a70b1399
Author: Gabor Somogyi <[email protected]>
AuthorDate: Thu Oct 9 09:25:47 2025 +0200

    [FLINK-38487][build] bump vmImage version to ubuntu-24.04
---
 azure-pipelines.yml                           |  6 +++---
 tools/azure-pipelines/build-apache-repo.yml   | 18 +++++++++---------
 tools/azure-pipelines/build-nightly-dist.yml  |  4 ++--
 tools/azure-pipelines/build-python-wheels.yml |  2 +-
 tools/azure-pipelines/e2e-template.yml        | 11 +++++++++++
 5 files changed, 26 insertions(+), 15 deletions(-)
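The change itself is mechanical: each Azure Pipelines pool definition below moves its vmImage from ubuntu-22.04 to ubuntu-24.04, and e2e-template.yml additionally gains a kubectl installation step (see the last diff). As a minimal sketch only (the job name and verification step are illustrative, not part of this commit), a pool pinned to the new image looks like this:

    jobs:
      - job: example_build              # illustrative job name, not from this commit
        pool:
          vmImage: 'ubuntu-24.04'       # the value bumped by this commit
        steps:
          - script: lsb_release -a      # quick sanity check of the agent's Ubuntu release
            displayName: Show agent OS version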

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index a63e4f4acc5..74ec96ad527 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -73,16 +73,16 @@ stages:
         parameters: # see template file for a definition of the parameters.
           stage_name: ci_build
           test_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2"
           run_end_to_end: false
           container: flink-build-container
           jdk: 8
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-22.04'
+          vmImage: 'ubuntu-24.04'
         steps:
           - task: GoTool@0
             inputs:
diff --git a/tools/azure-pipelines/build-apache-repo.yml b/tools/azure-pipelines/build-apache-repo.yml
index e849b529098..631098de6cc 100644
--- a/tools/azure-pipelines/build-apache-repo.yml
+++ b/tools/azure-pipelines/build-apache-repo.yml
@@ -68,14 +68,14 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2"
           run_end_to_end: false
           container: flink-build-container
           jdk: 8
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-22.04'
+          vmImage: 'ubuntu-24.04'
         steps:
          # Skip docs check if this is a pull request that doesn't contain a documentation change
           - task: GoTool@0
@@ -110,9 +110,9 @@ stages:
         parameters:
           stage_name: cron_azure
           test_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2"
           run_end_to_end: true
           container: flink-build-container
@@ -123,7 +123,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=3.2.3 
-Phadoop3-tests,hive3"
           run_end_to_end: true
           container: flink-build-container
@@ -134,7 +134,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 
-Pjava11-target"
           run_end_to_end: true
           container: flink-build-container
@@ -156,7 +156,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 
-Djdk21 -Pjava21-target"
           run_end_to_end: true
           container: flink-build-container
@@ -167,14 +167,14 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 
-Penable-adaptive-scheduler"
           run_end_to_end: true
           container: flink-build-container
           jdk: 8
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-22.04'
+          vmImage: 'ubuntu-24.04'
         steps:
           - task: GoTool@0
             inputs:
diff --git a/tools/azure-pipelines/build-nightly-dist.yml b/tools/azure-pipelines/build-nightly-dist.yml
index 61a5bb46b89..21420361106 100644
--- a/tools/azure-pipelines/build-nightly-dist.yml
+++ b/tools/azure-pipelines/build-nightly-dist.yml
@@ -19,7 +19,7 @@ parameters:
 jobs:
   - job: ${{parameters.stage_name}}_binary
     pool:
-      vmImage: 'ubuntu-22.04'
+      vmImage: 'ubuntu-24.04'
     container: flink-build-container
     workspace:
       clean: all
@@ -69,7 +69,7 @@ jobs:
       #    artifact: nightly-release
   - job: ${{parameters.stage_name}}_maven
     pool:
-      vmImage: 'ubuntu-22.04'
+      vmImage: 'ubuntu-24.04'
     container: flink-build-container
     timeoutInMinutes: 240
     workspace:
diff --git a/tools/azure-pipelines/build-python-wheels.yml b/tools/azure-pipelines/build-python-wheels.yml
index d68069c1632..251a5b7844b 100644
--- a/tools/azure-pipelines/build-python-wheels.yml
+++ b/tools/azure-pipelines/build-python-wheels.yml
@@ -16,7 +16,7 @@
 jobs:
   - job: build_wheels_on_Linux
     pool:
-      vmImage: 'ubuntu-22.04'
+      vmImage: 'ubuntu-24.04'
     steps:
       - script: |
           cd flink-python
diff --git a/tools/azure-pipelines/e2e-template.yml b/tools/azure-pipelines/e2e-template.yml
index ca0d6c89111..2806e11926c 100644
--- a/tools/azure-pipelines/e2e-template.yml
+++ b/tools/azure-pipelines/e2e-template.yml
@@ -48,6 +48,17 @@ jobs:
     - script: ./tools/azure-pipelines/free_disk_space.sh
       target: host
       displayName: Free up disk space
+    # Install upstream kubectl so we don't use the agent's FIPS-patched build.
+    - bash: |
+        set -euo pipefail
+        echo ">>> Installing upstream kubectl"
+        curl -sSL -o kubectl "https://dl.k8s.io/release/$(curl -sSL https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
+        sudo install -m 0755 kubectl /usr/local/bin/kubectl
+        echo ">>> kubectl on PATH:"
+        which kubectl
+        kubectl version --client=true -o=yaml
+      displayName: "Use upstream kubectl (avoid host FIPS build)"
+      condition: not(eq(variables['SKIP'], '1'))
    # the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the test haven't created it
    # this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group'
     - bash: |

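A possible follow-up to the kubectl step above, sketched here as an assumption and not part of this commit: pinning kubectl to an explicit release instead of resolving stable.txt at run time, which keeps e2e runs reproducible. The KUBECTL_VERSION value below is illustrative.

    - bash: |
        set -euo pipefail
        KUBECTL_VERSION="v1.31.0"   # illustrative pin; the commit resolves stable.txt instead
        curl -sSL -o kubectl "https://dl.k8s.io/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl"
        sudo install -m 0755 kubectl /usr/local/bin/kubectl
        kubectl version --client=true -o=yaml
      displayName: "Install pinned upstream kubectl (sketch)"
      condition: not(eq(variables['SKIP'], '1'))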