This is an automated email from the ASF dual-hosted git repository.

ruanhang1993 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new 48aed8418d6 [FLINK-38487][build] bump vmImage version to ubuntu-24.04 (#27095)
48aed8418d6 is described below

commit 48aed8418d6ab27b6dc12b101d3f438ff9005d9c
Author: Gabor Somogyi <[email protected]>
AuthorDate: Fri Oct 10 05:00:05 2025 +0200

    [FLINK-38487][build] bump vmImage version to ubuntu-24.04 (#27095)
---
 azure-pipelines.yml                           |  6 +++---
 tools/azure-pipelines/build-apache-repo.yml   | 18 +++++++++---------
 tools/azure-pipelines/build-nightly-dist.yml  |  4 ++--
 tools/azure-pipelines/build-python-wheels.yml |  2 +-
 tools/azure-pipelines/e2e-template.yml        | 11 +++++++++++
 5 files changed, 26 insertions(+), 15 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index ebc7e5289d5..8963f07b03f 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -73,16 +73,16 @@ stages:
         parameters: # see template file for a definition of the parameters.
           stage_name: ci_build
           test_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
           run_end_to_end: false
           container: flink-build-container
           jdk: 17
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-22.04'
+          vmImage: 'ubuntu-24.04'
         steps:
           - task: GoTool@0
             inputs:
diff --git a/tools/azure-pipelines/build-apache-repo.yml b/tools/azure-pipelines/build-apache-repo.yml
index 8c2eac1a0fa..5911d16a936 100644
--- a/tools/azure-pipelines/build-apache-repo.yml
+++ b/tools/azure-pipelines/build-apache-repo.yml
@@ -68,14 +68,14 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
           run_end_to_end: false
           container: flink-build-container
           jdk: 17
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-22.04'
+          vmImage: 'ubuntu-24.04'
         steps:
           # Skip docs check if this is a pull request that doesn't contain a documentation change
           - task: GoTool@0
@@ -112,9 +112,9 @@ stages:
         parameters:
           stage_name: cron_azure
           test_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
           run_end_to_end: true
           container: flink-build-container
@@ -125,7 +125,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"
           run_end_to_end: true
           container: flink-build-container
@@ -136,7 +136,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
           run_end_to_end: true
           container: flink-build-container
@@ -147,7 +147,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk21 -Pjava21-target"
           run_end_to_end: true
           container: flink-build-container
@@ -158,14 +158,14 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-22.04'
+            vmImage: 'ubuntu-24.04'
           environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk17 -Pjava17-target"
           run_end_to_end: true
           container: flink-build-container
           jdk: 17
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-22.04'
+          vmImage: 'ubuntu-24.04'
         steps:
           - task: GoTool@0
             inputs:
diff --git a/tools/azure-pipelines/build-nightly-dist.yml b/tools/azure-pipelines/build-nightly-dist.yml
index 61a5bb46b89..21420361106 100644
--- a/tools/azure-pipelines/build-nightly-dist.yml
+++ b/tools/azure-pipelines/build-nightly-dist.yml
@@ -19,7 +19,7 @@ parameters:
 jobs:
   - job: ${{parameters.stage_name}}_binary
     pool:
-      vmImage: 'ubuntu-22.04'
+      vmImage: 'ubuntu-24.04'
     container: flink-build-container
     workspace:
       clean: all
@@ -69,7 +69,7 @@ jobs:
       #    artifact: nightly-release
   - job: ${{parameters.stage_name}}_maven
     pool:
-      vmImage: 'ubuntu-22.04'
+      vmImage: 'ubuntu-24.04'
     container: flink-build-container
     timeoutInMinutes: 240
     workspace:
diff --git a/tools/azure-pipelines/build-python-wheels.yml b/tools/azure-pipelines/build-python-wheels.yml
index 1ec944b487b..418bdeb88c2 100644
--- a/tools/azure-pipelines/build-python-wheels.yml
+++ b/tools/azure-pipelines/build-python-wheels.yml
@@ -16,7 +16,7 @@
 jobs:
   - job: build_wheels_on_Linux
     pool:
-      vmImage: 'ubuntu-22.04'
+      vmImage: 'ubuntu-24.04'
     steps:
       - task: UsePythonVersion@0
         inputs:
diff --git a/tools/azure-pipelines/e2e-template.yml b/tools/azure-pipelines/e2e-template.yml
index 57e45dc86e7..3b1229af1a0 100644
--- a/tools/azure-pipelines/e2e-template.yml
+++ b/tools/azure-pipelines/e2e-template.yml
@@ -51,6 +51,17 @@ jobs:
     - task: UsePythonVersion@0
       inputs:
         versionSpec: '3.12'
+    # Install upstream kubectl so we don't use the agent's FIPS-patched build.
+    - bash: |
+        set -euo pipefail
+        echo ">>> Installing upstream kubectl"
+        curl -sSL -o kubectl "https://dl.k8s.io/release/$(curl -sSL https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
+        sudo install -m 0755 kubectl /usr/local/bin/kubectl
+        echo ">>> kubectl on PATH:"
+        which kubectl
+        kubectl version --client=true -o=yaml
+      displayName: "Use upstream kubectl (avoid host FIPS build)"
+      condition: not(eq(variables['SKIP'], '1'))
    # the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the test haven't created it
    # this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group'
     - bash: |
