https://github.com/boomanaiden154 created https://github.com/llvm/llvm-project/pull/150949

This patch backports all of the recent CI changes to the release branch, which 
will get the CI functioning again. The backport also includes a couple of 
refactorings; these will likely be necessary for backporting future patches 
anyway. The refactorings are relatively safe: they have already been 
extensively tested on main and only affect the CI.
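
For reviewers who want to sanity-check the compute_projects refactoring before
merging, the sketch below illustrates what the new META_PROJECTS/SKIP_PROJECTS
mapping is meant to do. It is a simplified illustration, not part of the patch:
it ignores the "*" wildcard handling that the real _path_matches supports, and
the projects_for helper name is made up here. The CIR example path is the one
used by the new test_cir test.

    import pathlib

    # Simplified mirror of the mapping added in .ci/compute_projects.py.
    META_PROJECTS = {
        ("clang", "lib", "CIR"): "CIR",
        ("llvm", "utils", "gn"): "gn",
    }
    SKIP_PROJECTS = ["docs", "gn"]

    def projects_for(modified_file):
        parts = pathlib.Path(modified_file).parts
        projects = set()
        for prefix, project in META_PROJECTS.items():
            # Plain prefix match; the real implementation also handles "*".
            if parts[: len(prefix)] == prefix:
                if project in SKIP_PROJECTS:
                    # e.g. gn and docs changes do not trigger premerge testing.
                    return set()
                projects.add(project)
        # The top-level directory always counts as a modified project.
        projects.add(parts[0])
        return projects

    print(projects_for("clang/lib/CIR/CMakeLists.txt"))  # expect {'CIR', 'clang'}
    print(projects_for("llvm/utils/gn/build.gn"))        # expect set()

The full behavior is covered by the new unit tests; running
"python .ci/compute_projects_test.py" from a checkout of this branch should
exercise them.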

From b00e0d2a7e21468b20487a4f5f122817cf5de540 Mon Sep 17 00:00:00 2001
From: Aiden Grossman <aidengross...@google.com>
Date: Mon, 28 Jul 2025 13:21:44 +0000
Subject: [PATCH] [CI] Backport recent changes to release branch

This patch backports all of the recent CI changes to the release branch, which
will get the CI functioning again. The backport also includes a couple of
refactorings; these will likely be necessary for backporting future patches
anyway. The refactorings are relatively safe: they have already been
extensively tested on main and only affect the CI.
---
 .ci/compute_projects.py         | 65 ++++++++++++++++++----
 .ci/compute_projects_test.py    | 98 ++++++++++++++++++++++++++++++++-
 .ci/metrics/metrics.py          | 10 ++++
 .ci/metrics/metrics_test.py     | 75 +++++++++++++++++++++++++
 .ci/monolithic-linux.sh         | 14 ++---
 .ci/monolithic-windows.sh       |  5 --
 .github/workflows/premerge.yaml | 28 ++++++----
 7 files changed, 256 insertions(+), 39 deletions(-)
 create mode 100644 .ci/metrics/metrics_test.py

diff --git a/.ci/compute_projects.py b/.ci/compute_projects.py
index c3cf714ce6c10..2dc5629328457 100644
--- a/.ci/compute_projects.py
+++ b/.ci/compute_projects.py
@@ -19,6 +19,7 @@
 PROJECT_DEPENDENCIES = {
     "llvm": set(),
     "clang": {"llvm"},
+    "CIR": {"clang", "mlir"},
     "bolt": {"clang", "lld", "llvm"},
     "clang-tools-extra": {"clang", "llvm"},
     "compiler-rt": {"clang", "lld"},
@@ -55,6 +56,7 @@
     ".ci": {
         "llvm",
         "clang",
+        "CIR",
         "lld",
         "lldb",
         "bolt",
@@ -128,6 +130,7 @@
     "lldb": "check-lldb",
     "llvm": "check-llvm",
     "clang": "check-clang",
+    "CIR": "check-clang-cir",
     "bolt": "check-bolt",
     "lld": "check-lld",
     "flang": "check-flang",
@@ -141,6 +144,23 @@
 
 RUNTIMES = {"libcxx", "libcxxabi", "libunwind", "compiler-rt", "libc"}
 
+# Meta projects are projects that need explicit handling but do not reside
+# in their own top level folder. To add a meta project, the start of the path
+# for the metaproject should be mapped to the name of the project below.
+# Multiple paths can map to the same metaproject.
+META_PROJECTS = {
+    ("clang", "lib", "CIR"): "CIR",
+    ("clang", "test", "CIR"): "CIR",
+    ("clang", "include", "clang", "CIR"): "CIR",
+    ("*", "docs"): "docs",
+    ("llvm", "utils", "gn"): "gn",
+    (".github", "workflows", "premerge.yaml"): ".ci",
+    ("third-party",): ".ci",
+}
+
+# Projects that should not run any tests. These need to be metaprojects.
+SKIP_PROJECTS = ["docs", "gn"]
+
 
 def _add_dependencies(projects: Set[str], runtimes: Set[str]) -> Set[str]:
     projects_with_dependents = set(projects)
@@ -233,21 +253,34 @@ def _compute_runtimes_to_build(
     return _exclude_projects(runtimes_to_build, platform)
 
 
+def _path_matches(matcher: tuple[str], file_path: tuple[str]) -> bool:
+    if len(file_path) < len(matcher):
+        return False
+    for match_part, file_part in zip(matcher, file_path):
+        if match_part == "*" or file_part == "*":
+            continue
+        if match_part != file_part:
+            return False
+    return True
+
+
+def _get_modified_projects_for_file(modified_file: str) -> Set[str]:
+    modified_projects = set()
+    path_parts = pathlib.Path(modified_file).parts
+    for meta_project_files in META_PROJECTS.keys():
+        if _path_matches(meta_project_files, path_parts):
+            meta_project = META_PROJECTS[meta_project_files]
+            if meta_project in SKIP_PROJECTS:
+                return set()
+            modified_projects.add(meta_project)
+    modified_projects.add(pathlib.Path(modified_file).parts[0])
+    return modified_projects
+
+
 def _get_modified_projects(modified_files: list[str]) -> Set[str]:
     modified_projects = set()
     for modified_file in modified_files:
-        path_parts = pathlib.Path(modified_file).parts
-        # Exclude files in the docs directory. They do not impact an test
-        # targets and there is a separate workflow used for ensuring the
-        # documentation builds.
-        if len(path_parts) > 2 and path_parts[1] == "docs":
-            continue
-        # Exclude files for the gn build. We do not test it within premerge
-        # and changes occur often enough that they otherwise take up
-        # capacity.
-        if len(path_parts) > 3 and path_parts[:3] == ("llvm", "utils", "gn"):
-            continue
-        modified_projects.add(pathlib.Path(modified_file).parts[0])
+        modified_projects.update(_get_modified_projects_for_file(modified_file))
     return modified_projects
 
 
@@ -267,6 +300,13 @@ def get_env_variables(modified_files: list[str], platform: str) -> Set[str]:
     runtimes_check_targets_needs_reconfig = _compute_project_check_targets(
         runtimes_to_test_needs_reconfig
     )
+
+    # CIR is used as a pseudo-project in this script. It is built as part of the
+    # clang build, but it requires an explicit option to enable. We set that
+    # option here, and remove it from the projects_to_build list.
+    enable_cir = "ON" if "CIR" in projects_to_build else "OFF"
+    projects_to_build.discard("CIR")
+
     # We use a semicolon to separate the projects/runtimes as they get passed
     # to the CMake invocation and thus we need to use the CMake list separator
     # (;). We use spaces to separate the check targets as they end up getting
@@ -279,6 +319,7 @@ def get_env_variables(modified_files: list[str], platform: str) -> Set[str]:
         "runtimes_check_targets_needs_reconfig": " ".join(
             sorted(runtimes_check_targets_needs_reconfig)
         ),
+        "enable_cir": enable_cir,
     }
 
 
diff --git a/.ci/compute_projects_test.py b/.ci/compute_projects_test.py
index 6299931e1ec34..11c4aea9b4e35 100644
--- a/.ci/compute_projects_test.py
+++ b/.ci/compute_projects_test.py
@@ -1,7 +1,7 @@
 # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 # See https://llvm.org/LICENSE.txt for license information.
 # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
-"""Does some stuff."""
+"""Tests for compute_projects.py"""
 
 import unittest
 
@@ -104,6 +104,10 @@ def test_clang(self):
             env_variables["runtimes_check_targets_needs_reconfig"],
             "check-cxx check-cxxabi check-unwind",
         )
+        self.assertEqual(
+            env_variables["enable_cir"],
+            "OFF",
+        )
 
     def test_clang_windows(self):
         env_variables = compute_projects.get_env_variables(
@@ -126,6 +130,32 @@ def test_clang_windows(self):
             env_variables["runtimes_check_targets_needs_reconfig"],
             "check-cxx check-cxxabi check-unwind",
         )
+        self.assertEqual(env_variables["enable_cir"], "OFF")
+
+    def test_cir(self):
+        env_variables = compute_projects.get_env_variables(
+            ["clang/lib/CIR/CMakeLists.txt"], "Linux"
+        )
+        self.assertEqual(
+            env_variables["projects_to_build"],
+            "clang;clang-tools-extra;lld;llvm;mlir",
+        )
+        self.assertEqual(
+            env_variables["project_check_targets"],
+            "check-clang check-clang-cir check-clang-tools",
+        )
+        self.assertEqual(
+            env_variables["runtimes_to_build"], "compiler-rt;libcxx;libcxxabi;libunwind"
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets"],
+            "check-compiler-rt",
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets_needs_reconfig"],
+            "check-cxx check-cxxabi check-unwind",
+        )
+        self.assertEqual(env_variables["enable_cir"], "ON")
 
     def test_bolt(self):
         env_variables = compute_projects.get_env_variables(
@@ -158,6 +188,7 @@ def test_mlir(self):
         self.assertEqual(env_variables["runtimes_to_build"], "")
         self.assertEqual(env_variables["runtimes_check_targets"], "")
         self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
+        self.assertEqual(env_variables["enable_cir"], "OFF")
 
     def test_flang(self):
         env_variables = compute_projects.get_env_variables(
@@ -168,10 +199,11 @@ def test_flang(self):
         self.assertEqual(env_variables["runtimes_to_build"], "")
         self.assertEqual(env_variables["runtimes_check_targets"], "")
         self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
+        self.assertEqual(env_variables["enable_cir"], "OFF")
 
     def test_invalid_subproject(self):
         env_variables = compute_projects.get_env_variables(
-            ["third-party/benchmark/CMakeLists.txt"], "Linux"
+            ["llvm-libgcc/CMakeLists.txt"], "Linux"
         )
         self.assertEqual(env_variables["projects_to_build"], "")
         self.assertEqual(env_variables["project_check_targets"], "")
@@ -237,7 +269,7 @@ def test_ci(self):
         )
         self.assertEqual(
             env_variables["project_check_targets"],
-            "check-bolt check-clang check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly",
+            "check-bolt check-clang check-clang-cir check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly",
         )
         self.assertEqual(
             env_variables["runtimes_to_build"],
@@ -276,6 +308,66 @@ def test_clang_tools_extra(self):
         self.assertEqual(env_variables["runtimes_check_targets"], "check-libc")
         self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
 
+    def test_premerge_workflow(self):
+        env_variables = compute_projects.get_env_variables(
+            [".github/workflows/premerge.yaml"], "Linux"
+        )
+        self.assertEqual(
+            env_variables["projects_to_build"],
+            "bolt;clang;clang-tools-extra;flang;libclc;lld;lldb;llvm;mlir;polly",
+        )
+        self.assertEqual(
+            env_variables["project_check_targets"],
+            "check-bolt check-clang check-clang-cir check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly",
+        )
+        self.assertEqual(
+            env_variables["runtimes_to_build"],
+            "compiler-rt;libc;libcxx;libcxxabi;libunwind",
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets"],
+            "check-compiler-rt check-libc",
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets_needs_reconfig"],
+            "check-cxx check-cxxabi check-unwind",
+        )
+
+    def test_other_github_workflow(self):
+        env_variables = compute_projects.get_env_variables(
+            [".github/workflows/docs.yml"], "Linux"
+        )
+        self.assertEqual(env_variables["projects_to_build"], "")
+        self.assertEqual(env_variables["project_check_targets"], "")
+        self.assertEqual(env_variables["runtimes_to_build"], "")
+        self.assertEqual(env_variables["runtimes_check_targets"], "")
+        self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
+
+    def test_third_party_benchmark(self):
+        env_variables = compute_projects.get_env_variables(
+            ["third-party/benchmark/CMakeLists.txt"], "Linux"
+        )
+        self.assertEqual(
+            env_variables["projects_to_build"],
+            "bolt;clang;clang-tools-extra;flang;libclc;lld;lldb;llvm;mlir;polly",
+        )
+        self.assertEqual(
+            env_variables["project_check_targets"],
+            "check-bolt check-clang check-clang-cir check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly",
+        )
+        self.assertEqual(
+            env_variables["runtimes_to_build"],
+            "compiler-rt;libc;libcxx;libcxxabi;libunwind",
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets"],
+            "check-compiler-rt check-libc",
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets_needs_reconfig"],
+            "check-cxx check-cxxabi check-unwind",
+        )
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/.ci/metrics/metrics.py b/.ci/metrics/metrics.py
index 143e6ab4cf46a..26fdeef1913ab 100644
--- a/.ci/metrics/metrics.py
+++ b/.ci/metrics/metrics.py
@@ -1,3 +1,13 @@
+# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+# See https://llvm.org/LICENSE.txt for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+"""Collects Github metrics and uploads them to Grafana.
+
+This script contains machinery that will pull metrics periodically from Github
+about workflow runs. It will upload the collected metrics to the specified
+Grafana instance.
+"""
+
 import collections
 import datetime
 import github
diff --git a/.ci/metrics/metrics_test.py b/.ci/metrics/metrics_test.py
new file mode 100644
index 0000000000000..259e55f817939
--- /dev/null
+++ b/.ci/metrics/metrics_test.py
@@ -0,0 +1,75 @@
+# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+# See https://llvm.org/LICENSE.txt for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+"""Tests for metrics.py"""
+
+from dataclasses import dataclass
+import requests
+import unittest
+import unittest.mock
+
+import metrics
+
+
+class TestMetrics(unittest.TestCase):
+    def test_upload_gauge_metric(self):
+        """Test that we can upload a gauge metric correctly.
+
+        Also verify that we pass around parameters like API keys and user IDs
+        correctly to the HTTP POST request.
+        """
+        test_metrics = [metrics.GaugeMetric("gauge_test", 5, 1000)]
+        return_value = requests.Response()
+        return_value.status_code = 204
+        with unittest.mock.patch(
+            "requests.post", return_value=return_value
+        ) as post_mock:
+            metrics.upload_metrics(test_metrics, "test_userid", "test_api_key")
+            self.assertSequenceEqual(post_mock.call_args.args, [metrics.GRAFANA_URL])
+            self.assertEqual(
+                post_mock.call_args.kwargs["data"], "gauge_test value=5 1000"
+            )
+            self.assertEqual(
+                post_mock.call_args.kwargs["auth"], ("test_userid", "test_api_key")
+            )
+
+    def test_upload_job_metric(self):
+        """Test that we can upload a job metric correctly."""
+        test_metrics = [
+            metrics.JobMetrics("test_job", 5, 10, 1, 1000, 7, "test_workflow")
+        ]
+        return_value = requests.Response()
+        return_value.status_code = 204
+        with unittest.mock.patch(
+            "requests.post", return_value=return_value
+        ) as post_mock:
+            metrics.upload_metrics(test_metrics, "test_userid", "test_api_key")
+            self.assertEqual(
+                post_mock.call_args.kwargs["data"],
+                "test_job queue_time=5,run_time=10,status=1 1000",
+            )
+
+    def test_upload_unknown_metric(self):
+        """Test we report an error if we encounter an unknown metric type."""
+
+        @dataclass
+        class FakeMetric:
+            fake_data: str
+
+        test_metrics = [FakeMetric("test")]
+
+        with self.assertRaises(ValueError):
+            metrics.upload_metrics(test_metrics, "test_userid", "test_api_key")
+
+    def test_bad_response_code(self):
+        """Test that we gracefully handle HTTP response errors."""
+        test_metrics = [metrics.GaugeMetric("gauge_test", 5, 1000)]
+        return_value = requests.Response()
+        return_value.status_code = 403
+        # Just assert that we continue running here and do not raise anything.
+        with unittest.mock.patch("requests.post", return_value=return_value) as _:
+            metrics.upload_metrics(test_metrics, "test_userid", "test_api_key")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/.ci/monolithic-linux.sh b/.ci/monolithic-linux.sh
index 8d1faab13986c..6db24d894eb73 100755
--- a/.ci/monolithic-linux.sh
+++ b/.ci/monolithic-linux.sh
@@ -21,12 +21,7 @@ BUILD_DIR="${BUILD_DIR:=${MONOREPO_ROOT}/build}"
 INSTALL_DIR="${BUILD_DIR}/install"
 rm -rf "${BUILD_DIR}"
 
-ccache --zero-stats
-
-if [[ -n "${CLEAR_CACHE:-}" ]]; then
-  echo "clearing cache"
-  ccache --clear
-fi
+sccache --zero-stats
 
 mkdir -p artifacts/reproducers
 
@@ -36,7 +31,7 @@ export CLANG_CRASH_DIAGNOSTICS_DIR=`realpath artifacts/reproducers`
 function at-exit {
   retcode=$?
 
-  ccache --print-stats > artifacts/ccache_stats.txt
+  sccache --show-stats > artifacts/sccache_stats.txt
   cp "${BUILD_DIR}"/.ninja_log artifacts/.ninja_log
   cp "${BUILD_DIR}"/test-results.*.xml artifacts/ || :
 
@@ -53,6 +48,7 @@ targets="${2}"
 runtimes="${3}"
 runtime_targets="${4}"
 runtime_targets_needs_reconfig="${5}"
+enable_cir="${6}"
 
 lit_args="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-output-file-name --timeout=1200 --time-tests"
 
@@ -72,13 +68,15 @@ cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \
       -G Ninja \
       -D CMAKE_PREFIX_PATH="${HOME}/.local" \
       -D CMAKE_BUILD_TYPE=Release \
+      -D CLANG_ENABLE_CIR=${enable_cir} \
       -D LLVM_ENABLE_ASSERTIONS=ON \
       -D LLVM_BUILD_EXAMPLES=ON \
       -D COMPILER_RT_BUILD_LIBFUZZER=OFF \
       -D LLVM_LIT_ARGS="${lit_args}" \
       -D LLVM_ENABLE_LLD=ON \
       -D CMAKE_CXX_FLAGS=-gmlt \
-      -D LLVM_CCACHE_BUILD=ON \
+      -D CMAKE_C_COMPILER_LAUNCHER=sccache \
+      -D CMAKE_CXX_COMPILER_LAUNCHER=sccache \
       -D LIBCXX_CXX_ABI=libcxxabi \
       -D MLIR_ENABLE_BINDINGS_PYTHON=ON \
       -D LLDB_ENABLE_PYTHON=ON \
diff --git a/.ci/monolithic-windows.sh b/.ci/monolithic-windows.sh
index 176350fac604c..50a741677d734 100755
--- a/.ci/monolithic-windows.sh
+++ b/.ci/monolithic-windows.sh
@@ -21,11 +21,6 @@ BUILD_DIR="${BUILD_DIR:=${MONOREPO_ROOT}/build}"
 
 rm -rf "${BUILD_DIR}"
 
-if [[ -n "${CLEAR_CACHE:-}" ]]; then
-  echo "clearing sccache"
-  rm -rf "$SCCACHE_DIR"
-fi
-
 sccache --zero-stats
 function at-exit {
   retcode=$?
diff --git a/.github/workflows/premerge.yaml b/.github/workflows/premerge.yaml
index ff63355222065..d0518fa6879e2 100644
--- a/.github/workflows/premerge.yaml
+++ b/.github/workflows/premerge.yaml
@@ -34,10 +34,6 @@ jobs:
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 2
-      - name: Setup ccache
-        uses: hendrikmuhs/ccache-action@a1209f81afb8c005c13b4296c32e363431bffea5 # v1.2.17
-        with:
-          max-size: "2000M"
       - name: Build and Test
         # Mark the job as a success even if the step fails so that people do
         # not get notified while the new premerge pipeline is in an
@@ -61,7 +57,20 @@ jobs:
           export CC=/opt/llvm/bin/clang
           export CXX=/opt/llvm/bin/clang++
 
-          ./.ci/monolithic-linux.sh "${projects_to_build}" "${project_check_targets}" "${runtimes_to_build}" "${runtimes_check_targets}" "${runtimes_check_targets_needs_reconfig}"
+          # This environment variable is passed into the container through the
+          # runner pod definition. It differs between our two clusters, which
+          # is why we do not hardcode it.
+          export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET
+          export SCCACHE_GCS_RW_MODE=READ_WRITE
+
+          # Set the idle timeout to zero to ensure sccache runs for the
+          # entire duration of the job. Otherwise it might stop if we run
+          # several test suites in a row and discard statistics that we want
+          # to save in the end.
+          export SCCACHE_IDLE_TIMEOUT=0
+          sccache --start-server
+
+          ./.ci/monolithic-linux.sh "${projects_to_build}" "${project_check_targets}" "${runtimes_to_build}" "${runtimes_check_targets}" "${runtimes_check_targets_needs_reconfig}" "${enable_cir}"
       - name: Upload Artifacts
         if: '!cancelled()'
        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
@@ -85,11 +94,6 @@ jobs:
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 2
-      - name: Setup ccache
-        uses: hendrikmuhs/ccache-action@a1209f81afb8c005c13b4296c32e363431bffea5 # v1.2.17
-        with:
-          variant: "sccache"
-          max-size: "2000M"
       - name: Compute Projects
         id: vars
         run: |
@@ -112,7 +116,9 @@ jobs:
         shell: cmd
         run: |
          call C:\\BuildTools\\Common7\\Tools\\VsDevCmd.bat -arch=amd64 -host_arch=amd64
-          bash .ci/monolithic-windows.sh "${{ steps.vars.outputs.windows-projects }}" "${{ steps.vars.outputs.windows-check-targets }}"
+          # See the comments above in the Linux job for why we define each of
+          # these environment variables.
+          bash -c "export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET; export SCCACHE_GCS_RW_MODE=READ_WRITE; export SCCACHE_IDLE_TIMEOUT=0; sccache --start-server; .ci/monolithic-windows.sh \"${{ steps.vars.outputs.windows-projects }}\" \"${{ steps.vars.outputs.windows-check-targets }}\""
       - name: Upload Artifacts
         if: '!cancelled()'
        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
