This is an automated email from the ASF dual-hosted git repository.

zhonghongsheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 5202befdacd feat: dynamic matrix generation for e2e-sql CI workflow (#38274)
5202befdacd is described below

commit 5202befdacd748b08b439652d105d1f4373cb7b1
Author: Hongsheng Zhong <[email protected]>
AuthorDate: Sat Feb 28 19:33:11 2026 +0800

    feat: dynamic matrix generation for e2e-sql CI workflow (#38274)
---
 .github/workflows/e2e-sql.yml                      |  93 +++++---
 .../workflows/resources/filter/e2e-sql-filters.yml | 103 +++++++++
 .../resources/scripts/generate-e2e-sql-matrix.sh   | 254 +++++++++++++++++++++
 3 files changed, 420 insertions(+), 30 deletions(-)

diff --git a/.github/workflows/e2e-sql.yml b/.github/workflows/e2e-sql.yml
index 95e68fbb4b9..a44049abc67 100644
--- a/.github/workflows/e2e-sql.yml
+++ b/.github/workflows/e2e-sql.yml
@@ -53,10 +53,67 @@ jobs:
     name: Import Global Environment
     uses: ./.github/workflows/required-reusable.yml
 
+  detect-and-generate-matrix:
+    name: Detect Changes and Generate Matrix
+    runs-on: ubuntu-latest
+    timeout-minutes: 5
+    outputs:
+      matrix: ${{ steps.generate-matrix.outputs.matrix }}
+      has-jobs: ${{ steps.generate-matrix.outputs.has-jobs }}
+      need-proxy-image: ${{ steps.generate-matrix.outputs.need-proxy-image }}
+    steps:
+      - uses: actions/[email protected]
+      - id: filter
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
+        with:
+          filters: .github/workflows/resources/filter/e2e-sql-filters.yml
+      - id: generate-matrix
+        env:
+          FILTER_ADAPTER_PROXY: ${{ steps.filter.outputs.adapter_proxy }}
+          FILTER_ADAPTER_JDBC: ${{ steps.filter.outputs.adapter_jdbc }}
+          FILTER_MODE_STANDALONE: ${{ steps.filter.outputs.mode_standalone }}
+          FILTER_MODE_CLUSTER: ${{ steps.filter.outputs.mode_cluster }}
+          FILTER_MODE_CORE: ${{ steps.filter.outputs.mode_core }}
+          FILTER_DATABASE_MYSQL: ${{ steps.filter.outputs.database_mysql }}
+          FILTER_DATABASE_POSTGRESQL: ${{ steps.filter.outputs.database_postgresql }}
+          FILTER_FEATURE_SHARDING: ${{ steps.filter.outputs.feature_sharding }}
+          FILTER_FEATURE_ENCRYPT: ${{ steps.filter.outputs.feature_encrypt }}
+          FILTER_FEATURE_READWRITE_SPLITTING: ${{ steps.filter.outputs.feature_readwrite_splitting }}
+          FILTER_FEATURE_SHADOW: ${{ steps.filter.outputs.feature_shadow }}
+          FILTER_FEATURE_MASK: ${{ steps.filter.outputs.feature_mask }}
+          FILTER_FEATURE_BROADCAST: ${{ steps.filter.outputs.feature_broadcast }}
+          FILTER_FEATURE_DISTSQL: ${{ steps.filter.outputs.feature_distsql }}
+          FILTER_FEATURE_SQL_FEDERATION: ${{ steps.filter.outputs.feature_sql_federation }}
+          FILTER_CORE_INFRA: ${{ steps.filter.outputs.core_infra }}
+          FILTER_TEST_FRAMEWORK: ${{ steps.filter.outputs.test_framework }}
+          FILTER_POM_CHANGES: ${{ steps.filter.outputs.pom_changes }}
+        run: |
+          filters=$(jq -cn \
+            --arg adapter_proxy          "$FILTER_ADAPTER_PROXY" \
+            --arg adapter_jdbc           "$FILTER_ADAPTER_JDBC" \
+            --arg mode_standalone        "$FILTER_MODE_STANDALONE" \
+            --arg mode_cluster           "$FILTER_MODE_CLUSTER" \
+            --arg mode_core              "$FILTER_MODE_CORE" \
+            --arg database_mysql         "$FILTER_DATABASE_MYSQL" \
+            --arg database_postgresql    "$FILTER_DATABASE_POSTGRESQL" \
+            --arg feature_sharding       "$FILTER_FEATURE_SHARDING" \
+            --arg feature_encrypt        "$FILTER_FEATURE_ENCRYPT" \
+            --arg feature_readwrite_splitting "$FILTER_FEATURE_READWRITE_SPLITTING" \
+            --arg feature_shadow         "$FILTER_FEATURE_SHADOW" \
+            --arg feature_mask           "$FILTER_FEATURE_MASK" \
+            --arg feature_broadcast      "$FILTER_FEATURE_BROADCAST" \
+            --arg feature_distsql        "$FILTER_FEATURE_DISTSQL" \
+            --arg feature_sql_federation "$FILTER_FEATURE_SQL_FEDERATION" \
+            --arg core_infra             "$FILTER_CORE_INFRA" \
+            --arg test_framework         "$FILTER_TEST_FRAMEWORK" \
+            --arg pom_changes            "$FILTER_POM_CHANGES" \
+            '$ARGS.named')
+          bash .github/workflows/resources/scripts/generate-e2e-sql-matrix.sh "$filters"
+
   build-e2e-image:
     name: Build E2E Image
-    if: github.repository == 'apache/shardingsphere'
-    needs: global-environment
+    if: github.repository == 'apache/shardingsphere' && needs.detect-and-generate-matrix.outputs.has-jobs == 'true' && needs.detect-and-generate-matrix.outputs.need-proxy-image == 'true'
+    needs: [ global-environment, detect-and-generate-matrix ]
     runs-on: ubuntu-latest
     timeout-minutes: 40
     steps:
@@ -80,41 +137,17 @@ jobs:
           name: e2e-image
           path: /tmp/apache-shardingsphere-proxy-test.tar
           retention-days: 10
-  
+
   e2e-sql:
     name: E2E - SQL
-    needs: [ build-e2e-image, global-environment ]
-    if: github.repository == 'apache/shardingsphere'
+    needs: [ detect-and-generate-matrix, build-e2e-image, global-environment ]
+    if: always() && github.repository == 'apache/shardingsphere' && needs.detect-and-generate-matrix.outputs.has-jobs == 'true' && needs.detect-and-generate-matrix.result == 'success' && (needs.build-e2e-image.result == 'success' || needs.build-e2e-image.result == 'skipped')
     runs-on: ubuntu-latest
     timeout-minutes: 40
     strategy:
       max-parallel: 15
       fail-fast: false
-      matrix:
-        adapter: [ proxy, jdbc ]
-        mode: [ Standalone, Cluster ]
-        database: [ MySQL, PostgreSQL ]
-        scenario: [ empty_rules, distsql_rdl, passthrough, db, tbl, encrypt, readwrite_splitting, shadow, mask, dbtbl_with_readwrite_splitting, dbtbl_with_readwrite_splitting_and_encrypt, sharding_and_encrypt, encrypt_and_readwrite_splitting, encrypt_shadow, readwrite_splitting_and_shadow, sharding_and_shadow, sharding_encrypt_shadow, mask_encrypt, mask_sharding, mask_encrypt_sharding, db_tbl_sql_federation ]
-        additional-options: [ '' ]
-        include:
-          - adapter: proxy
-            database: MySQL
-            scenario: passthrough
-            additional-options: '-Dmysql-connector-java.version=8.3.0'
-        exclude:
-          - adapter: jdbc
-            scenario: passthrough
-          - adapter: jdbc
-            mode: Cluster
-          - adapter: proxy
-            mode: Standalone
-            scenario: empty_rules
-          - adapter: proxy
-            mode: Standalone
-            scenario: distsql_rdl
-          - adapter: proxy
-            mode: Standalone
-            scenario: passthrough
+      matrix: ${{ fromJSON(needs.detect-and-generate-matrix.outputs.matrix) }}
     steps:
       - uses: actions/[email protected]
       - uses: ./.github/workflows/resources/actions/setup-build-environment
diff --git a/.github/workflows/resources/filter/e2e-sql-filters.yml b/.github/workflows/resources/filter/e2e-sql-filters.yml
new file mode 100644
index 00000000000..8c9f9b6f1d2
--- /dev/null
+++ b/.github/workflows/resources/filter/e2e-sql-filters.yml
@@ -0,0 +1,103 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# ---- Adapter dimension ----
+adapter_proxy:
+  - 'proxy/**/src/main/**'
+  - 'distribution/proxy/**'
+  - '!distribution/proxy/src/main/release-docs/**'
+
+adapter_jdbc:
+  - 'jdbc/src/main/**'
+  - 'jdbc-dialect/**/src/main/**'
+
+# ---- Mode dimension ----
+mode_standalone:
+  - 'mode/type/standalone/**/src/main/**'
+
+mode_cluster:
+  - 'mode/type/cluster/**/src/main/**'
+
+mode_core:
+  - 'mode/core/**/src/main/**'
+  - 'mode/api/**/src/main/**'
+
+# ---- Database dimension ----
+database_mysql:
+  - 'parser/dialect/mysql/**/src/main/**'
+  - 'database/protocol/mysql/**/src/main/**'
+  - 'proxy/frontend/type/mysql/**/src/main/**'
+  - 'jdbc-dialect/mysql/**/src/main/**'
+
+database_postgresql:
+  - 'parser/dialect/postgresql/**/src/main/**'
+  - 'parser/dialect/opengauss/**/src/main/**'
+  - 'database/protocol/postgresql/**/src/main/**'
+  - 'database/protocol/opengauss/**/src/main/**'
+  - 'proxy/frontend/type/postgresql/**/src/main/**'
+  - 'proxy/frontend/type/opengauss/**/src/main/**'
+  - 'jdbc-dialect/postgresql/**/src/main/**'
+  - 'jdbc-dialect/opengauss/**/src/main/**'
+
+# ---- Feature / Scenario dimension ----
+feature_sharding:
+  - 'features/sharding/**/src/main/**'
+
+feature_encrypt:
+  - 'features/encrypt/**/src/main/**'
+
+feature_readwrite_splitting:
+  - 'features/readwrite-splitting/**/src/main/**'
+
+feature_shadow:
+  - 'features/shadow/**/src/main/**'
+
+feature_mask:
+  - 'features/mask/**/src/main/**'
+
+feature_broadcast:
+  - 'features/broadcast/**/src/main/**'
+
+feature_distsql:
+  - '**/*-distsql*/**/src/main/**'
+
+feature_sql_federation:
+  - 'kernel/sql-federation/**/src/main/**'
+
+# ---- Core / Infra (full fallback triggers) ----
+core_infra:
+  - 'infra/**/src/main/**'
+  - 'parser/core/**/src/main/**'
+  - 'database/connector/core/**/src/main/**'
+  - 'database/exception/**/src/main/**'
+  - 'kernel/authority/**/src/main/**'
+  - 'kernel/logging/**/src/main/**'
+  - 'kernel/metadata/**/src/main/**'
+  - 'kernel/single/**/src/main/**'
+  - 'kernel/sql-parser/**/src/main/**'
+
+# ---- Test framework changes (full fallback triggers) ----
+test_framework:
+  - '.github/workflows/e2e-sql.yml'
+  - 'test/pom.xml'
+  - 'test/e2e/fixture/**'
+  - 'test/e2e/env/**'
+  - 'test/e2e/sql/**'
+
+# ---- POM changes (full fallback triggers) ----
+pom_changes:
+  - '**/pom.xml'
diff --git a/.github/workflows/resources/scripts/generate-e2e-sql-matrix.sh b/.github/workflows/resources/scripts/generate-e2e-sql-matrix.sh
new file mode 100755
index 00000000000..abe9ba6842f
--- /dev/null
+++ b/.github/workflows/resources/scripts/generate-e2e-sql-matrix.sh
@@ -0,0 +1,254 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Usage: generate-e2e-sql-matrix.sh '<json-with-all-18-filter-labels>'
+# Output: writes matrix=<JSON>, has-jobs=<true|false>, and need-proxy-image=<true|false> to $GITHUB_OUTPUT
+
+set -euo pipefail
+
+FILTERS_JSON="$1"
+
+get_filter() {
+  echo "$FILTERS_JSON" | jq -r ".$1"
+}
+
+# Read all 18 filter labels
+adapter_proxy=$(get_filter adapter_proxy)
+adapter_jdbc=$(get_filter adapter_jdbc)
+mode_standalone=$(get_filter mode_standalone)
+mode_cluster=$(get_filter mode_cluster)
+mode_core=$(get_filter mode_core)
+database_mysql=$(get_filter database_mysql)
+database_postgresql=$(get_filter database_postgresql)
+feature_sharding=$(get_filter feature_sharding)
+feature_encrypt=$(get_filter feature_encrypt)
+feature_readwrite_splitting=$(get_filter feature_readwrite_splitting)
+feature_shadow=$(get_filter feature_shadow)
+feature_mask=$(get_filter feature_mask)
+feature_broadcast=$(get_filter feature_broadcast)
+feature_distsql=$(get_filter feature_distsql)
+feature_sql_federation=$(get_filter feature_sql_federation)
+core_infra=$(get_filter core_infra)
+test_framework=$(get_filter test_framework)
+pom_changes=$(get_filter pom_changes)
+
+ALL_SCENARIOS=$(jq -cn '[
+  "empty_rules", "distsql_rdl", "passthrough",
+  "db", "tbl", "encrypt", "readwrite_splitting",
+  "shadow", "mask",
+  "dbtbl_with_readwrite_splitting",
+  "dbtbl_with_readwrite_splitting_and_encrypt",
+  "sharding_and_encrypt", "encrypt_and_readwrite_splitting",
+  "encrypt_shadow", "readwrite_splitting_and_shadow",
+  "sharding_and_shadow", "sharding_encrypt_shadow",
+  "mask_encrypt", "mask_sharding", "mask_encrypt_sharding",
+  "db_tbl_sql_federation"
+]')
+
+ALL_ADAPTERS='["proxy","jdbc"]'
+ALL_MODES='["Standalone","Cluster"]'
+ALL_DATABASES='["MySQL","PostgreSQL"]'
+SMOKE_SCENARIOS='["empty_rules","db","tbl","encrypt","readwrite_splitting","passthrough"]'
+
+# Build matrix JSON from dimension arrays and scenarios, applying exclude/include rules
+build_matrix() {
+  local adapters="$1"
+  local modes="$2"
+  local databases="$3"
+  local scenarios="$4"
+
+  jq -cn \
+    --argjson adapters "$adapters" \
+    --argjson modes "$modes" \
+    --argjson databases "$databases" \
+    --argjson scenarios "$scenarios" \
+    '
+    def should_exclude(adapter; mode; scenario):
+      (adapter == "jdbc" and scenario == "passthrough") or
+      (adapter == "jdbc" and mode == "Cluster") or
+      (adapter == "proxy" and mode == "Standalone" and
+        (scenario == "empty_rules" or scenario == "distsql_rdl" or scenario == "passthrough"));
+
+    [
+      $adapters[] as $adapter |
+      $modes[] as $mode |
+      $databases[] as $database |
+      $scenarios[] as $scenario |
+      select(should_exclude($adapter; $mode; $scenario) | not) |
+      {adapter: $adapter, mode: $mode, database: $database, scenario: $scenario, "additional-options": ""}
+    ] as $base_jobs |
+
+    ([$base_jobs[] | select(.adapter == "proxy" and .mode == "Cluster")] | length > 0) as $has_proxy_cluster |
+    ($scenarios | map(select(. == "passthrough")) | length > 0) as $has_passthrough |
+
+    (if $has_proxy_cluster and $has_passthrough
+     then [{adapter:"proxy", mode:"Cluster", database:"MySQL", scenario:"passthrough", "additional-options":"-Dmysql-connector-java.version=8.3.0"}]
+     else []
+     end) as $extra_job |
+
+    {include: ($base_jobs + $extra_job)}
+    '
+}
+
+# Full fallback: run the entire matrix
+if [ "$core_infra" = "true" ] || [ "$test_framework" = "true" ] || [ "$pom_changes" = "true" ]; then
+  MATRIX=$(build_matrix "$ALL_ADAPTERS" "$ALL_MODES" "$ALL_DATABASES" "$ALL_SCENARIOS")
+  echo "matrix=$(echo "$MATRIX" | jq -c .)" >> "$GITHUB_OUTPUT"
+  echo "has-jobs=true" >> "$GITHUB_OUTPUT"
+  echo "need-proxy-image=true" >> "$GITHUB_OUTPUT"
+  exit 0
+fi
+
+# Check whether any relevant dimension changed at all
+any_relevant_change=false
+if [ "$feature_sharding" = "true" ] || [ "$feature_encrypt" = "true" ] || \
+   [ "$feature_readwrite_splitting" = "true" ] || [ "$feature_shadow" = "true" ] || \
+   [ "$feature_mask" = "true" ] || [ "$feature_broadcast" = "true" ] || \
+   [ "$feature_distsql" = "true" ] || [ "$feature_sql_federation" = "true" ] || \
+   [ "$mode_standalone" = "true" ] || [ "$mode_cluster" = "true" ] || [ "$mode_core" = "true" ] || \
+   [ "$database_mysql" = "true" ] || [ "$database_postgresql" = "true" ] || \
+   [ "$adapter_proxy" = "true" ] || [ "$adapter_jdbc" = "true" ]; then
+  any_relevant_change=true
+fi
+
+if [ "$any_relevant_change" = "false" ]; then
+  echo "matrix={\"include\":[]}" >> "$GITHUB_OUTPUT"
+  echo "has-jobs=false" >> "$GITHUB_OUTPUT"
+  echo "need-proxy-image=false" >> "$GITHUB_OUTPUT"
+  exit 0
+fi
+
+# Determine adapters
+if [ "$adapter_proxy" = "true" ] && [ "$adapter_jdbc" = "false" ]; then
+  adapters='["proxy"]'
+elif [ "$adapter_jdbc" = "true" ] && [ "$adapter_proxy" = "false" ]; then
+  adapters='["jdbc"]'
+else
+  adapters="$ALL_ADAPTERS"
+fi
+
+# Determine modes
+if [ "$mode_standalone" = "true" ] && [ "$mode_cluster" = "false" ] && [ "$mode_core" = "false" ]; then
+  modes='["Standalone"]'
+elif [ "$mode_cluster" = "true" ] && [ "$mode_standalone" = "false" ] && [ "$mode_core" = "false" ]; then
+  modes='["Cluster"]'
+else
+  modes="$ALL_MODES"
+fi
+
+# Determine databases
+if [ "$database_mysql" = "true" ] && [ "$database_postgresql" = "false" ]; then
+  databases='["MySQL"]'
+elif [ "$database_postgresql" = "true" ] && [ "$database_mysql" = "false" ]; then
+  databases='["PostgreSQL"]'
+else
+  databases="$ALL_DATABASES"
+fi
+
+# Determine scenarios from feature labels
+any_feature_triggered=false
+scenarios_set=()
+
+add_scenario() {
+  local s="$1"
+  for existing in "${scenarios_set[@]+"${scenarios_set[@]}"}"; do
+    [ "$existing" = "$s" ] && return
+  done
+  scenarios_set+=("$s")
+}
+
+if [ "$feature_sharding" = "true" ]; then
+  any_feature_triggered=true
+  for s in db tbl dbtbl_with_readwrite_splitting dbtbl_with_readwrite_splitting_and_encrypt \
+            sharding_and_encrypt sharding_and_shadow sharding_encrypt_shadow \
+            mask_sharding mask_encrypt_sharding db_tbl_sql_federation; do
+    add_scenario "$s"
+  done
+fi
+
+if [ "$feature_encrypt" = "true" ]; then
+  any_feature_triggered=true
+  for s in encrypt dbtbl_with_readwrite_splitting_and_encrypt sharding_and_encrypt \
+            encrypt_and_readwrite_splitting encrypt_shadow sharding_encrypt_shadow \
+            mask_encrypt mask_encrypt_sharding; do
+    add_scenario "$s"
+  done
+fi
+
+if [ "$feature_readwrite_splitting" = "true" ]; then
+  any_feature_triggered=true
+  for s in readwrite_splitting dbtbl_with_readwrite_splitting \
+            dbtbl_with_readwrite_splitting_and_encrypt encrypt_and_readwrite_splitting \
+            readwrite_splitting_and_shadow; do
+    add_scenario "$s"
+  done
+fi
+
+if [ "$feature_shadow" = "true" ]; then
+  any_feature_triggered=true
+  for s in shadow encrypt_shadow readwrite_splitting_and_shadow sharding_and_shadow \
+            sharding_encrypt_shadow; do
+    add_scenario "$s"
+  done
+fi
+
+if [ "$feature_mask" = "true" ]; then
+  any_feature_triggered=true
+  for s in mask mask_encrypt mask_sharding mask_encrypt_sharding; do
+    add_scenario "$s"
+  done
+fi
+
+if [ "$feature_distsql" = "true" ]; then
+  any_feature_triggered=true
+  add_scenario "distsql_rdl"
+fi
+
+if [ "$feature_sql_federation" = "true" ]; then
+  any_feature_triggered=true
+  add_scenario "db_tbl_sql_federation"
+fi
+
+if [ "$feature_broadcast" = "true" ]; then
+  any_feature_triggered=true
+  add_scenario "empty_rules"
+fi
+
+# If no feature triggered, use core smoke scenario set
+if [ "$any_feature_triggered" = "false" ]; then
+  scenarios_json="$SMOKE_SCENARIOS"
+else
+  scenarios_json=$(printf '%s\n' "${scenarios_set[@]}" | jq -R . | jq -sc .)
+fi
+
+MATRIX=$(build_matrix "$adapters" "$modes" "$databases" "$scenarios_json")
+
+JOB_COUNT=$(echo "$MATRIX" | jq '.include | length')
+
+if [ "$JOB_COUNT" -eq 0 ]; then
+  echo "matrix={\"include\":[]}" >> "$GITHUB_OUTPUT"
+  echo "has-jobs=false" >> "$GITHUB_OUTPUT"
+  echo "need-proxy-image=false" >> "$GITHUB_OUTPUT"
+  exit 0
+fi
+
+HAS_PROXY=$(echo "$MATRIX" | jq '[.include[] | select(.adapter == "proxy")] | length > 0')
+
+echo "matrix=$(echo "$MATRIX" | jq -c .)" >> "$GITHUB_OUTPUT"
+echo "has-jobs=true" >> "$GITHUB_OUTPUT"
+echo "need-proxy-image=$HAS_PROXY" >> "$GITHUB_OUTPUT"

Reply via email to