This is an automated email from the ASF dual-hosted git repository.

tqchen pushed a commit to branch refactor
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 4cf29af08eb584ce7bb6b6081f7e4bdf4816ac48
Author: tqchen <[email protected]>
AuthorDate: Sat Feb 15 19:37:13 2025 -0500

    Simplify CI to focus on UT
    
    The main rationale is that we should only have very few target
    dependent UT in tests/python/codegen and possibly
    a new category in the future for op-level integration if needed.
---
 ci/jenkins/generated/arm_jenkinsfile.groovy        | 272 +----------
 ci/jenkins/generated/cpu_jenkinsfile.groovy        | 232 +---------
 ci/jenkins/generated/hexagon_jenkinsfile.groovy    | 509 +--------------------
 ci/jenkins/templates/arm_jenkinsfile.groovy.j2     |  21 -
 ci/jenkins/templates/cpu_jenkinsfile.groovy.j2     |  17 +-
 ci/jenkins/templates/hexagon_jenkinsfile.groovy.j2 |  19 -
 tests/scripts/ci.py                                |   1 -
 7 files changed, 23 insertions(+), 1048 deletions(-)

diff --git a/ci/jenkins/generated/arm_jenkinsfile.groovy 
b/ci/jenkins/generated/arm_jenkinsfile.groovy
index 5e48cc6500..36caff72e9 100644
--- a/ci/jenkins/generated/arm_jenkinsfile.groovy
+++ b/ci/jenkins/generated/arm_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2025-02-15T10:14:10.162250
+// Generated at 2025-02-15T19:39:05.034202
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -546,273 +546,3 @@ def build() {
 }
 build()
 
-
-
-def shard_run_integration_aarch64_1_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_arm)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=arm',
-            'TEST_STEP_NAME=integration: aarch64',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=0',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_arm)
-              python_unittest(ci_arm)
-              sh (
-                script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: aarch64 1 of 4')
-  }
-}
-
-def shard_run_integration_aarch64_2_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_arm)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=arm',
-            'TEST_STEP_NAME=integration: aarch64',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=1',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_arm)
-              python_unittest(ci_arm)
-              sh (
-                script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: aarch64 2 of 4')
-  }
-}
-
-def shard_run_integration_aarch64_3_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_arm)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=arm',
-            'TEST_STEP_NAME=integration: aarch64',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=2',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_arm)
-              python_unittest(ci_arm)
-              sh (
-                script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: aarch64 3 of 4')
-  }
-}
-
-def shard_run_integration_aarch64_4_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_arm)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=arm',
-            'TEST_STEP_NAME=integration: aarch64',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=3',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_arm)
-              python_unittest(ci_arm)
-              sh (
-                script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: aarch64 4 of 4')
-  }
-}
-
-
-
-def test() {
-  stage('Test') {
-    environment {
-      SKIP_SLOW_TESTS = "${skip_slow_tests}"
-    }
-    parallel(
-    'integration: aarch64 1 of 4': {
-      try {
-      shard_run_integration_aarch64_1_of_4('ARM-GRAVITON3-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_aarch64_1_of_4('ARM-GRAVITON3')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'integration: aarch64 2 of 4': {
-      try {
-      shard_run_integration_aarch64_2_of_4('ARM-GRAVITON3-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_aarch64_2_of_4('ARM-GRAVITON3')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'integration: aarch64 3 of 4': {
-      try {
-      shard_run_integration_aarch64_3_of_4('ARM-GRAVITON3-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_aarch64_3_of_4('ARM-GRAVITON3')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'integration: aarch64 4 of 4': {
-      try {
-      shard_run_integration_aarch64_4_of_4('ARM-GRAVITON3-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_aarch64_4_of_4('ARM-GRAVITON3')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    )
-  }
-}
-test()
diff --git a/ci/jenkins/generated/cpu_jenkinsfile.groovy 
b/ci/jenkins/generated/cpu_jenkinsfile.groovy
index b54fdf51ca..627bb85862 100644
--- a/ci/jenkins/generated/cpu_jenkinsfile.groovy
+++ b/ci/jenkins/generated/cpu_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2025-02-15T10:14:10.181874
+// Generated at 2025-02-15T19:40:24.687837
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -553,158 +553,21 @@ build()
 
 
 
-def shard_run_integration_CPU_1_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_cpu)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=cpu',
-            'TEST_STEP_NAME=integration: CPU',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=0',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_cpu)
-              sh (
-                script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: CPU 1 of 4')
-  }
-}
-
-def shard_run_integration_CPU_2_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_cpu)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=cpu',
-            'TEST_STEP_NAME=integration: CPU',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=1',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_cpu)
-              sh (
-                script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: CPU 2 of 4')
-  }
-}
-
-def shard_run_integration_CPU_3_of_4(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_cpu)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=cpu',
-            'TEST_STEP_NAME=integration: CPU',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=2',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_cpu)
-              sh (
-                script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('integration: CPU 3 of 4')
-  }
-}
 
-def shard_run_integration_CPU_4_of_4(node_type) {
+def shard_run_unittest_CPU_1_of_2(node_type) {
   echo 'Begin running on node_type ' + node_type
   if (!skip_ci && is_docs_only_build != 1) {
     node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
+      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-cpu") {
         // NOTE: if exception happens, it will be caught outside
         init_git()
         docker_init(ci_cpu)
         timeout(time: max_time, unit: 'MINUTES') {
           withEnv([
             'PLATFORM=cpu',
-            'TEST_STEP_NAME=integration: CPU',
-            'TVM_NUM_SHARDS=4',
-            'TVM_SHARD_INDEX=3',
+            'TEST_STEP_NAME=unittest: CPU',
+            'TVM_NUM_SHARDS=2',
+            'TVM_SHARD_INDEX=0',
             "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
             sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
@@ -712,16 +575,14 @@ def shard_run_integration_CPU_4_of_4(node_type) {
                 )
 
               ci_setup(ci_cpu)
-              sh (
-                script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
-                label: 'Run CPU integration tests',
-              )
+              cpp_unittest(ci_cpu)
+              python_unittest(ci_cpu)
           })
         }
         // only run upload if things are successful
         try {
           sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
+            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/unittest_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
@@ -733,13 +594,11 @@ def shard_run_integration_CPU_4_of_4(node_type) {
     }
     echo 'End running on node_type ' + node_type
   } else {
-    Utils.markStageSkippedForConditional('integration: CPU 4 of 4')
+    Utils.markStageSkippedForConditional('unittest: CPU 1 of 2')
   }
 }
 
-
-
-def shard_run_unittest_CPU_1_of_1(node_type) {
+def shard_run_unittest_CPU_2_of_2(node_type) {
   echo 'Begin running on node_type ' + node_type
   if (!skip_ci && is_docs_only_build != 1) {
     node(node_type) {
@@ -751,8 +610,8 @@ def shard_run_unittest_CPU_1_of_1(node_type) {
           withEnv([
             'PLATFORM=cpu',
             'TEST_STEP_NAME=unittest: CPU',
-            'TVM_NUM_SHARDS=1',
-            'TVM_SHARD_INDEX=0',
+            'TVM_NUM_SHARDS=2',
+            'TVM_SHARD_INDEX=1',
             "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
             sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
@@ -779,7 +638,7 @@ def shard_run_unittest_CPU_1_of_1(node_type) {
     }
     echo 'End running on node_type ' + node_type
   } else {
-    Utils.markStageSkippedForConditional('unittest: CPU 1 of 1')
+    Utils.markStageSkippedForConditional('unittest: CPU 2 of 2')
   }
 }
 
@@ -790,60 +649,9 @@ def test() {
       SKIP_SLOW_TESTS = "${skip_slow_tests}"
     }
     parallel(
-    'integration: CPU 1 of 4': {
-      try {
-      shard_run_integration_CPU_1_of_4('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_CPU_1_of_4('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'integration: CPU 2 of 4': {
-      try {
-      shard_run_integration_CPU_2_of_4('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_CPU_2_of_4('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'integration: CPU 3 of 4': {
-      try {
-      shard_run_integration_CPU_3_of_4('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_integration_CPU_3_of_4('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'integration: CPU 4 of 4': {
+    'unittest: CPU 1 of 2': {
       try {
-      shard_run_integration_CPU_4_of_4('CPU-SMALL-SPOT')
+      shard_run_unittest_CPU_1_of_2('CPU-SMALL-SPOT')
       } catch (Throwable ex) {
         if (is_last_build()) {
           // retry if at last build
@@ -851,16 +659,16 @@ def test() {
           // and try again via on demand node
           echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
           currentBuild.result = 'SUCCESS'
-          shard_run_integration_CPU_4_of_4('CPU-SMALL')
+          shard_run_unittest_CPU_1_of_2('CPU-SMALL')
         } else {
           echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
           throw ex
         }
       }
     },
-    'unittest: CPU 1 of 1': {
+    'unittest: CPU 2 of 2': {
       try {
-      shard_run_unittest_CPU_1_of_1('CPU-SMALL-SPOT')
+      shard_run_unittest_CPU_2_of_2('CPU-SMALL-SPOT')
       } catch (Throwable ex) {
         if (is_last_build()) {
           // retry if at last build
@@ -868,7 +676,7 @@ def test() {
           // and try again via on demand node
           echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
           currentBuild.result = 'SUCCESS'
-          shard_run_unittest_CPU_1_of_1('CPU-SMALL')
+          shard_run_unittest_CPU_2_of_2('CPU-SMALL')
         } else {
           echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
           throw ex
diff --git a/ci/jenkins/generated/hexagon_jenkinsfile.groovy 
b/ci/jenkins/generated/hexagon_jenkinsfile.groovy
index da20f33bbb..a9014337a7 100644
--- a/ci/jenkins/generated/hexagon_jenkinsfile.groovy
+++ b/ci/jenkins/generated/hexagon_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2025-02-15T10:14:10.056677
+// Generated at 2025-02-15T19:31:36.031215
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -552,519 +552,12 @@ build()
 
 
 
-
-def shard_run_test_Hexagon_1_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=0',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              cpp_unittest(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 1 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_2_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=1',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 2 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_3_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=2',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 3 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_4_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=3',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 4 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_5_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=4',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 5 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_6_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=5',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 6 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_7_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=6',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 7 of 8')
-  }
-}
-
-def shard_run_test_Hexagon_8_of_8(node_type) {
-  echo 'Begin running on node_type ' + node_type
-  if (!skip_ci && is_docs_only_build != 1) {
-    node(node_type) {
-      ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        // NOTE: if exception happens, it will be caught outside
-        init_git()
-        docker_init(ci_hexagon)
-        timeout(time: max_time, unit: 'MINUTES') {
-          withEnv([
-            'PLATFORM=hexagon',
-            'TEST_STEP_NAME=test: Hexagon',
-            'TVM_NUM_SHARDS=8',
-            'TVM_SHARD_INDEX=7',
-            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-            sh(
-                  script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
-                  label: 'Download artifacts from S3',
-                )
-
-              ci_setup(ci_hexagon)
-              sh (
-                script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-                label: 'Run Hexagon tests',
-              )
-          })
-        }
-        // only run upload if things are successful
-        try {
-          sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
-            label: 'Upload JUnits to S3',
-          )
-
-          junit 'build/pytest-results/*.xml'
-        } catch (Exception e) {
-          echo 'Exception during JUnit upload: ' + e.toString()
-        }
-      }
-    }
-    echo 'End running on node_type ' + node_type
-  } else {
-    Utils.markStageSkippedForConditional('test: Hexagon 8 of 8')
-  }
-}
-
-
 def test() {
   stage('Test') {
     environment {
       SKIP_SLOW_TESTS = "${skip_slow_tests}"
     }
     parallel(
-    'test: Hexagon 1 of 8': {
-      try {
-      shard_run_test_Hexagon_1_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_1_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 2 of 8': {
-      try {
-      shard_run_test_Hexagon_2_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_2_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 3 of 8': {
-      try {
-      shard_run_test_Hexagon_3_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_3_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 4 of 8': {
-      try {
-      shard_run_test_Hexagon_4_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_4_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 5 of 8': {
-      try {
-      shard_run_test_Hexagon_5_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_5_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 6 of 8': {
-      try {
-      shard_run_test_Hexagon_6_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_6_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 7 of 8': {
-      try {
-      shard_run_test_Hexagon_7_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_7_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
-    'test: Hexagon 8 of 8': {
-      try {
-      shard_run_test_Hexagon_8_of_8('CPU-SMALL-SPOT')
-      } catch (Throwable ex) {
-        if (is_last_build()) {
-          // retry if at last build
-          // mark the current stage as success
-          // and try again via on demand node
-          echo 'Exception during SPOT run ' + ex.toString() + ' retry 
on-demand'
-          currentBuild.result = 'SUCCESS'
-          shard_run_test_Hexagon_8_of_8('CPU-SMALL')
-        } else {
-          echo 'Exception during SPOT run ' + ex.toString() + ' exit since it 
is not last build'
-          throw ex
-        }
-      }
-    },
     )
   }
 }
diff --git a/ci/jenkins/templates/arm_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/arm_jenkinsfile.groovy.j2
index aa999408a7..84220db394 100644
--- a/ci/jenkins/templates/arm_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/arm_jenkinsfile.groovy.j2
@@ -34,24 +34,3 @@
   {{ m.upload_artifacts(tag='arm', filenames=tvm_lib + cpptest) }}
 {% endcall %}
 
-{% set test_method_names = [] %}
-
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="integration: aarch64",
-  num_shards=4,
-  ws="tvm/ut-python-arm",
-  platform="arm",
-  docker_image="ci_arm",
-  test_method_names=test_method_names,
-) %}
-  {{ m.download_artifacts(tag='arm') }}
-  ci_setup(ci_arm)
-  python_unittest(ci_arm)
-  sh (
-    script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
-    label: 'Run CPU integration tests',
-  )
-{% endcall %}
-
-
-{{ m.invoke_tests(node="ARM-GRAVITON3", test_method_names=test_method_names) 
-}}
diff --git a/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2
index e34132c941..c84b0c48a2 100644
--- a/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2
@@ -40,27 +40,12 @@
 
 {% set test_method_names = [] %}
 
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="integration: CPU",
-  num_shards=4,
-  ws="tvm/integration-python-cpu",
-  platform="cpu",
-  docker_image="ci_cpu",
-  test_method_names=test_method_names,
-) %}
-  {{ m.download_artifacts(tag='cpu') }}
-  ci_setup(ci_cpu)
-  sh (
-    script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
-    label: 'Run CPU integration tests',
-  )
-{% endcall %}
 
 {% call(shard_index, num_shards) m.sharded_test_step(
   name="unittest: CPU",
   ws="tvm/ut-python-cpu",
   platform="cpu",
-  num_shards=1,
+  num_shards=2,
   docker_image="ci_cpu",
   test_method_names=test_method_names,
 ) %}
diff --git a/ci/jenkins/templates/hexagon_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/hexagon_jenkinsfile.groovy.j2
index 91d3ce9ece..b4177b3329 100644
--- a/ci/jenkins/templates/hexagon_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/hexagon_jenkinsfile.groovy.j2
@@ -41,23 +41,4 @@
 
 {% set test_method_names = [] %}
 
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="test: Hexagon",
-  ws="tvm/test-hexagon",
-  platform="hexagon",
-  docker_image="ci_hexagon",
-  test_method_names=test_method_names,
-  num_shards=8,
-) %}
-  {{ m.download_artifacts(tag='hexagon') }}
-  ci_setup(ci_hexagon)
-  {% if shard_index == 1 %}
-  cpp_unittest(ci_hexagon)
-  {% endif %}
-  sh (
-    script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
-    label: 'Run Hexagon tests',
-  )
-{% endcall %}
-
 {{ m.invoke_tests(node="CPU-SMALL", test_method_names=test_method_names) -}}
diff --git a/tests/scripts/ci.py b/tests/scripts/ci.py
index 0bd97e4ee0..10d6312912 100755
--- a/tests/scripts/ci.py
+++ b/tests/scripts/ci.py
@@ -683,7 +683,6 @@ generated = [
                 "run full Python tests",
                 [
                     "./tests/scripts/task_python_unittest.sh",
-                    "./tests/scripts/task_python_arm_compute_library.sh",
                 ],
             ),
         },


Reply via email to