This is an automated email from the ASF dual-hosted git repository.

tqchen pushed a commit to branch ci
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 49c49f1e8d941a8a9ef01ddb25f7c2a7d079dc51
Author: tqchen <[email protected]>
AuthorDate: Thu Feb 6 12:13:25 2025 -0500

    [CI] Robustify CI for SPOT failure
    
    This PR cleans up some legacy CI files and robustifies
    the CI against spot-instance failures by marking the stage as a success
    and then retrying on the on-demand node.
---
 ci/jenkins/data.py                                 |  14 +-
 ci/jenkins/generated/arm_jenkinsfile.groovy        | 340 +++++++------
 ci/jenkins/generated/cpu_jenkinsfile.groovy        | 256 +++++-----
 ci/jenkins/generated/gpu_jenkinsfile.groovy        | 556 +++++++++++----------
 ci/jenkins/generated/hexagon_jenkinsfile.groovy    | 338 +++++++------
 ci/jenkins/generated/i386_jenkinsfile.groovy       | 131 ++---
 ci/jenkins/templates/arm_jenkinsfile.groovy.j2     |   2 +-
 ci/jenkins/templates/cortexm_jenkinsfile.groovy.j2 |  63 ---
 ci/jenkins/templates/cpu_jenkinsfile.groovy.j2     |   2 +-
 ci/jenkins/templates/gpu_jenkinsfile.groovy.j2     |   6 +-
 ci/jenkins/templates/i386_jenkinsfile.groovy.j2    |   2 +-
 .../minimal_cross_isa_jenkinsfile.groovy.j2        |  57 ---
 ci/jenkins/templates/minimal_jenkinsfile.groovy.j2 |  55 --
 ci/jenkins/templates/riscv_jenkinsfile.groovy.j2   |  61 ---
 ci/jenkins/templates/utils/macros.j2               |  43 +-
 ci/jenkins/unity_jenkinsfile.groovy                |   2 +-
 16 files changed, 884 insertions(+), 1044 deletions(-)

diff --git a/ci/jenkins/data.py b/ci/jenkins/data.py
index 8cf762f161..f991319033 100644
--- a/ci/jenkins/data.py
+++ b/ci/jenkins/data.py
@@ -21,8 +21,6 @@ import sys
 files_to_stash = {
     # Executables and build files needed to run c++ tests
     "cpptest": ["build/cpptest", "build/build.ninja", 
"build/CMakeFiles/rules.ninja"],
-    # Executables and build files needed to c runtime tests
-    "crttest": ["build/crttest"],
     # Folder for hexagon build
     "hexagon_api": [
         "build/hexagon_api_output",
@@ -33,13 +31,11 @@ files_to_stash = {
     "tvm_runtime": ["build/libtvm_runtime.so", "build/config.cmake"],
     # compiler files
     "tvm_lib": ["build/libtvm.so", "build/libtvm_runtime.so", 
"build/config.cmake"],
-    # compiler files and fsim
-    "tvm_multilib": [
-        "build/libtvm.so",
-        "build/libvta_fsim.so",
-        "build/libtvm_runtime.so",
-        "build/config.cmake",
-    ],
+    # gpu related compiler files
+    "tvm_lib_gpu_extra": [
+        "build/3rdparty/libflash_attn/src/libflash_attn.so",
+        
"build/3rdparty/cutlass_fpA_intB_gemm/cutlass_kernels/libfpA_intB_gemm.so"
+    ]
 }
 
 
diff --git a/ci/jenkins/generated/arm_jenkinsfile.groovy 
b/ci/jenkins/generated/arm_jenkinsfile.groovy
index c33be22a6f..3bfbc2f4ef 100644
--- a/ci/jenkins/generated/arm_jenkinsfile.groovy
+++ b/ci/jenkins/generated/arm_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2024-01-10T13:15:25.226391
+// Generated at 2025-02-06T12:11:28.802705
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -533,7 +533,7 @@ def build(node_type) {
         cmake_build(ci_arm, 'build', '-j4')
         make_cpp_tests(ci_arm, 'build')
         sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/arm --items build/libtvm.so 
build/libvta_fsim.so build/libtvm_runtime.so build/config.cmake build/cpptest 
build/build.ninja build/CMakeFiles/rules.ninja build/crttest build/build.ninja 
build/microtvm_template_projects",
+            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/arm --items build/libtvm.so 
build/libtvm_runtime.so build/config.cmake build/cpptest build/build.ninja 
build/CMakeFiles/rules.ninja",
             label: 'Upload artifacts to S3',
           )
             })
@@ -560,17 +560,17 @@ def 
shard_run_integration_aarch64_1_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=integration: aarch64',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=integration: aarch64',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -581,19 +581,18 @@ def 
shard_run_integration_aarch64_1_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -609,17 +608,17 @@ def 
shard_run_integration_aarch64_2_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=integration: aarch64',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=integration: aarch64',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -630,19 +629,18 @@ def 
shard_run_integration_aarch64_2_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -658,17 +656,17 @@ def 
shard_run_integration_aarch64_3_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=integration: aarch64',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=integration: aarch64',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -679,19 +677,18 @@ def 
shard_run_integration_aarch64_3_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -707,17 +704,17 @@ def 
shard_run_integration_aarch64_4_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=integration: aarch64',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=3',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=integration: aarch64',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=3',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -728,19 +725,18 @@ def 
shard_run_integration_aarch64_4_of_4(node_type='ARM-GRAVITON3-SPOT', on_dema
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -758,17 +754,17 @@ def 
shard_run_topi_aarch64_1_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=fals
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=topi: aarch64',
-              'TVM_NUM_SHARDS=2',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=topi: aarch64',
+            'TVM_NUM_SHARDS=2',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -783,19 +779,18 @@ def 
shard_run_topi_aarch64_1_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=fals
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_topi.sh",
                 label: 'Run TOPI tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/topi_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -811,17 +806,17 @@ def 
shard_run_topi_aarch64_2_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=fals
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=topi: aarch64',
-              'TVM_NUM_SHARDS=2',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=topi: aarch64',
+            'TVM_NUM_SHARDS=2',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -835,19 +830,18 @@ def 
shard_run_topi_aarch64_2_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=fals
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_topi.sh",
                 label: 'Run TOPI tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/topi_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -865,17 +859,17 @@ def 
shard_run_frontend_aarch64_1_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=frontend: aarch64',
-              'TVM_NUM_SHARDS=2',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=frontend: aarch64',
+            'TVM_NUM_SHARDS=2',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -885,19 +879,18 @@ def 
shard_run_frontend_aarch64_1_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_frontend_cpu.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -913,17 +906,17 @@ def 
shard_run_frontend_aarch64_2_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-arm") {
-        try {
-          init_git()
-          docker_init(ci_arm)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=arm',
-              'TEST_STEP_NAME=frontend: aarch64',
-              'TVM_NUM_SHARDS=2',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_arm)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=arm',
+            'TEST_STEP_NAME=frontend: aarch64',
+            'TVM_NUM_SHARDS=2',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/arm",
                   label: 'Download artifacts from S3',
                 )
@@ -933,19 +926,18 @@ def 
shard_run_frontend_aarch64_2_of_2(node_type='ARM-GRAVITON3-SPOT', on_demand=
                 script: "${docker_run} ${ci_arm} 
./tests/scripts/task_python_frontend_cpu.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_aarch64 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -965,6 +957,9 @@ def test() {
       try {
       shard_run_integration_aarch64_1_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_aarch64_1_of_4(on_demand = true)
       }
     },
@@ -972,6 +967,9 @@ def test() {
       try {
       shard_run_integration_aarch64_2_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_aarch64_2_of_4(on_demand = true)
       }
     },
@@ -979,6 +977,9 @@ def test() {
       try {
       shard_run_integration_aarch64_3_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_aarch64_3_of_4(on_demand = true)
       }
     },
@@ -986,6 +987,9 @@ def test() {
       try {
       shard_run_integration_aarch64_4_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_aarch64_4_of_4(on_demand = true)
       }
     },
@@ -993,6 +997,9 @@ def test() {
       try {
       shard_run_topi_aarch64_1_of_2()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_topi_aarch64_1_of_2(on_demand = true)
       }
     },
@@ -1000,6 +1007,9 @@ def test() {
       try {
       shard_run_topi_aarch64_2_of_2()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_topi_aarch64_2_of_2(on_demand = true)
       }
     },
@@ -1007,6 +1017,9 @@ def test() {
       try {
       shard_run_frontend_aarch64_1_of_2()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_aarch64_1_of_2(on_demand = true)
       }
     },
@@ -1014,6 +1027,9 @@ def test() {
       try {
       shard_run_frontend_aarch64_2_of_2()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_aarch64_2_of_2(on_demand = true)
       }
     },
diff --git a/ci/jenkins/generated/cpu_jenkinsfile.groovy 
b/ci/jenkins/generated/cpu_jenkinsfile.groovy
index 04ab19f404..fc29e0c324 100644
--- a/ci/jenkins/generated/cpu_jenkinsfile.groovy
+++ b/ci/jenkins/generated/cpu_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2024-01-10T13:15:25.103852
+// Generated at 2025-02-06T12:11:28.820034
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -533,7 +533,7 @@ def build(node_type) {
         cmake_build(ci_cpu, 'build', '-j2')
         make_cpp_tests(ci_cpu, 'build')
         sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/cpu --items build/libvta_tsim.so 
build/libtvm.so build/libvta_fsim.so build/libtvm_runtime.so build/config.cmake 
build/libtvm_allvisible.so build/crttest build/cpptest build/build.ninja 
build/CMakeFiles/rules.ninja build/build.ninja 
build/microtvm_template_projects",
+            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/cpu --items build/libtvm.so 
build/libtvm_runtime.so build/config.cmake build/libtvm_allvisible.so 
build/cpptest build/build.ninja build/CMakeFiles/rules.ninja",
             label: 'Upload artifacts to S3',
           )
 
@@ -565,17 +565,17 @@ def 
shard_run_integration_CPU_1_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        try {
-          init_git()
-          docker_init(ci_cpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=cpu',
-              'TEST_STEP_NAME=integration: CPU',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_cpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=cpu',
+            'TEST_STEP_NAME=integration: CPU',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
                   label: 'Download artifacts from S3',
                 )
@@ -585,19 +585,18 @@ def 
shard_run_integration_CPU_1_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
                 script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -613,17 +612,17 @@ def 
shard_run_integration_CPU_2_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        try {
-          init_git()
-          docker_init(ci_cpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=cpu',
-              'TEST_STEP_NAME=integration: CPU',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_cpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=cpu',
+            'TEST_STEP_NAME=integration: CPU',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
                   label: 'Download artifacts from S3',
                 )
@@ -633,19 +632,18 @@ def 
shard_run_integration_CPU_2_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
                 script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -661,17 +659,17 @@ def 
shard_run_integration_CPU_3_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        try {
-          init_git()
-          docker_init(ci_cpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=cpu',
-              'TEST_STEP_NAME=integration: CPU',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_cpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=cpu',
+            'TEST_STEP_NAME=integration: CPU',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
                   label: 'Download artifacts from S3',
                 )
@@ -681,19 +679,18 @@ def 
shard_run_integration_CPU_3_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
                 script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -709,17 +706,17 @@ def 
shard_run_integration_CPU_4_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-cpu") {
-        try {
-          init_git()
-          docker_init(ci_cpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=cpu',
-              'TEST_STEP_NAME=integration: CPU',
-              'TVM_NUM_SHARDS=4',
-              'TVM_SHARD_INDEX=3',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_cpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=cpu',
+            'TEST_STEP_NAME=integration: CPU',
+            'TVM_NUM_SHARDS=4',
+            'TVM_SHARD_INDEX=3',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
                   label: 'Download artifacts from S3',
                 )
@@ -729,19 +726,18 @@ def 
shard_run_integration_CPU_4_of_4(node_type='CPU-SMALL-SPOT', on_demand=false
                 script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_integration.sh",
                 label: 'Run CPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/integration_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -759,17 +755,17 @@ def 
shard_run_unittest_CPU_1_of_1(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-cpu") {
-        try {
-          init_git()
-          docker_init(ci_cpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=cpu',
-              'TEST_STEP_NAME=unittest: CPU',
-              'TVM_NUM_SHARDS=1',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_cpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=cpu',
+            'TEST_STEP_NAME=unittest: CPU',
+            'TVM_NUM_SHARDS=1',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
                   label: 'Download artifacts from S3',
                 )
@@ -777,19 +773,18 @@ def 
shard_run_unittest_CPU_1_of_1(node_type='CPU-SMALL-SPOT', on_demand=false) {
               ci_setup(ci_cpu)
               cpp_unittest(ci_cpu)
               python_unittest(ci_cpu)
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/unittest_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -806,17 +801,17 @@ def 
shard_run_frontend_CPU_1_of_1(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-cpu") {
-        try {
-          init_git()
-          docker_init(ci_cpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=cpu',
-              'TEST_STEP_NAME=frontend: CPU',
-              'TVM_NUM_SHARDS=1',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_cpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=cpu',
+            'TEST_STEP_NAME=frontend: CPU',
+            'TVM_NUM_SHARDS=1',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/cpu",
                   label: 'Download artifacts from S3',
                 )
@@ -826,19 +821,18 @@ def 
shard_run_frontend_CPU_1_of_1(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_cpu} 
./tests/scripts/task_python_frontend_cpu.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_CPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -858,6 +852,9 @@ def test() {
       try {
       shard_run_integration_CPU_1_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_CPU_1_of_4(on_demand = true)
       }
     },
@@ -865,6 +862,9 @@ def test() {
       try {
       shard_run_integration_CPU_2_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_CPU_2_of_4(on_demand = true)
       }
     },
@@ -872,6 +872,9 @@ def test() {
       try {
       shard_run_integration_CPU_3_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_CPU_3_of_4(on_demand = true)
       }
     },
@@ -879,6 +882,9 @@ def test() {
       try {
       shard_run_integration_CPU_4_of_4()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_integration_CPU_4_of_4(on_demand = true)
       }
     },
@@ -886,6 +892,9 @@ def test() {
       try {
       shard_run_unittest_CPU_1_of_1()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_unittest_CPU_1_of_1(on_demand = true)
       }
     },
@@ -893,6 +902,9 @@ def test() {
       try {
       shard_run_frontend_CPU_1_of_1()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_CPU_1_of_1(on_demand = true)
       }
     },
diff --git a/ci/jenkins/generated/gpu_jenkinsfile.groovy 
b/ci/jenkins/generated/gpu_jenkinsfile.groovy
index 014377f05d..be07049054 100644
--- a/ci/jenkins/generated/gpu_jenkinsfile.groovy
+++ b/ci/jenkins/generated/gpu_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2024-01-10T13:15:25.186261
+// Generated at 2025-02-06T12:11:28.848101
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -529,7 +529,7 @@ def build(node_type) {
               sh "${docker_run} --no-gpu ${ci_gpu} 
./tests/scripts/task_config_build_gpu.sh build"
         cmake_build("${ci_gpu} --no-gpu", 'build', '-j2')
         sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/gpu --items build/libtvm.so 
build/libvta_fsim.so build/libtvm_runtime.so build/config.cmake 
build/libtvm_allvisible.so build/microtvm_template_projects build/crttest 
build/build.ninja build/3rdparty/libflash_attn/src/libflash_attn.so 
build/3rdparty/cutlass_fpA_intB_gemm/cutlass_kernels/libfpA_intB_gemm.so",
+            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/gpu --items build/libtvm.so 
build/libtvm_runtime.so build/config.cmake build/libtvm_allvisible.so 
build/3rdparty/libflash_attn/src/libflash_attn.so 
build/3rdparty/cutlass_fpA_intB_gemm/cutlass_kernels/libfpA_intB_gemm.so",
             label: 'Upload artifacts to S3',
           )
 
@@ -539,7 +539,7 @@ def build(node_type) {
         sh "${docker_run} --no-gpu ${ci_gpu} 
./tests/scripts/task_config_build_gpu_other.sh build"
         cmake_build("${ci_gpu} --no-gpu", 'build', '-j2')
         sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/gpu2 --items build/libtvm.so 
build/libtvm_runtime.so build/config.cmake build/crttest build/build.ninja 
build/3rdparty/libflash_attn/src/libflash_attn.so 
build/3rdparty/cutlass_fpA_intB_gemm/cutlass_kernels/libfpA_intB_gemm.so",
+            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/gpu2 --items build/libtvm.so 
build/libtvm_runtime.so build/config.cmake",
             label: 'Upload artifacts to S3',
           )
             })
@@ -552,9 +552,9 @@ def build(node_type) {
   }
 }
 try {
-    build('CPU')
+    build('CPU-SPOT')
 } catch (Exception ex) {
-    build('CPU-SMALL')
+    build('CPU')
 }
 
 
@@ -566,17 +566,17 @@ def shard_run_unittest_GPU_1_of_3(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=unittest: GPU',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=unittest: GPU',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu2",
                   label: 'Download artifacts from S3',
                 )
@@ -612,19 +612,18 @@ def shard_run_unittest_GPU_1_of_3(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_integration_gpuonly.sh",
                 label: 'Run Python GPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/unittest_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -640,17 +639,17 @@ def shard_run_unittest_GPU_2_of_3(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=unittest: GPU',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=unittest: GPU',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -668,19 +667,18 @@ def shard_run_unittest_GPU_2_of_3(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_integration_gpuonly.sh",
                 label: 'Run Python GPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/unittest_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -696,17 +694,17 @@ def shard_run_unittest_GPU_3_of_3(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/ut-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=unittest: GPU',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=unittest: GPU',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -720,19 +718,18 @@ def shard_run_unittest_GPU_3_of_3(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_integration_gpuonly.sh",
                 label: 'Run Python GPU integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/unittest_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -750,17 +747,17 @@ def shard_run_topi_GPU_1_of_3(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/topi-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=topi: GPU',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=topi: GPU',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -770,19 +767,18 @@ def shard_run_topi_GPU_1_of_3(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_topi.sh",
                 label: 'Run TOPI tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/topi_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -798,17 +794,17 @@ def shard_run_topi_GPU_2_of_3(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/topi-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=topi: GPU',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=topi: GPU',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -818,19 +814,18 @@ def shard_run_topi_GPU_2_of_3(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_topi.sh",
                 label: 'Run TOPI tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/topi_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -846,17 +841,17 @@ def shard_run_topi_GPU_3_of_3(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/topi-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=topi: GPU',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=topi: GPU',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -866,19 +861,18 @@ def shard_run_topi_GPU_3_of_3(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_topi.sh",
                 label: 'Run TOPI tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/topi_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -896,17 +890,17 @@ def shard_run_frontend_GPU_1_of_6(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=frontend: GPU',
-              'TVM_NUM_SHARDS=6',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=frontend: GPU',
+            'TVM_NUM_SHARDS=6',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -916,19 +910,18 @@ def shard_run_frontend_GPU_1_of_6(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_frontend.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -944,17 +937,17 @@ def shard_run_frontend_GPU_2_of_6(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=frontend: GPU',
-              'TVM_NUM_SHARDS=6',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=frontend: GPU',
+            'TVM_NUM_SHARDS=6',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -964,19 +957,18 @@ def shard_run_frontend_GPU_2_of_6(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_frontend.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -992,17 +984,17 @@ def shard_run_frontend_GPU_3_of_6(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=frontend: GPU',
-              'TVM_NUM_SHARDS=6',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=frontend: GPU',
+            'TVM_NUM_SHARDS=6',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -1012,19 +1004,18 @@ def shard_run_frontend_GPU_3_of_6(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_frontend.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -1040,17 +1031,17 @@ def shard_run_frontend_GPU_4_of_6(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=frontend: GPU',
-              'TVM_NUM_SHARDS=6',
-              'TVM_SHARD_INDEX=3',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=frontend: GPU',
+            'TVM_NUM_SHARDS=6',
+            'TVM_SHARD_INDEX=3',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -1060,19 +1051,18 @@ def shard_run_frontend_GPU_4_of_6(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_frontend.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -1088,17 +1078,17 @@ def shard_run_frontend_GPU_5_of_6(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=frontend: GPU',
-              'TVM_NUM_SHARDS=6',
-              'TVM_SHARD_INDEX=4',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=frontend: GPU',
+            'TVM_NUM_SHARDS=6',
+            'TVM_SHARD_INDEX=4',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -1108,19 +1098,18 @@ def shard_run_frontend_GPU_5_of_6(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_frontend.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -1136,17 +1125,17 @@ def shard_run_frontend_GPU_6_of_6(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/frontend-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=frontend: GPU',
-              'TVM_NUM_SHARDS=6',
-              'TVM_SHARD_INDEX=5',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=frontend: GPU',
+            'TVM_NUM_SHARDS=6',
+            'TVM_SHARD_INDEX=5',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -1156,19 +1145,18 @@ def shard_run_frontend_GPU_6_of_6(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "${docker_run} ${ci_gpu} 
./tests/scripts/task_python_frontend.sh",
                 label: 'Run Python frontend tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/frontend_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -1186,17 +1174,17 @@ def shard_run_docs_GPU_1_of_1(node_type='GPU-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/docs-python-gpu") {
-        try {
-          init_git()
-          docker_init(ci_gpu)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=gpu',
-              'TEST_STEP_NAME=docs: GPU',
-              'TVM_NUM_SHARDS=1',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_gpu)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=gpu',
+            'TEST_STEP_NAME=docs: GPU',
+            'TVM_NUM_SHARDS=1',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/gpu",
                   label: 'Download artifacts from S3',
                 )
@@ -1215,19 +1203,18 @@ def shard_run_docs_GPU_1_of_1(node_type='GPU-SPOT', 
on_demand=false) {
                 script: "aws s3 cp --no-progress _docs 
s3://${s3_bucket}/${s3_prefix}/docs --recursive",
                 label: 'Upload docs to S3',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/docs_GPU --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -1248,6 +1235,9 @@ def test() {
       try {
       shard_run_unittest_GPU_1_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_unittest_GPU_1_of_3(on_demand = true)
       }
     },
@@ -1255,6 +1245,9 @@ def test() {
       try {
       shard_run_unittest_GPU_2_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_unittest_GPU_2_of_3(on_demand = true)
       }
     },
@@ -1262,6 +1255,9 @@ def test() {
       try {
       shard_run_unittest_GPU_3_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_unittest_GPU_3_of_3(on_demand = true)
       }
     },
@@ -1269,6 +1265,9 @@ def test() {
       try {
       shard_run_topi_GPU_1_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_topi_GPU_1_of_3(on_demand = true)
       }
     },
@@ -1276,6 +1275,9 @@ def test() {
       try {
       shard_run_topi_GPU_2_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_topi_GPU_2_of_3(on_demand = true)
       }
     },
@@ -1283,6 +1285,9 @@ def test() {
       try {
       shard_run_topi_GPU_3_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_topi_GPU_3_of_3(on_demand = true)
       }
     },
@@ -1290,6 +1295,9 @@ def test() {
       try {
       shard_run_frontend_GPU_1_of_6()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_GPU_1_of_6(on_demand = true)
       }
     },
@@ -1297,6 +1305,9 @@ def test() {
       try {
       shard_run_frontend_GPU_2_of_6()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_GPU_2_of_6(on_demand = true)
       }
     },
@@ -1304,6 +1315,9 @@ def test() {
       try {
       shard_run_frontend_GPU_3_of_6()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_GPU_3_of_6(on_demand = true)
       }
     },
@@ -1311,6 +1325,9 @@ def test() {
       try {
       shard_run_frontend_GPU_4_of_6()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_GPU_4_of_6(on_demand = true)
       }
     },
@@ -1318,6 +1335,9 @@ def test() {
       try {
       shard_run_frontend_GPU_5_of_6()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_GPU_5_of_6(on_demand = true)
       }
     },
@@ -1325,6 +1345,9 @@ def test() {
       try {
       shard_run_frontend_GPU_6_of_6()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_frontend_GPU_6_of_6(on_demand = true)
       }
     },
@@ -1332,6 +1355,9 @@ def test() {
       try {
       shard_run_docs_GPU_1_of_1()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_docs_GPU_1_of_1(on_demand = true)
       }
     },
diff --git a/ci/jenkins/generated/hexagon_jenkinsfile.groovy 
b/ci/jenkins/generated/hexagon_jenkinsfile.groovy
index a48b12ecd2..fdc7df9197 100644
--- a/ci/jenkins/generated/hexagon_jenkinsfile.groovy
+++ b/ci/jenkins/generated/hexagon_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2024-01-10T13:15:25.087221
+// Generated at 2025-02-06T12:11:28.787674
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -565,17 +565,17 @@ def 
shard_run_test_Hexagon_1_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -586,19 +586,18 @@ def 
shard_run_test_Hexagon_1_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -614,17 +613,17 @@ def 
shard_run_test_Hexagon_2_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -634,19 +633,18 @@ def 
shard_run_test_Hexagon_2_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -662,17 +660,17 @@ def 
shard_run_test_Hexagon_3_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -682,19 +680,18 @@ def 
shard_run_test_Hexagon_3_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -710,17 +707,17 @@ def 
shard_run_test_Hexagon_4_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=3',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=3',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -730,19 +727,18 @@ def 
shard_run_test_Hexagon_4_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -758,17 +754,17 @@ def 
shard_run_test_Hexagon_5_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=4',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=4',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -778,19 +774,18 @@ def 
shard_run_test_Hexagon_5_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -806,17 +801,17 @@ def 
shard_run_test_Hexagon_6_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=5',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=5',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -826,19 +821,18 @@ def 
shard_run_test_Hexagon_6_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -854,17 +848,17 @@ def 
shard_run_test_Hexagon_7_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=6',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=6',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -874,19 +868,18 @@ def 
shard_run_test_Hexagon_7_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -902,17 +895,17 @@ def 
shard_run_test_Hexagon_8_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/test-hexagon") {
-        try {
-          init_git()
-          docker_init(ci_hexagon)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=hexagon',
-              'TEST_STEP_NAME=test: Hexagon',
-              'TVM_NUM_SHARDS=8',
-              'TVM_SHARD_INDEX=7',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_hexagon)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=hexagon',
+            'TEST_STEP_NAME=test: Hexagon',
+            'TVM_NUM_SHARDS=8',
+            'TVM_SHARD_INDEX=7',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/hexagon",
                   label: 'Download artifacts from S3',
                 )
@@ -922,19 +915,18 @@ def 
shard_run_test_Hexagon_8_of_8(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_hexagon} 
./tests/scripts/task_python_hexagon.sh",
                 label: 'Run Hexagon tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/test_Hexagon --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -954,6 +946,9 @@ def test() {
       try {
       shard_run_test_Hexagon_1_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_1_of_8(on_demand = true)
       }
     },
@@ -961,6 +956,9 @@ def test() {
       try {
       shard_run_test_Hexagon_2_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_2_of_8(on_demand = true)
       }
     },
@@ -968,6 +966,9 @@ def test() {
       try {
       shard_run_test_Hexagon_3_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_3_of_8(on_demand = true)
       }
     },
@@ -975,6 +976,9 @@ def test() {
       try {
       shard_run_test_Hexagon_4_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_4_of_8(on_demand = true)
       }
     },
@@ -982,6 +986,9 @@ def test() {
       try {
       shard_run_test_Hexagon_5_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_5_of_8(on_demand = true)
       }
     },
@@ -989,6 +996,9 @@ def test() {
       try {
       shard_run_test_Hexagon_6_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_6_of_8(on_demand = true)
       }
     },
@@ -996,6 +1006,9 @@ def test() {
       try {
       shard_run_test_Hexagon_7_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_7_of_8(on_demand = true)
       }
     },
@@ -1003,6 +1016,9 @@ def test() {
       try {
       shard_run_test_Hexagon_8_of_8()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_test_Hexagon_8_of_8(on_demand = true)
       }
     },
diff --git a/ci/jenkins/generated/i386_jenkinsfile.groovy 
b/ci/jenkins/generated/i386_jenkinsfile.groovy
index 47d2056473..fc159c40ed 100644
--- a/ci/jenkins/generated/i386_jenkinsfile.groovy
+++ b/ci/jenkins/generated/i386_jenkinsfile.groovy
@@ -60,7 +60,7 @@
 // 'python3 jenkins/generate.py'
 // Note: This timestamp is here to ensure that updates to the Jenkinsfile are
 // always rebased on main before merging:
-// Generated at 2024-01-10T13:15:25.169799
+// Generated at 2025-02-06T12:11:28.760889
 
 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // These are set at runtime from data in ci/jenkins/docker-images.yml, update
@@ -507,7 +507,6 @@ def cpp_unittest(image) {
   )
 }
 
-
 cancel_previous_build()
 
 try {
@@ -534,7 +533,7 @@ def build(node_type) {
         cmake_build(ci_i386, 'build', '-j2')
         make_cpp_tests(ci_i386, 'build')
         sh(
-            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/i386 --items build/libvta_tsim.so 
build/libtvm.so build/libvta_fsim.so build/libtvm_runtime.so build/config.cmake 
build/build.ninja build/crttest build/cpptest build/build.ninja 
build/CMakeFiles/rules.ninja build/microtvm_template_projects",
+            script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/i386 --items build/libtvm.so 
build/libtvm_runtime.so build/config.cmake build/cpptest build/build.ninja 
build/CMakeFiles/rules.ninja",
             label: 'Upload artifacts to S3',
           )
             })
@@ -562,17 +561,17 @@ def 
shard_run_python_i386_1_of_3(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-i386") {
-        try {
-          init_git()
-          docker_init(ci_i386)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=i386',
-              'TEST_STEP_NAME=python: i386',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=0',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_i386)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=i386',
+            'TEST_STEP_NAME=python: i386',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=0',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/i386",
                   label: 'Download artifacts from S3',
                 )
@@ -584,19 +583,18 @@ def 
shard_run_python_i386_1_of_3(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_i386} 
./tests/scripts/task_python_integration_i386only.sh",
                 label: 'Run i386 integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/python_i386 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -612,17 +610,17 @@ def 
shard_run_python_i386_2_of_3(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-i386") {
-        try {
-          init_git()
-          docker_init(ci_i386)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=i386',
-              'TEST_STEP_NAME=python: i386',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=1',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_i386)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=i386',
+            'TEST_STEP_NAME=python: i386',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=1',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/i386",
                   label: 'Download artifacts from S3',
                 )
@@ -633,19 +631,18 @@ def 
shard_run_python_i386_2_of_3(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_i386} 
./tests/scripts/task_python_integration_i386only.sh",
                 label: 'Run i386 integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/python_i386 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -661,17 +658,17 @@ def 
shard_run_python_i386_3_of_3(node_type='CPU-SMALL-SPOT', on_demand=false) {
     }
     node(node_type) {
       ws("workspace/exec_${env.EXECUTOR_NUMBER}/tvm/integration-python-i386") {
-        try {
-          init_git()
-          docker_init(ci_i386)
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM=i386',
-              'TEST_STEP_NAME=python: i386',
-              'TVM_NUM_SHARDS=3',
-              'TVM_SHARD_INDEX=2',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              sh(
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init(ci_i386)
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM=i386',
+            'TEST_STEP_NAME=python: i386',
+            'TVM_NUM_SHARDS=3',
+            'TVM_SHARD_INDEX=2',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            sh(
                   script: "./${jenkins_scripts_root}/s3.py --action download 
--bucket ${s3_bucket} --prefix ${s3_prefix}/i386",
                   label: 'Download artifacts from S3',
                 )
@@ -682,19 +679,18 @@ def 
shard_run_python_i386_3_of_3(node_type='CPU-SMALL-SPOT', on_demand=false) {
                 script: "${docker_run} ${ci_i386} 
./tests/scripts/task_python_integration_i386only.sh",
                 label: 'Run i386 integration tests',
               )
-            })
-          }
-        } finally {
-          try {
-            sh(
+          })
+        }
+        // only run upload if things are successful
+        try {
+          sh(
             script: "./${jenkins_scripts_root}/s3.py --action upload --bucket 
${s3_bucket} --prefix ${s3_prefix}/pytest-results/python_i386 --items 
build/pytest-results",
             label: 'Upload JUnits to S3',
           )
 
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -714,6 +710,9 @@ def test() {
       try {
       shard_run_python_i386_1_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_python_i386_1_of_3(on_demand = true)
       }
     },
@@ -721,6 +720,9 @@ def test() {
       try {
       shard_run_python_i386_2_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_python_i386_2_of_3(on_demand = true)
       }
     },
@@ -728,6 +730,9 @@ def test() {
       try {
       shard_run_python_i386_3_of_3()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         shard_run_python_i386_3_of_3(on_demand = true)
       }
     },
diff --git a/ci/jenkins/templates/arm_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/arm_jenkinsfile.groovy.j2
index c512ddd0ce..e9a5bde25e 100644
--- a/ci/jenkins/templates/arm_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/arm_jenkinsfile.groovy.j2
@@ -31,7 +31,7 @@
   )
   cmake_build(ci_arm, 'build', '-j4')
   make_cpp_tests(ci_arm, 'build')
-  {{ m.upload_artifacts(tag='arm', filenames=tvm_multilib + cpptest + crttest 
+ microtvm_template_projects) }}
+  {{ m.upload_artifacts(tag='arm', filenames=tvm_lib + cpptest) }}
 {% endcall %}
 
 {% set test_method_names = [] %}
diff --git a/ci/jenkins/templates/cortexm_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/cortexm_jenkinsfile.groovy.j2
deleted file mode 100644
index 4f38306701..0000000000
--- a/ci/jenkins/templates/cortexm_jenkinsfile.groovy.j2
+++ /dev/null
@@ -1,63 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-{% include "utils/base.groovy.j2" with context %}
-{% import 'utils/macros.j2' as m with context -%}
-
-{% call m.invoke_build(
-  name='BUILD: Cortex-M',
-  node='CPU-SMALL',
-  condition='!skip_ci && is_docs_only_build != 1',
-  ws='tvm/build-cortexm',
-  docker_image='ci_cortexm',
-  platform="cortexm",
-) %}
-  sh (
-    script: "${docker_run} ${ci_cortexm} 
./tests/scripts/task_config_build_cortexm.sh build",
-    label: 'Create Cortex-M cmake config',
-  )
-  cmake_build(ci_cortexm, 'build', '-j2')
-  make_cpp_tests(ci_cortexm, 'build')
-  {{ m.upload_artifacts(tag='cortexm', filenames=tvm_lib + tvm_allvisible + 
crttest + cpptest + microtvm_template_projects) }}
-{% endcall %}
-
-{% set test_method_names = [] %}
-
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="test: Cortex-M",
-  node="CPU-SMALL",
-  ws="tvm/test-cortexm",
-  platform="cortexm",
-  docker_image="ci_cortexm",
-  num_shards=12,
-  test_method_names=test_method_names,
-) %}
-  {{ m.download_artifacts(tag='cortexm') }}
-  ci_setup(ci_cortexm)
-  {% if shard_index == 1%}
-  cpp_unittest(ci_cortexm)
-  sh (
-    script: "${docker_run} ${ci_cortexm} 
./tests/scripts/task_demo_microtvm.sh",
-    label: 'Run microTVM demos',
-  )
-  {% endif %}
-  sh (
-    script: "${docker_run} ${ci_cortexm} 
./tests/scripts/task_python_microtvm.sh",
-    label: 'Run microTVM tests',
-  )
-{% endcall %}
-
-{{ m.invoke_tests(test_method_names) -}}
diff --git a/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2
index 4d6081f3f4..f522196939 100644
--- a/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/cpu_jenkinsfile.groovy.j2
@@ -31,7 +31,7 @@
   )
   cmake_build(ci_cpu, 'build', '-j2')
   make_cpp_tests(ci_cpu, 'build')
-  {{ m.upload_artifacts(tag='cpu', filenames=tvm_multilib_tsim + 
tvm_allvisible + crttest + cpptest + microtvm_template_projects) }}
+  {{ m.upload_artifacts(tag='cpu', filenames=tvm_lib + tvm_allvisible + 
cpptest) }}
   ci_setup(ci_cpu)
   // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh"
   // TODO(@jroesch): need to resolve CI issue will turn back on in follow up 
patch
diff --git a/ci/jenkins/templates/gpu_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/gpu_jenkinsfile.groovy.j2
index b6c7c2cecb..54b2da50cf 100644
--- a/ci/jenkins/templates/gpu_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/gpu_jenkinsfile.groovy.j2
@@ -19,7 +19,7 @@
 
 {% call m.invoke_build(
   name='BUILD: GPU',
-  node='CPU-SMALL',
+  node='CPU',
   condition='!skip_ci',
   ws='tvm/build-gpu',
   docker_image='ci_gpu',
@@ -27,13 +27,13 @@
 ) %}
   sh "${docker_run} --no-gpu ${ci_gpu} 
./tests/scripts/task_config_build_gpu.sh build"
   cmake_build("${ci_gpu} --no-gpu", 'build', '-j2')
-  {{ m.upload_artifacts(tag='gpu', filenames=tvm_multilib + tvm_allvisible + 
microtvm_template_projects + crttest) }}
+  {{ m.upload_artifacts(tag='gpu', filenames=tvm_lib + tvm_allvisible + 
tvm_lib_gpu_extra) }}
 
   // compiler test
   sh "rm -rf build"
   sh "${docker_run} --no-gpu ${ci_gpu} 
./tests/scripts/task_config_build_gpu_other.sh build"
   cmake_build("${ci_gpu} --no-gpu", 'build', '-j2')
-  {{ m.upload_artifacts(tag='gpu2', filenames=tvm_lib + crttest) }}
+  {{ m.upload_artifacts(tag='gpu2', filenames=tvm_lib) }}
 {% endcall %}
 
 {% set test_method_names = [] %}
diff --git a/ci/jenkins/templates/i386_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/i386_jenkinsfile.groovy.j2
index 5b18136745..617c3f75e7 100644
--- a/ci/jenkins/templates/i386_jenkinsfile.groovy.j2
+++ b/ci/jenkins/templates/i386_jenkinsfile.groovy.j2
@@ -31,7 +31,7 @@
   )
   cmake_build(ci_i386, 'build', '-j2')
   make_cpp_tests(ci_i386, 'build')
-  {{ m.upload_artifacts(tag='i386', filenames=tvm_multilib_tsim + crttest + 
cpptest + microtvm_template_projects) }}
+  {{ m.upload_artifacts(tag='i386', filenames=tvm_lib + cpptest) }}
 {% endcall %}
 
 
diff --git a/ci/jenkins/templates/minimal_cross_isa_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/minimal_cross_isa_jenkinsfile.groovy.j2
deleted file mode 100644
index 07c3890dfc..0000000000
--- a/ci/jenkins/templates/minimal_cross_isa_jenkinsfile.groovy.j2
+++ /dev/null
@@ -1,57 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-{% include "utils/base.groovy.j2" with context %}
-{% import 'utils/macros.j2' as m with context -%}
-
-{% call m.invoke_build(
-  name='BUILD: CPU MINIMAL CROSS ISA',
-  node='CPU-SMALL',
-  condition='!skip_ci && is_docs_only_build != 1',
-  ws='tvm/build-cpu-minimal-cross-isa',
-  docker_image='ci_minimal',
-  platform="arm",
-) %}
-  sh (
-    script: "${docker_run} ${ci_minimal} 
./tests/scripts/task_config_build_minimal_cross_isa.sh build",
-    label: 'Create CPU minimal cmake config',
-  )
-  cmake_build(ci_minimal, 'build', '-j2')
-  {{ m.upload_artifacts(tag='cpu-minimal-cross-isa', filenames=tvm_lib + 
tvm_allvisible + microtvm_template_projects) }}
-{% endcall %}
-
-
-{% set test_method_names = [] %}
-
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="unittest: CPU MINIMAL CROSS ISA",
-  node="ARM-SMALL",
-  num_shards=1,
-  ws="tvm/ut-cpp-arm-cross-isa",
-  platform="arm",
-  docker_image="ci_arm",
-  test_method_names=test_method_names,
-) %}
-  {{ m.download_artifacts(tag='cpu-minimal-cross-isa') }}
-  ci_setup(ci_arm)
-  sh "${docker_run} ${ci_arm} 
./tests/scripts/task_config_build_minimal_cross_isa.sh build"
-  make_cpp_tests(ci_arm, 'build')
-  cpp_unittest(ci_arm)
-  python_unittest(ci_arm)
-{% endcall %}
-
-
-{{ m.invoke_tests(test_method_names) -}}
diff --git a/ci/jenkins/templates/minimal_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/minimal_jenkinsfile.groovy.j2
deleted file mode 100644
index dd4dd02fc8..0000000000
--- a/ci/jenkins/templates/minimal_jenkinsfile.groovy.j2
+++ /dev/null
@@ -1,55 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-{% include "utils/base.groovy.j2" with context %}
-{% import 'utils/macros.j2' as m with context -%}
-
-{% call m.invoke_build(
-  name='BUILD: CPU MINIMAL',
-  node='CPU-SMALL',
-  condition='!skip_ci && is_docs_only_build != 1',
-  ws='tvm/build-cpu-minimal',
-  docker_image='ci_minimal',
-  platform="minimal",
-) %}
-  sh (
-    script: "${docker_run} ${ci_minimal} 
./tests/scripts/task_config_build_minimal.sh build",
-    label: 'Create CPU minimal cmake config',
-  )
-  cmake_build(ci_minimal, 'build', '-j2')
-  make_cpp_tests(ci_minimal, 'build')
-  {{ m.upload_artifacts(tag='cpu-minimal', filenames=tvm_lib + tvm_allvisible 
+ crttest + cpptest + microtvm_template_projects) }}
-{% endcall %}
-
-
-{% set test_method_names = [] %}
-
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="unittest: CPU MINIMAL",
-  node="CPU-SMALL",
-  num_shards=1,
-  ws="tvm/ut-python-cpu-minimal",
-  platform="minimal",
-  docker_image="ci_minimal",
-  test_method_names=test_method_names,
-) %}
-  {{ m.download_artifacts(tag='cpu-minimal') }}
-  cpp_unittest(ci_minimal)
-  python_unittest(ci_minimal)
-{% endcall %}
-
-
-{{ m.invoke_tests(test_method_names) -}}
diff --git a/ci/jenkins/templates/riscv_jenkinsfile.groovy.j2 
b/ci/jenkins/templates/riscv_jenkinsfile.groovy.j2
deleted file mode 100644
index 902e912f75..0000000000
--- a/ci/jenkins/templates/riscv_jenkinsfile.groovy.j2
+++ /dev/null
@@ -1,61 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-{% include "utils/base.groovy.j2" with context %}
-{% import 'utils/macros.j2' as m with context -%}
-
-{% call m.invoke_build(
-  name='BUILD: RISC-V',
-  node='CPU-SMALL',
-  condition='!skip_ci && is_docs_only_build != 1',
-  ws='tvm/build-riscv',
-  docker_image='ci_riscv',
-  platform="riscv",
-) %}
-  sh (
-    script: "${docker_run} ${ci_riscv} 
./tests/scripts/task_config_build_riscv.sh build",
-    label: 'Create RISC-V cmake config',
-  )
-  cmake_build(ci_riscv, 'build', '-j2')
-  make_cpp_tests(ci_riscv, 'build')
-  {{ m.upload_artifacts(tag='riscv', filenames=tvm_lib + tvm_allvisible + 
crttest + cpptest + microtvm_template_projects) }}
-{% endcall %}
-
-
-
-{% set test_method_names = [] %}
-
-{% call(shard_index, num_shards) m.sharded_test_step(
-  name="test: RISC-V",
-  node="CPU-SMALL",
-  ws="tvm/test-riscv",
-  platform="riscv",
-  docker_image="ci_riscv",
-  num_shards=1,
-  test_method_names=test_method_names,
-) %}
-  {{ m.download_artifacts(tag='riscv') }}
-  ci_setup(ci_riscv)
-  {% if shard_index == 1%}
-  cpp_unittest(ci_riscv)
-  {% endif %}
-  sh (
-    script: "${docker_run} ${ci_riscv} ./tests/scripts/task_riscv_microtvm.sh",
-    label: 'Run microTVM tests',
-  )
-{% endcall %}
-
-{{ m.invoke_tests(test_method_names) -}}
diff --git a/ci/jenkins/templates/utils/macros.j2 
b/ci/jenkins/templates/utils/macros.j2
index 6de4bd6d65..2deff5c2c5 100644
--- a/ci/jenkins/templates/utils/macros.j2
+++ b/ci/jenkins/templates/utils/macros.j2
@@ -38,26 +38,25 @@ def {{ method_name }}(node_type='{{ node }}-SPOT', 
on_demand=false) {
     }
     node(node_type) {
       ws({{ per_exec_ws(ws) }}) {
+        // NOTE: if exception happens, it will be caught outside
+        init_git()
+        docker_init({{ docker_image }})
+        timeout(time: max_time, unit: 'MINUTES') {
+          withEnv([
+            'PLATFORM={{ platform }}',
+            'TEST_STEP_NAME={{ name }}',
+            'TVM_NUM_SHARDS={{ num_shards }}',
+            'TVM_SHARD_INDEX={{ shard_index - 1 }}',
+            "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
+            {{ caller(shard_index, num_shards) | trim | indent(width=12) }}
+          })
+        }
+        // only run upload if things are successful
         try {
-          init_git()
-          docker_init({{ docker_image }})
-          timeout(time: max_time, unit: 'MINUTES') {
-            withEnv([
-              'PLATFORM={{ platform }}',
-              'TEST_STEP_NAME={{ name }}',
-              'TVM_NUM_SHARDS={{ num_shards }}',
-              'TVM_SHARD_INDEX={{ shard_index - 1 }}',
-              "SKIP_SLOW_TESTS=${skip_slow_tests}"], {
-              {{ caller(shard_index, num_shards) | trim | indent(width=12) }}
-            })
-          }
-        } finally {
-          try {
-            {{ junit_to_s3(test_dir_name) }}
-            junit 'build/pytest-results/*.xml'
-          } catch (Exception e) {
-            echo 'Exception during JUnit upload: ' + e.toString()
-          }
+          {{ junit_to_s3(test_dir_name) }}
+          junit 'build/pytest-results/*.xml'
+        } catch (Exception e) {
+          echo 'Exception during JUnit upload: ' + e.toString()
         }
       }
     }
@@ -96,6 +95,9 @@ def build(node_type) {
 try {
     build('{{ node }}-SPOT')
 } catch (Exception ex) {
+    // mark the current stage as success
+    // and try again via on demand node
+    currentBuild.result = 'SUCCESS'
     build('{{ node }}')
 }
 {% endmacro %}
@@ -112,6 +114,9 @@ def test() {
       try {
       {{ method_name }}()
       } catch (Exception ex) {
+        // mark the current stage as success
+        // and try again via on demand node
+        currentBuild.result = 'SUCCESS'
         {{ method_name }}(on_demand = true)
       }
     },
diff --git a/ci/jenkins/unity_jenkinsfile.groovy 
b/ci/jenkins/unity_jenkinsfile.groovy
index d6a5d46f6f..a0c1120fa3 100755
--- a/ci/jenkins/unity_jenkinsfile.groovy
+++ b/ci/jenkins/unity_jenkinsfile.groovy
@@ -348,7 +348,7 @@ stage('Build and Test') {
       }
     },
     'BUILD: CPU': {
-      node('CPU-SMALL-SPOT') {
+      node('CPU-SMALL') {
         ws(per_exec_ws('tvm/build-cpu')) {
           init_git()
           sh "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh 
build"

Reply via email to