This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bd17f4470d9 [SPARK-42037][INFRA] Rename `AMPLAB_` to `SPARK_` in 
Jenkins build environment variables
bd17f4470d9 is described below

commit bd17f4470d98254518d231b9a7225dbeb2efcb4d
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Jan 12 22:35:22 2023 -0800

    [SPARK-42037][INFRA] Rename `AMPLAB_` to `SPARK_` in Jenkins build 
environment variables
    
    ### What changes were proposed in this pull request?
    
    This PR aims to rename the following environment variables and a legacy 
variable `amplab` from Apache Spark 3.4.0.
    - `AMPLAB_JENKINS` -> `SPARK_JENKINS`
    - `AMPLAB_JENKINS_BUILD_TOOL` -> `SPARK_JENKINS_BUILD_TOOL`
    - `AMPLAB_JENKINS_BUILD_SCALA_PROFILE` -> 
`SPARK_JENKINS_BUILD_SCALA_PROFILE`
    - `AMPLAB_JENKINS_BUILD_PROFILE` -> `SPARK_JENKINS_BUILD_PROFILE`
    
    ### Why are the changes needed?
    
    As of today, Apache Spark's only Jenkins farm is the `Apple Silicon 
Jenkins` farm on `Scaleway`.
    - https://spark.apache.org/developer-tools.html
    
    It's weird to use `AMPLAB_XXX` in this new, unrelated environment.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, this is a dev-only change for new branch-3.4 and master.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    Actually, there is no occurrence of `AMPLAB_` after this PR.
    ```
    $ git grep AMPLAB_ | wc -l
           0
    ```
    
    Closes #39539 from dongjoon-hyun/SPARK-42037.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 dev/run-tests-jenkins.py | 10 +++++-----
 dev/run-tests.py         | 28 ++++++++++++----------------
 2 files changed, 17 insertions(+), 21 deletions(-)

diff --git a/dev/run-tests-jenkins.py b/dev/run-tests-jenkins.py
index 93fbf1a6705..548bc2ee32c 100755
--- a/dev/run-tests-jenkins.py
+++ b/dev/run-tests-jenkins.py
@@ -174,18 +174,18 @@ def main():
     sha1 = os.environ["sha1"]
 
     # Marks this build as a pull request build.
-    os.environ["AMP_JENKINS_PRB"] = "true"
+    os.environ["SPARK_JENKINS_PRB"] = "true"
     # Switch to a Maven-based build if the PR title contains "test-maven":
     if "test-maven" in ghprb_pull_title:
-        os.environ["AMPLAB_JENKINS_BUILD_TOOL"] = "maven"
+        os.environ["SPARK_JENKINS_BUILD_TOOL"] = "maven"
     # Switch the Hadoop profile based on the PR title:
     if "test-hadoop2" in ghprb_pull_title:
-        os.environ["AMPLAB_JENKINS_BUILD_PROFILE"] = "hadoop2"
+        os.environ["SPARK_JENKINS_BUILD_PROFILE"] = "hadoop2"
     if "test-hadoop3" in ghprb_pull_title:
-        os.environ["AMPLAB_JENKINS_BUILD_PROFILE"] = "hadoop3"
+        os.environ["SPARK_JENKINS_BUILD_PROFILE"] = "hadoop3"
     # Switch the Scala profile based on the PR title:
     if "test-scala2.13" in ghprb_pull_title:
-        os.environ["AMPLAB_JENKINS_BUILD_SCALA_PROFILE"] = "scala2.13"
+        os.environ["SPARK_JENKINS_BUILD_SCALA_PROFILE"] = "scala2.13"
 
     build_display_name = os.environ["BUILD_DISPLAY_NAME"]
     build_url = os.environ["BUILD_URL"]
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 18f55697918..92768c96905 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -289,7 +289,7 @@ def build_spark_assembly_sbt(extra_profiles, 
checkstyle=False):
     if checkstyle:
         run_java_style_checks(build_profiles)
 
-    if not os.environ.get("AMPLAB_JENKINS") and not 
os.environ.get("SKIP_UNIDOC"):
+    if not os.environ.get("SPARK_JENKINS") and not 
os.environ.get("SKIP_UNIDOC"):
         build_spark_unidoc_sbt(extra_profiles)
 
 
@@ -396,7 +396,7 @@ def run_python_tests(test_modules, parallelism, 
with_coverage=False):
 
 
 def run_python_packaging_tests():
-    if not os.environ.get("AMPLAB_JENKINS"):
+    if not os.environ.get("SPARK_JENKINS"):
         set_title_and_block("Running PySpark packaging tests", 
"BLOCK_PYSPARK_PIP_TESTS")
         command = [os.path.join(SPARK_HOME, "dev", "run-pip-tests")]
         run_cmd(command)
@@ -500,17 +500,13 @@ def main():
         else:
             print("Cannot install SparkR as R was not found in PATH")
 
-    if os.environ.get("AMPLAB_JENKINS"):
+    if os.environ.get("SPARK_JENKINS"):
         # if we're on the Amplab Jenkins build servers setup variables
         # to reflect the environment settings
-        build_tool = os.environ.get("AMPLAB_JENKINS_BUILD_TOOL", "sbt")
-        scala_version = os.environ.get("AMPLAB_JENKINS_BUILD_SCALA_PROFILE")
-        hadoop_version = os.environ.get("AMPLAB_JENKINS_BUILD_PROFILE", 
"hadoop3")
-        test_env = "amplab_jenkins"
-        # add path for Python3 in Jenkins if we're calling from a Jenkins 
machine
-        # TODO(sknapp):  after all builds are ported to the ubuntu workers, 
change this to be:
-        # /home/jenkins/anaconda2/envs/py36/bin
-        os.environ["PATH"] = "/home/anaconda/envs/py36/bin:" + 
os.environ.get("PATH")
+        build_tool = os.environ.get("SPARK_JENKINS_BUILD_TOOL", "sbt")
+        scala_version = os.environ.get("SPARK_JENKINS_BUILD_SCALA_PROFILE")
+        hadoop_version = os.environ.get("SPARK_JENKINS_BUILD_PROFILE", 
"hadoop3")
+        test_env = "spark_jenkins"
     else:
         # else we're running locally or GitHub Actions.
         build_tool = "sbt"
@@ -567,9 +563,9 @@ def main():
             print("[info] There are no modules to test, exiting without 
testing.")
             return
 
-    # If we're running the tests in AMPLab Jenkins, calculate the diff from 
the targeted branch, and
+    # If we're running the tests in Jenkins, calculate the diff from the 
targeted branch, and
     # detect modules to test.
-    elif test_env == "amplab_jenkins" and os.environ.get("AMP_JENKINS_PRB"):
+    elif os.environ.get("SPARK_JENKINS_PRB"):
         target_branch = os.environ["ghprbTargetBranch"]
         changed_files = identify_changed_files_from_git_commits("HEAD", 
target_branch=target_branch)
         changed_modules = determine_modules_for_files(changed_files)
@@ -630,12 +626,12 @@ def main():
         ):
             run_sparkr_style_checks()
 
-    # determine if docs were changed and if we're inside the amplab environment
+    # determine if docs were changed and if we're inside the jenkins 
environment
     # note - the below commented out until *all* Jenkins workers can get the 
Bundler gem installed
-    # if "DOCS" in changed_modules and test_env == "amplab_jenkins":
+    # if "DOCS" in changed_modules and test_env == "spark_jenkins":
     #    build_spark_documentation()
 
-    if any(m.should_run_build_tests for m in test_modules) and test_env != 
"amplab_jenkins":
+    if any(m.should_run_build_tests for m in test_modules) and test_env != 
"spark_jenkins":
         run_build_tests()
 
     # spark build


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to