This is an automated email from the ASF dual-hosted git repository.

akm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/mahout.git


The following commit(s) were added to refs/heads/master by this push:
     new 70f8c90  NOJIRA: Removing all travis profiles but one, for Spark 2.0 and Scala 2.11.
70f8c90 is described below

commit 70f8c9024fedf94ce6902e94dfaad7990f1b2542
Author: Andrew Musselman <[email protected]>
AuthorDate: Fri Jan 11 11:40:01 2019 -0800

    NOJIRA: Removing all travis profiles but one, for Spark 2.0 and Scala 2.11.
---
 .travis.yml | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 8ad2458..06c978e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -43,40 +43,40 @@ env:
 matrix:
   include:
     # Build Spark 1.6.3 , Scala 2.10
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pscala-2.10 -Pspark-1.6" SPARK_BIN=$SPARK_1_6 SCALA_VERSION="2.10"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pscala-2.10 -Pspark-1.6" SPARK_BIN=$SPARK_1_6 SCALA_VERSION="2.10"
 
     # Build Spark 2.0.2 , Scala 2.11 - replace -D... with profiles when available
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pspark-2.0 -Pscala-2.11" SPARK_BIN=$SPARK_2_0 SCALA_VERSION="2.11"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pspark-2.0 -Pscala-2.11" SPARK_BIN=$SPARK_2_0 SCALA_VERSION="2.11"
 
     # Build Spark 2.1.0 , Scala 2.11 - replace -D... with profiles when available
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pspark-2.1 -Pscala-2.11" SPARK_BIN=$SPARK_2_1 SCALA_VERSION="2.11"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pspark-2.1 -Pscala-2.11" SPARK_BIN=$SPARK_2_1 SCALA_VERSION="2.11"
 
     # Build Spark 1.6.3 , Scala 2.10, ViennaCL
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pscala-2.10 -Pviennacl" SPARK_BIN=$SPARK_1_6 SCALA_VERSION="2.10"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pscala-2.10 -Pviennacl" SPARK_BIN=$SPARK_1_6 SCALA_VERSION="2.10"
 
     # Build Spark 2.0.2 , Scala 2.11, ViennaCL - replace -D... with profiles when available
     - jdk: "openjdk7"
       env: PROFILES="${PROFILES} -Pspark-2.0 -Pscala-2.11 -Pviennacl" SPARK_BIN=$SPARK_2_0 SCALA_VERSION="2.11"
 
     # Build Spark 2.1.0 , Scala 2.11, ViennaCL - replace -D... with profiles when available
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pspark-2.1 -Pscala-2.11 -Pviennacl" SPARK_BIN=$SPARK_2_1 SCALA_VERSION="2.11"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pspark-2.1 -Pscala-2.11 -Pviennacl" SPARK_BIN=$SPARK_2_1 SCALA_VERSION="2.11"
 
     # Build Spark 1.6.3 , Scala 2.10, ViennaCL-OMP
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pscala-2.10 -Pviennacl-omp" TEST_MODULES="${TEST_MODULES},viennacl-omp" SPARK_BIN=$SPARK_1_6 SCALA_VERSION="2.10"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pscala-2.10 -Pviennacl-omp" TEST_MODULES="${TEST_MODULES},viennacl-omp" SPARK_BIN=$SPARK_1_6 SCALA_VERSION="2.10"
 
     # Build Spark 2.0.2 , Scala 2.11, ViennaCL-OMP - replace -D... with profiles when available
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pspark-2.0 -Pscala-2.11 -Pviennacl-omp" TEST_MODULES="${TEST_MODULES},viennacl-omp" SPARK_BIN=$SPARK_2_0 SCALA_VERSION="2.11"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pspark-2.0 -Pscala-2.11 -Pviennacl-omp" TEST_MODULES="${TEST_MODULES},viennacl-omp" SPARK_BIN=$SPARK_2_0 SCALA_VERSION="2.11"
 
     # Build Spark 2.1.0 , Scala 2.11, ViennaCL-OMP - replace -D... with profiles when available
-    - jdk: "openjdk7"
-      env: PROFILES="${PROFILES} -Pspark-2.1 -Pscala-2.11 -Pviennacl-omp" TEST_MODULES="${TEST_MODULES},viennacl-omp" SPARK_BIN=$SPARK_2_1 SCALA_VERSION="2.11"
+    #- jdk: "openjdk7"
+    #  env: PROFILES="${PROFILES} -Pspark-2.1 -Pscala-2.11 -Pviennacl-omp" TEST_MODULES="${TEST_MODULES},viennacl-omp" SPARK_BIN=$SPARK_2_1 SCALA_VERSION="2.11"
 
 git:
   depth: 10

Reply via email to