This is an automated email from the ASF dual-hosted git repository.
zjffdu pushed a commit to branch branch-0.9
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/branch-0.9 by this push:
new 00a2bd5 [ZEPPELIN-4895] Remove CI build for spark under 2.2
00a2bd5 is described below
commit 00a2bd577c830b31be038e46092a53f5dbc7c79a
Author: Jeff Zhang <[email protected]>
AuthorDate: Fri Jun 19 15:43:52 2020 +0800
[ZEPPELIN-4895] Remove CI build for spark under 2.2
### What is this PR for?
Just remove CI build for spark under 2.2
### What type of PR is it?
[ Improvement ]
### Todos
* [ ] - Task
### What is the Jira issue?
https://issues.apache.org/jira/browse/ZEPPELIN-4895
### How should this be tested?
* CI pass
### Screenshots (if appropriate)
### Questions:
* Do the license files need updating? No
* Are there breaking changes for older versions? No
* Does this need documentation? No
Author: Jeff Zhang <[email protected]>
Closes #3871 from zjffdu/ZEPPELIN-4895 and squashes the following commits:
5302b57f8 [Jeff Zhang] [ZEPPELIN-4895] Remove CI build for spark under 2.2
(cherry picked from commit e94d439604209545224270b871c5979772457535)
Signed-off-by: Jeff Zhang <[email protected]>
---
.travis.yml | 22 +---------------------
1 file changed, 1 insertion(+), 21 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 23c1179..33ed2a9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -117,11 +117,6 @@ jobs:
dist: xenial
env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Phadoop2
-Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat"
TEST_FLAG="test -DskipRat" MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown
-am"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest23,SparkIntegrationTest23,ZeppelinSparkClusterTest22,SparkIntegrationTest22
-DfailIfNoTests=false"
- # Run spark integration of in one zeppelin instance (2.1, 2.0, 1.6)
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Phadoop2
-Pintegration" R="true" BUILD_FLAG="install -DskipTests -DskipRat"
TEST_FLAG="test -DskipRat" MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies,markdown
-am"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest21,SparkIntegrationTest21,ZeppelinSparkClusterTest20,SparkIntegrationTest20,ZeppelinSparkClusterTest16,SparkIntegrationTest16
-DfailIfNoTests=false"
-
# JdbcIntegrationTest, Unit test of Spark 2.4 (Scala-2.11)
- jdk: "openjdk8"
dist: xenial
@@ -132,7 +127,7 @@ jobs:
dist: xenial
env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.12"
PROFILE="-Pspark-2.4 -Pspark-scala-2.12 -Phadoop2" R="true" BUILD_FLAG="install
-DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl
spark/spark-dependencies -am"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.kotlin.*
-DfailIfNoTests=false"
- # Unit test of Spark 2.3 (Scala-2.11) and Unit test python, jupyter and r
interpreter under python2
+ # Unit test of Spark 2.3 (Scala-2.11) and Unit test python, jupyter and r
interpreter under python3
- jdk: "openjdk8"
dist: xenial
env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.11"
PROFILE="-Pspark-2.3 -Pspark-scala-2.11 -Phadoop2" R="true" BUILD_FLAG="install
-DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl
spark/spark-dependencies -am"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.*
-DfailIfNoTests=false"
@@ -142,21 +137,6 @@ jobs:
dist: xenial
env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.10"
PROFILE="-Pspark-2.2 -Pspark-scala-2.10 -Phadoop2" R="true" BUILD_FLAG="install
-DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl
spark/spark-dependencies -am"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,apache.zeppelin.python.*,apache.zeppelin.jupyter.*,apache.zeppelin.r.*
-DfailIfNoTests=false"
- # Unit test of Spark 2.1 (Scala-2.10)
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="false" PYTHON="3" SCALA_VER="2.10"
PROFILE="-Pspark-2.1 -Phadoop2 -Pspark-scala-2.10 -Pintegration" R="true"
BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat"
MODULES="-pl spark/spark-dependencies -am"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false"
-
- # Unit test of Spark 2.0 (Scala-2.10), Use python 3.5 because spark 2.0
doesn't support python 3.6 +
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.10"
PROFILE="-Pspark-2.0 -Phadoop2 -Pspark-scala-2.10" R="true" BUILD_FLAG="install
-DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl
spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
-
- # Unit test of Spark 1.6 (Scala-2.10)
- - jdk: "openjdk8"
- dist: xenial
- env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.10"
PROFILE="-Pspark-1.6 -Phadoop2 -Pspark-scala-2.10" R="true" BUILD_FLAG="install
-DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl
spark/spark-dependencies -am" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
-
# Test python/pyspark with python 2, livy 0.5
- dist: xenial
jdk: "openjdk8"