felixcheung commented on a change in pull request #3306: [ZEPPELIN-4001]
zeppelin-interpreter-integration is ignored due to wrong folder structure
URL: https://github.com/apache/zeppelin/pull/3306#discussion_r257109427
##########
File path: .travis.yml
##########
@@ -87,35 +91,43 @@ matrix:
dist: trusty
env: PYTHON="3" SPARKR="true" SCALA_VER="2.10" PROFILE="-Pscala-2.10"
BUILD_FLAG="install -DskipTests -DskipRat" TEST_FLAG="test -DskipRat"
MODULES="-pl $(echo
.,zeppelin-interpreter,zeppelin-interpreter-api,${INTERPRETERS} | sed
's/!//g')" TEST_PROJECTS=""
- # Run ZeppelinSparkClusterTest & SparkIntegrationTest in one build would
exceed the time limitation of travis, so running them separately
+ # Run Spark integration test and unit test separately for each spark
version
- # Integration test of spark interpreter with different spark versions
under python2, only run ZeppelinSparkClusterTest. Also run spark unit test of
spark 2.4.0 in this build. And run JdbcIntegrationTest here as well.
+ # ZeppelinSparkClusterTest24, SparkIntegrationTest24, JdbcIntegrationTest,
Unit test of Spark 2.4
- sudo: required
jdk: "oraclejdk8"
dist: trusty
- env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11"
PROFILE="-Pspark-2.4 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,jdbc,spark/interpreter,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,JdbcIntegrationTest,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
+ env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11"
PROFILE="-Pspark-2.4 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,JdbcIntegrationTest,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
- # Integration test of spark interpreter with different spark versions
under python3, only run SparkIntegrationTestPt1. Also run spark unit test of
spark 2.3 in this build.
+ # ZeppelinSparkClusterTest23, SparkIntegrationTest23, Unit test of Spark
2.3
- sudo: required
jdk: "oraclejdk8"
dist: trusty
- env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11"
PROFILE="-Pspark-2.3 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,spark/interpreter,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=SparkIntegrationTestPt1,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
+ env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11"
PROFILE="-Pspark-2.3 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest23,SparkIntegrationTest23,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
- # Integration test of spark interpreter with different spark versions
under python3, only run SparkIntegrationTestPt2. Also run spark unit test of
spark 2.2 in this build.
+ # ZeppelinSparkClusterTest22, SparkIntegrationTest22, Unit test of Spark
2.2
- sudo: required
jdk: "oraclejdk8"
dist: trusty
- env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10"
PROFILE="-Pspark-2.2 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,spark/interpreter,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=SparkIntegrationTestPt2,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
+ env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11"
PROFILE="-Pspark-2.2 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest22,SparkIntegrationTest22,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
- # Test spark module for 2.1 with scala 2.10
- - jdk: "oraclejdk8"
+ # ZeppelinSparkClusterTest21, SparkIntegrationTest21, Unit test of Spark
2.1
+ - sudo: required
+ jdk: "oraclejdk8"
dist: trusty
- env: PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-2.1 -Phadoop2
-Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am"
TEST_FLAG="test -DskipRat -am" MODULES="-pl
spark/interpreter,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.*
-DfailIfNoTests=false"
+ env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10"
PROFILE="-Pspark-2.1 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest21,SparkIntegrationTest21,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
- # Test spark module for 1.6 with scala 2.10
- - jdk: "oraclejdk8"
+ # ZeppelinSparkClusterTest20, SparkIntegrationTest20, Unit test of Spark
2.0
+ - sudo: required
+ jdk: "oraclejdk8"
+ dist: trusty
+ env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10"
PROFILE="-Pspark-2.0 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest20,SparkIntegrationTest20,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
+
+ # ZeppelinSparkClusterTest16, SparkIntegrationTest16, Unit test of Spark
1.6
+ - sudo: required
+ jdk: "oraclejdk8"
dist: trusty
- env: PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Phadoop2
-Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am"
TEST_FLAG="test -DskipRat -am" MODULES="-pl
spark/interpreter,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.*
-DfailIfNoTests=false"
+ env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10"
PROFILE="-Pspark-1.6 -Phadoop2 -Pscala-2.10 -Pintegration" SPARKR="true"
BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am"
MODULES="-pl
zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest16,SparkIntegrationTest16,org.apache.zeppelin.spark.*
-DfailIfNoTests=false"
Review comment:
bottom of https://spark.apache.org/versioning-policy.html
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services