Repository: zeppelin
Updated Branches:
  refs/heads/master 28adacb9c -> 29f840ae7
ZEPPELIN-1597. Change cdh spark/hadoop dependency to apache spark/hadoop

### What is this PR for?
I have seen several Travis failures caused by downloading the cdh spark/hadoop
artifacts for the livy interpreter. This may be due to a cdh repository issue,
so in this PR I change the dependencies to apache spark/hadoop.

### What type of PR is it?
[Improvement]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-1597

### How should this be tested?
No test added

### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <[email protected]>

Closes #1575 from zjffdu/ZEPPELIN-1597 and squashes the following commits:

5077f20 [Jeff Zhang] ZEPPELIN-1597. Change cdh spark/hadoop dependency to apache spark/hadoop


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/29f840ae
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/29f840ae
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/29f840ae

Branch: refs/heads/master
Commit: 29f840ae7cc180ee9651655a539a276d6d07538f
Parents: 28adacb
Author: Jeff Zhang <[email protected]>
Authored: Tue Nov 1 17:41:49 2016 +0800
Committer: Mina Lee <[email protected]>
Committed: Wed Nov 2 18:02:30 2016 +0900

----------------------------------------------------------------------
 livy/pom.xml | 197 ++++++++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 190 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/29f840ae/livy/pom.xml
----------------------------------------------------------------------
diff --git a/livy/pom.xml b/livy/pom.xml
index df2bf77..1e717f8 100644
--- a/livy/pom.xml
+++ b/livy/pom.xml
@@ -41,6 +41,8 @@
     <assertj.version>1.7.0</assertj.version>
     <mockito.version>1.9.5</mockito.version>
     <livy.version>0.2.0</livy.version>
+    <spark.version>1.5.2</spark.version>
+    <hadoop.version>2.6.0</hadoop.version>
   </properties>
 
   <dependencies>
@@ -111,38 +113,218 @@
       <groupId>com.cloudera.livy</groupId>
       <artifactId>livy-integration-test</artifactId>
       <version>${livy.version}</version>
-      <scope>compile</scope>
+      <scope>test</scope>
       <exclusions>
         <exclusion>
           <groupId>org.xerial.snappy</groupId>
           <artifactId>snappy-java</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-core_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-sql_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-streaming_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-hive_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-repl_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-yarn_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-server-tests</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
     <dependency>
       <groupId>com.cloudera.livy</groupId>
       <artifactId>livy-test-lib</artifactId>
       <version>${livy.version}</version>
-      <scope>compile</scope>
+      <scope>test</scope>
       <exclusions>
         <exclusion>
           <groupId>org.xerial.snappy</groupId>
           <artifactId>snappy-java</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-core_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-sql_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-streaming_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-hive_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-repl_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-yarn_2.10</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
     <dependency>
       <groupId>com.cloudera.livy</groupId>
       <artifactId>livy-core</artifactId>
       <version>${livy.version}</version>
-      <scope>compile</scope>
+      <scope>test</scope>
       <exclusions>
         <exclusion>
           <groupId>org.xerial.snappy</groupId>
           <artifactId>snappy-java</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-core_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-sql_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-streaming_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-hive_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-repl_2.10</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-yarn_2.10</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-sql_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-hive_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-repl_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <version>2.6.0</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <repositories>
@@ -250,9 +432,10 @@
             </goals>
             <configuration>
               <target>
-                <delete file="${project.build.directory}/unit-tests.log" quiet="true" />
-                <delete file="${project.build.directory}/jacoco.exec" quiet="true" />
-                <delete dir="${project.build.directory}/tmp" quiet="true" />
+                <delete file="${project.build.directory}/unit-tests.log"
+                        quiet="true"/>
+                <delete file="${project.build.directory}/jacoco.exec" quiet="true"/>
+                <delete dir="${project.build.directory}/tmp" quiet="true"/>
               </target>
             </configuration>
           </execution>
@@ -265,7 +448,7 @@
             </goals>
             <configuration>
               <target>
-                <mkdir dir="${project.build.directory}/tmp" />
+                <mkdir dir="${project.build.directory}/tmp"/>
               </target>
             </configuration>
          </execution>
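
For context, the pattern the diff applies is standard Maven dependency
management: exclude the cdh-built spark/hadoop artifacts that the livy test
dependencies pull in transitively, then declare the apache-built equivalents
explicitly with test scope, pinned through the new spark.version and
hadoop.version properties. Below is a minimal sketch of that pattern, reduced
to a single exclusion and a single replacement; the full exclusion lists are
in the diff above, and the indentation here is illustrative:

    <dependency>
      <groupId>com.cloudera.livy</groupId>
      <artifactId>livy-core</artifactId>
      <version>${livy.version}</version>
      <scope>test</scope>
      <exclusions>
        <!-- sketch: keep the cdh-built transitive artifact off the test classpath -->
        <exclusion>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-sql_2.10</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <!-- sketch: re-add the same artifact from the apache repositories, pinned by property -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>test</scope>
    </dependency>

With the exclusions in place, running mvn dependency:tree -pl livy should show
the org.apache.spark and org.apache.hadoop test artifacts resolving to the
plain apache versions (1.5.2 and 2.6.0) rather than cdh-suffixed ones.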
