Repository: incubator-predictionio
Updated Branches:
  refs/heads/develop bab594a0b -> f32b94156
[PIO-53] Per-commit unit and integration tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/f32b9415
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/f32b9415
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/f32b9415

Branch: refs/heads/develop
Commit: f32b941563eeba44d5d8b8299e404e98db69dec0
Parents: bab594a
Author: Donald Szeto <[email protected]>
Authored: Sun Feb 26 15:09:36 2017 -0800
Committer: Donald Szeto <[email protected]>
Committed: Sun Feb 26 15:09:36 2017 -0800

----------------------------------------------------------------------
 .travis.yml                                     |  5 +++
 .../predictionio/workflow/WorkflowUtils.scala   | 46 +++++++++++---------
 tests/Dockerfile                                |  4 +-
 tests/docker-files/init.sh                      |  3 +-
 tests/docker-files/wait-for-postgres.sh         |  5 ---
 .../org/apache/predictionio/tools/Runner.scala  | 14 +++---
 6 files changed, 42 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/f32b9415/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 22138fc..b763e2d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -51,8 +51,13 @@ env:

 before_install:
   - unset SBT_OPTS JVM_OPTS
+  - sudo rm /usr/local/bin/docker-compose
+  - curl -L https://github.com/docker/compose/releases/download/1.11.1/docker-compose-`uname -s`-`uname -m` > docker-compose
+  - chmod +x docker-compose
+  - sudo mv docker-compose /usr/local/bin

 before_script:
+  - docker-compose -v
   - ./tests/before_script.travis.sh

 script:
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/f32b9415/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
index 93a676a..0e578be 100644
--- a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
+++ b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
@@ -19,28 +19,17 @@ package org.apache.predictionio.workflow

 import java.io.File
-import java.io.FileNotFoundException
-
-import org.apache.predictionio.controller.EmptyParams
-import org.apache.predictionio.controller.EngineFactory
-import org.apache.predictionio.controller.EngineParamsGenerator
-import org.apache.predictionio.controller.Evaluation
-import org.apache.predictionio.controller.Params
-import org.apache.predictionio.controller.PersistentModelLoader
-import org.apache.predictionio.controller.Utils
-import org.apache.predictionio.core.BuildInfo
-
-import com.google.gson.Gson
-import com.google.gson.JsonSyntaxException
+import java.net.URI
+
+import com.google.gson.{Gson, JsonSyntaxException}
 import grizzled.slf4j.Logging
+import org.apache.log4j.{Level, LogManager}
+import org.apache.predictionio.controller._
 import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
-import org.apache.log4j.Level
-import org.apache.log4j.LogManager
 import org.apache.spark.SparkContext
 import org.apache.spark.api.java.JavaRDDLike
 import org.apache.spark.rdd.RDD
 import org.json4s.JsonAST.JValue
-import org.json4s.MappingException
 import org.json4s._
 import org.json4s.native.JsonMethods._

@@ -244,12 +233,12 @@ object WorkflowUtils extends Logging {
       "HADOOP_CONF_DIR" -> "core-site.xml",
       "HBASE_CONF_DIR" -> "hbase-site.xml")

-    thirdPartyFiles.keys.toSeq.map { k: String =>
+    thirdPartyFiles.keys.toSeq.flatMap { k: String =>
       sys.env.get(k) map { x =>
         val p = Seq(x, thirdPartyFiles(k)).mkString(File.separator)
         if (new File(p).exists) Seq(p) else Seq[String]()
       } getOrElse Seq[String]()
-    }.flatten
+    }
   }

   def thirdPartyClasspaths: Seq[String] = {
@@ -260,9 +249,26 @@ object WorkflowUtils extends Logging {
       "MYSQL_JDBC_DRIVER",
       "HADOOP_CONF_DIR",
       "HBASE_CONF_DIR")
-    thirdPartyPaths.map(p =>
+    thirdPartyPaths.flatMap(p =>
       sys.env.get(p).map(Seq(_)).getOrElse(Seq[String]())
-    ).flatten
+    )
+  }
+
+  def thirdPartyJars: Seq[URI] = {
+    val thirdPartyPaths = Seq(
+      "POSTGRES_JDBC_DRIVER",
+      "MYSQL_JDBC_DRIVER")
+    thirdPartyPaths.flatMap(p =>
+      sys.env.get(p) map { f =>
+        val file = new File(f)
+        if (file.exists()) {
+          Seq(file.toURI)
+        } else {
+          warn(s"Environment variable $p is pointing to a nonexistent file $f. Ignoring.")
+          Seq.empty[URI]
+        }
+      } getOrElse Seq.empty[URI]
+    )
   }

   def modifyLogging(verbose: Boolean): Unit = {
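For reference, a minimal self-contained sketch of the pattern the new
thirdPartyJars method uses above: each environment variable contributes zero
or one jar URI, so flatMap collapses the per-key results without the old
map(...).flatten combination. The object name and the env map below are
hypothetical stand-ins for sys.env, for illustration only.

    import java.io.File
    import java.net.URI

    object ThirdPartyJarsSketch extends App {
      // Stand-in for sys.env; point the value at a real file to see a URI emitted.
      val env = Map("POSTGRES_JDBC_DRIVER" -> "/drivers/postgresql-9.4-1204.jdbc41.jar")

      def thirdPartyJars(env: Map[String, String]): Seq[URI] =
        Seq("POSTGRES_JDBC_DRIVER", "MYSQL_JDBC_DRIVER").flatMap { k =>
          env.get(k).toSeq.flatMap { f =>
            val file = new File(f)
            // The committed version also logs a warning for a nonexistent file.
            if (file.exists()) Seq(file.toURI) else Seq.empty[URI]
          }
        }

      println(thirdPartyJars(env))
    }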
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/f32b9415/tests/Dockerfile
----------------------------------------------------------------------
diff --git a/tests/Dockerfile b/tests/Dockerfile
index 9f4decd..dff4fa4 100644
--- a/tests/Dockerfile
+++ b/tests/Dockerfile
@@ -24,7 +24,7 @@ ENV HBASE_VERSION 1.0.0
 ADD docker-files/spark-${SPARK_VERSION}-bin-hadoop2.6.tgz /vendors
 ENV SPARK_HOME /vendors/spark-${SPARK_VERSION}-bin-hadoop2.6

-COPY docker-files/postgresql-9.4-1204.jdbc41.jar /drivers
+COPY docker-files/postgresql-9.4-1204.jdbc41.jar /drivers/postgresql-9.4-1204.jdbc41.jar
 COPY docker-files/init.sh init.sh
 COPY docker-files/env-conf/hbase-site.xml ${PIO_HOME}/conf/hbase-site.xml
 COPY docker-files/env-conf/pio-env.sh ${PIO_HOME}/conf/pio-env.sh
@@ -46,5 +46,7 @@ EXPOSE 8000
 # eventserver
 EXPOSE 7070

+ENV SLEEP_TIME 30
+
 ENTRYPOINT ["/init.sh"]
 CMD 'bash'


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/f32b9415/tests/docker-files/init.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/init.sh b/tests/docker-files/init.sh
index af68f1b..8dc08ea 100755
--- a/tests/docker-files/init.sh
+++ b/tests/docker-files/init.sh
@@ -17,6 +17,7 @@
 #
 set -e

-/wait-for-postgres.sh postgres /bin/true
 export PYTHONPATH=/$PIO_HOME/tests:$PYTHONPATH
+echo "Sleeping $SLEEP_TIME seconds for all services to be ready..."
+sleep $SLEEP_TIME
 eval $@


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/f32b9415/tests/docker-files/wait-for-postgres.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/wait-for-postgres.sh b/tests/docker-files/wait-for-postgres.sh
index 2c7d855..967e5fc 100755
--- a/tests/docker-files/wait-for-postgres.sh
+++ b/tests/docker-files/wait-for-postgres.sh
@@ -18,13 +18,8 @@
 set -e

 host="$1"
-shift
-cmd="$@"

 until psql -h "$host" -U "pio" -c '\l'; do
   >&2 echo "Postgres is unavailable - sleeping"
   sleep 1
 done
-
->&2 echo "Postgres is up - executing command"
-exec $cmd


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/f32b9415/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
index d9752df..f98dda1 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
+++ b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
@@ -15,18 +15,15 @@
  * limitations under the License.
  */

-
 package org.apache.predictionio.tools

 import java.io.File
 import java.net.URI

-import org.apache.predictionio.tools.console.ConsoleArgs
-import org.apache.predictionio.workflow.WorkflowUtils
-import org.apache.predictionio.tools.ReturnTypes._
 import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
+import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.predictionio.tools.ReturnTypes._
+import org.apache.predictionio.workflow.WorkflowUtils

 import scala.sys.process._

@@ -163,8 +160,9 @@ object Runner extends EitherLogging {
     val sparkSubmitCommand =
       Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator))

-    val sparkSubmitJars = if (extraJars.nonEmpty) {
-      Seq("--jars", deployedJars.map(_.toString).mkString(","))
+    val sparkSubmitJarsList = WorkflowUtils.thirdPartyJars ++ deployedJars
+    val sparkSubmitJars = if (sparkSubmitJarsList.nonEmpty) {
+      Seq("--jars", sparkSubmitJarsList.map(_.toString).mkString(","))
     } else {
       Nil
     }
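For context, a short sketch of how Runner.scala now assembles the --jars
argument for spark-submit: driver jars discovered from the environment are
concatenated with the engine's deployed jars, and --jars is emitted only when
the combined list is nonempty. The object name and the URIs below are made up
for illustration; only the variable names come from the commit.

    import java.net.URI

    object SparkSubmitJarsSketch extends App {
      // Hypothetical stand-ins for WorkflowUtils.thirdPartyJars and deployedJars.
      val thirdPartyJars = Seq(new URI("file:///drivers/postgresql-9.4-1204.jdbc41.jar"))
      val deployedJars = Seq(new URI("hdfs:///pio/engines/myengine-assembly.jar"))

      val sparkSubmitJarsList = thirdPartyJars ++ deployedJars
      val sparkSubmitJars =
        if (sparkSubmitJarsList.nonEmpty)
          Seq("--jars", sparkSubmitJarsList.map(_.toString).mkString(","))
        else Nil

      println(sparkSubmitJars.mkString(" "))
      // --jars file:///drivers/postgresql-9.4-1204.jdbc41.jar,hdfs:///pio/engines/myengine-assembly.jar
    }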
