Repository: incubator-predictionio
Updated Branches:
  refs/heads/develop cb1462592 -> c6c1ac3b2
[PIO-53] Convert unit tests to run in Docker as well

Unit tests no longer require a sudo Travis environment, so tests can start sooner. The sbt launcher has also been updated.

Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/c6c1ac3b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/c6c1ac3b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/c6c1ac3b

Branch: refs/heads/develop
Commit: c6c1ac3b2d023a830790a8d9f8080bcc4f907f7f
Parents: cb14625
Author: Donald Szeto <[email protected]>
Authored: Fri Feb 17 09:50:31 2017 -0800
Committer: Donald Szeto <[email protected]>
Committed: Fri Feb 17 09:50:31 2017 -0800

----------------------------------------------------------------------
 .travis.yml                                     |  31 +-
 Dockerfile                                      |  28 +
 LICENSE.txt                                     |   2 +-
 build.sbt                                       |  20 +-
 make-distribution.sh                            |   1 -
 sbt/sbt                                         | 606 ++++++++++++++++---
 sbt/sbt-launch-lib.bash                         | 244 --------
 tests/Dockerfile                                |  32 +-
 tests/Dockerfile.base                           |  42 ++
 tests/build-docker.sh                           |   9 +
 tests/docker-compose.yml                        |   2 +-
 tests/docker-files/env-conf/hbase-site.xml      |  10 -
 tests/docker-files/env-conf/pio-env.sh          |   6 +-
 tests/docker-files/env-conf/spark-defaults.conf |  13 -
 tests/docker-files/env-conf/spark-env.sh        |  49 --
 tests/script.travis.sh                          |  21 +-
 tests/unit.sh                                   |  33 +
 17 files changed, 643 insertions(+), 506 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 5e11d1a..6a13b9a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -27,49 +27,20 @@ branches: except: - livedoc -language: scala - -scala: - - 2.10.5 - -jdk: - - oraclejdk8 - -addons: - postgresql: "9.3" - apt: - packages: - - haveged - services: - docker -sudo: required - -# cache: -# directories: -# - $HOME/.ivy2/cache -# - $HOME/.sbt/boot -# before_cache: -# - find $HOME/.ivy2 -name "ivydata-*.properties" -delete -# - find $HOME/.sbt -name "*.lock" -delete +sudo: false cache: false env: - global: - - PIO_HOME=`pwd` - matrix: - BUILD_TYPE=Unit - BUILD_TYPE=Integration METADATA_REP=PGSQL EVENTDATA_REP=PGSQL MODELDATA_REP=PGSQL - BUILD_TYPE=Integration METADATA_REP=ELASTICSEARCH EVENTDATA_REP=HBASE MODELDATA_REP=LOCALFS - BUILD_TYPE=Integration METADATA_REP=ELASTICSEARCH EVENTDATA_REP=PGSQL MODELDATA_REP=HDFS -before_install: - - unset SBT_OPTS JVM_OPTS - - service haveged start - before_script: - ./tests/before_script.travis.sh

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/Dockerfile
----------------------------------------------------------------------
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..2a7e9c2
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,28 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. AT +# THIS POINT, THIS IS ONLY INTENDED FOR USE IN AUTOMATED TESTS. IF YOU +# ARE LOOKING TO DEPLOY PREDICTIONIO WITH DOCKER, PLEASE REFER TO +# http://predictionio.incubator.apache.org/community/projects/#docker-installation-for-predictionio + +FROM predictionio/pio-testing-base + +# Include the entire code tree +ENV PIO_HOME /PredictionIO +ENV PATH ${PIO_HOME}/bin/:${PATH} +ADD . ${PIO_HOME} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/LICENSE.txt ---------------------------------------------------------------------- diff --git a/LICENSE.txt b/LICENSE.txt index 36565a1..83e7390 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -243,7 +243,7 @@ either expressed or implied, of the FreeBSD Project. ================================================================================ -For sbt and sbt-launch-lib.bash in sbt/: +For sbt in sbt/: ================================================================================ // Generated from http://www.opensource.org/licenses/bsd-license.php http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/build.sbt ---------------------------------------------------------------------- diff --git a/build.sbt b/build.sbt index fc09ee5..eeb3724 100644 --- a/build.sbt +++ b/build.sbt @@ -50,42 +50,38 @@ val pioBuildInfoSettings = buildInfoSettings ++ Seq( sparkVersion), buildInfoPackage := "org.apache.predictionio.core") -val conf = file(".") / "conf" +val conf = file("conf") val commonSettings = Seq( - autoAPIMappings := true) + autoAPIMappings := true, + unmanagedClasspath in Test += conf) val common = (project in file("common")). settings(commonSettings: _*). - settings(genjavadocSettings: _*). - settings(unmanagedClasspath in Test += conf) + settings(genjavadocSettings: _*) val data = (project in file("data")). dependsOn(common). settings(commonSettings: _*). - settings(genjavadocSettings: _*). - settings(unmanagedClasspath in Test += conf) + settings(genjavadocSettings: _*) val core = (project in file("core")). dependsOn(data). settings(commonSettings: _*). settings(genjavadocSettings: _*). settings(pioBuildInfoSettings: _*). - enablePlugins(SbtTwirl). - settings(unmanagedClasspath in Test += conf) + enablePlugins(SbtTwirl) val tools = (project in file("tools")). dependsOn(core). dependsOn(data). settings(commonSettings: _*). settings(genjavadocSettings: _*). - enablePlugins(SbtTwirl). - settings(unmanagedClasspath in Test += conf) + enablePlugins(SbtTwirl) val e2 = (project in file("e2")). settings(commonSettings: _*). - settings(genjavadocSettings: _*). - settings(unmanagedClasspath in Test += conf) + settings(genjavadocSettings: _*) val root = (project in file(".")). settings(commonSettings: _*). 
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/make-distribution.sh ---------------------------------------------------------------------- diff --git a/make-distribution.sh b/make-distribution.sh index a6accdb..23769fe 100755 --- a/make-distribution.sh +++ b/make-distribution.sh @@ -54,7 +54,6 @@ touch ${DISTDIR}/RELEASE TARNAME="PredictionIO-$VERSION.tar.gz" TARDIR="PredictionIO-$VERSION" cp -r ${DISTDIR} ${TARDIR} -cp -r ${DISTDIR} tests/ tar zcvf ${TARNAME} ${TARDIR} rm -rf ${TARDIR} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/sbt/sbt ---------------------------------------------------------------------- diff --git a/sbt/sbt b/sbt/sbt index 8d2cafb..98d2ec9 100755 --- a/sbt/sbt +++ b/sbt/sbt @@ -1,150 +1,568 @@ #!/usr/bin/env bash +# +# A more capable sbt runner, coincidentally also called sbt. +# Author: Paul Phillips <[email protected]> +set -o pipefail -### ------------------------------- ### -### Helper methods for BASH scripts ### -### ------------------------------- ### +declare -r sbt_release_version="0.13.13" +declare -r sbt_unreleased_version="0.13.13" -realpath () { -( - TARGET_FILE="$1" - FIX_CYGPATH="$2" +declare -r latest_212="2.12.1" +declare -r latest_211="2.11.8" +declare -r latest_210="2.10.6" +declare -r latest_29="2.9.3" +declare -r latest_28="2.8.2" - cd "$(dirname "$TARGET_FILE")" - TARGET_FILE=$(basename "$TARGET_FILE") +declare -r buildProps="project/build.properties" - COUNT=0 - while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ] - do - TARGET_FILE=$(readlink "$TARGET_FILE") - cd "$(dirname "$TARGET_FILE")" - TARGET_FILE=$(basename "$TARGET_FILE") - COUNT=$(($COUNT + 1)) - done +declare -r sbt_launch_ivy_release_repo="http://repo.typesafe.com/typesafe/ivy-releases" +declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" +declare -r sbt_launch_mvn_release_repo="http://repo.scala-sbt.org/scalasbt/maven-releases" +declare -r sbt_launch_mvn_snapshot_repo="http://repo.scala-sbt.org/scalasbt/maven-snapshots" + +declare -r default_jvm_opts_common="-Xms512m -Xmx1536m -Xss2m" +declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" + +declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new +declare sbt_explicit_version +declare verbose noshare batch trace_level +declare sbt_saved_stty debugUs + +declare java_cmd="java" +declare sbt_launch_dir="$HOME/.sbt/launchers" +declare sbt_launch_repo + +# pull -J and -D options to give to java. +declare -a java_args scalac_args sbt_commands residual_args + +# args to jvm/sbt via files or environment variables +declare -a extra_jvm_opts extra_sbt_opts + +echoerr () { echo >&2 "$@"; } +vlog () { [[ -n "$verbose" ]] && echoerr "$@"; } +die () { echo "Aborting: $@" ; exit 1; } + +# restore stty settings (echo in particular) +onSbtRunnerExit() { + [[ -n "$sbt_saved_stty" ]] || return + vlog "" + vlog "restoring stty: $sbt_saved_stty" + stty "$sbt_saved_stty" + unset sbt_saved_stty +} + +# save stty and trap exit, to ensure echo is re-enabled if we are interrupted. +trap onSbtRunnerExit EXIT +sbt_saved_stty="$(stty -g 2>/dev/null)" +vlog "Saved stty: $sbt_saved_stty" + +# this seems to cover the bases on OSX, and someone will +# have to tell me about the others. +get_script_path () { + local path="$1" + [[ -L "$path" ]] || { echo "$path" ; return; } - # make sure we grab the actual windows path, instead of cygwin's path. 
- if [[ "x$FIX_CYGPATH" != "x" ]]; then - echo "$(cygwinpath "$(pwd -P)/$TARGET_FILE")" + local target="$(readlink "$path")" + if [[ "${target:0:1}" == "/" ]]; then + echo "$target" else - echo "$(pwd -P)/$TARGET_FILE" + echo "${path%/*}/$target" fi -) } +declare -r script_path="$(get_script_path "$BASH_SOURCE")" +declare -r script_name="${script_path##*/}" + +init_default_option_file () { + local overriding_var="${!1}" + local default_file="$2" + if [[ ! -r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then + local envvar_file="${BASH_REMATCH[1]}" + if [[ -r "$envvar_file" ]]; then + default_file="$envvar_file" + fi + fi + echo "$default_file" +} + +declare sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)" +declare jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)" + +build_props_sbt () { + [[ -r "$buildProps" ]] && \ + grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }' +} + +update_build_props_sbt () { + local ver="$1" + local old="$(build_props_sbt)" -# Uses uname to detect if we're in the odd cygwin environment. -is_cygwin() { - local os=$(uname -s) - case "$os" in - CYGWIN*) return 0 ;; - *) return 1 ;; + [[ -r "$buildProps" ]] && [[ "$ver" != "$old" ]] && { + perl -pi -e "s/^sbt\.version\b.*\$/sbt.version=${ver}/" "$buildProps" + grep -q '^sbt.version[ =]' "$buildProps" || printf "\nsbt.version=%s\n" "$ver" >> "$buildProps" + + vlog "!!!" + vlog "!!! Updated file $buildProps setting sbt.version to: $ver" + vlog "!!! Previous value was: $old" + vlog "!!!" + } +} + +set_sbt_version () { + sbt_version="${sbt_explicit_version:-$(build_props_sbt)}" + [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version + export sbt_version +} + +url_base () { + local version="$1" + + case "$version" in + 0.7.*) echo "http://simple-build-tool.googlecode.com" ;; + 0.10.* ) echo "$sbt_launch_ivy_release_repo" ;; + 0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;; + 0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" + echo "$sbt_launch_ivy_snapshot_repo" ;; + 0.*) echo "$sbt_launch_ivy_release_repo" ;; + *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" + echo "$sbt_launch_mvn_snapshot_repo" ;; + *) echo "$sbt_launch_mvn_release_repo" ;; + esac +} + +make_url () { + local version="$1" + + local base="${sbt_launch_repo:-$(url_base "$version")}" + + case "$version" in + 0.7.*) echo "$base/files/sbt-launch-0.7.7.jar" ;; + 0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; + 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; + 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; + *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; esac } -# TODO - Use nicer bash-isms here. 
-CYGWIN_FLAG=$(if is_cygwin; then echo true; else echo false; fi) +addJava () { vlog "[addJava] arg = '$1'" ; java_args+=("$1"); } +addSbt () { vlog "[addSbt] arg = '$1'" ; sbt_commands+=("$1"); } +addScalac () { vlog "[addScalac] arg = '$1'" ; scalac_args+=("$1"); } +addResidual () { vlog "[residual] arg = '$1'" ; residual_args+=("$1"); } + +addResolver () { addSbt "set resolvers += $1"; } +addDebugger () { addJava "-Xdebug" ; addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } +setThisBuild () { + vlog "[addBuild] args = '$@'" + local key="$1" && shift + addSbt "set $key in ThisBuild := $@" +} +setScalaVersion () { + [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")' + addSbt "++ $1" +} +setJavaHome () { + java_cmd="$1/bin/java" + setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))" + export JAVA_HOME="$1" + export JDK_HOME="$1" + export PATH="$JAVA_HOME/bin:$PATH" +} + +getJavaVersion() { "$1" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d \"; } +checkJava() { + # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME + + [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" + [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" + + if [[ -n "$java" ]]; then + pathJavaVersion=$(getJavaVersion java) + homeJavaVersion=$(getJavaVersion "$java") + if [[ "$pathJavaVersion" != "$homeJavaVersion" ]]; then + echoerr "Warning: Java version mismatch between PATH and JAVA_HOME/JDK_HOME, sbt will use the one in PATH" + echoerr " Either: fix your PATH, remove JAVA_HOME/JDK_HOME or use -java-home" + echoerr " java version from PATH: $pathJavaVersion" + echoerr " java version from JAVA_HOME/JDK_HOME: $homeJavaVersion" + fi + fi +} -# This can fix cygwin style /cygdrive paths so we get the -# windows style paths. -cygwinpath() { - local file="$1" - if [[ "$CYGWIN_FLAG" == "true" ]]; then - echo $(cygpath -w $file) +java_version () { + local version=$(getJavaVersion "$java_cmd") + vlog "Detected Java version: $version" + echo "${version:2:1}" +} + +# MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+ +default_jvm_opts () { + local v="$(java_version)" + if [[ $v -ge 8 ]]; then + echo "$default_jvm_opts_common" else - echo $file + echo "-XX:MaxPermSize=384m $default_jvm_opts_common" fi } -. 
"$(dirname "$(realpath "$0")")/sbt-launch-lib.bash" +build_props_scala () { + if [[ -r "$buildProps" ]]; then + versionLine="$(grep '^build.scala.versions' "$buildProps")" + versionString="${versionLine##build.scala.versions=}" + echo "${versionString%% .*}" + fi +} +execRunner () { + # print the arguments one to a line, quoting any containing spaces + vlog "# Executing command line:" && { + for arg; do + if [[ -n "$arg" ]]; then + if printf "%s\n" "$arg" | grep -q ' '; then + printf >&2 "\"%s\"\n" "$arg" + else + printf >&2 "%s\n" "$arg" + fi + fi + done + vlog "" + } + + [[ -n "$batch" ]] && exec </dev/null + exec "$@" +} -declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" -declare -r sbt_opts_file=".sbtopts" -declare -r etc_sbt_opts_file="${sbt_home}/conf/sbtopts" -declare -r win_sbt_opts_file="${sbt_home}/conf/sbtconfig.txt" +jar_url () { make_url "$1"; } + +is_cygwin () [[ "$(uname -a)" == "CYGWIN"* ]] + +jar_file () { + is_cygwin \ + && echo "$(cygpath -w $sbt_launch_dir/"$1"/sbt-launch.jar)" \ + || echo "$sbt_launch_dir/$1/sbt-launch.jar" +} -usage() { - cat <<EOM +download_url () { + local url="$1" + local jar="$2" + + echoerr "Downloading sbt launcher for $sbt_version:" + echoerr " From $url" + echoerr " To $jar" + + mkdir -p "${jar%/*}" && { + if which curl >/dev/null; then + curl --fail --silent --location "$url" --output "$jar" + elif which wget >/dev/null; then + wget -q -O "$jar" "$url" + fi + } && [[ -r "$jar" ]] +} + +acquire_sbt_jar () { + { + sbt_jar="$(jar_file "$sbt_version")" + [[ -r "$sbt_jar" ]] + } || { + sbt_jar="$HOME/.ivy2/local/org.scala-sbt/sbt-launch/$sbt_version/jars/sbt-launch.jar" + [[ -r "$sbt_jar" ]] + } || { + sbt_jar="$(jar_file "$sbt_version")" + download_url "$(make_url "$sbt_version")" "$sbt_jar" + } +} + +usage () { + set_sbt_version + cat <<EOM Usage: $script_name [options] +Note that options which are passed along to sbt begin with -- whereas +options to this runner use a single dash. Any sbt command can be scheduled +to run first by prefixing the command with --, so --warn, --error and so on +are not special. + +Output filtering: if there is a file in the home directory called .sbtignore +and this is not an interactive sbt session, the file is treated as a list of +bash regular expressions. Output lines which match any regex are not echoed. +One can see exactly which lines would have been suppressed by starting this +runner with the -x option. 
+ -h | -help print this message - -v | -verbose this runner is chattier - -d | -debug set sbt log level to debug + -v verbose operation (this runner is chattier) + -d, -w, -q aliases for --debug, --warn, --error (q means quiet) + -x debug this script + -trace <level> display stack traces with a max of <level> frames (default: -1, traces suppressed) + -debug-inc enable debugging log for the incremental compiler -no-colors disable ANSI color codes -sbt-create start sbt even if current directory contains no sbt project - -sbt-dir <path> path to global settings/plugins directory (default: ~/.sbt) - -sbt-boot <path> path to shared boot directory (default: ~/.sbt/boot in 0.11 series) + -sbt-dir <path> path to global settings/plugins directory (default: ~/.sbt/<version>) + -sbt-boot <path> path to shared boot directory (default: ~/.sbt/boot in 0.11+) -ivy <path> path to local Ivy repository (default: ~/.ivy2) - -mem <integer> set memory options (default: $sbt_mem, which is $(get_mem_opts $sbt_mem)) -no-share use all local caches; no sharing - -no-global uses global caches, but does not use global ~/.sbt directory. + -offline put sbt in offline mode -jvm-debug <port> Turn on JVM debugging, open at the given port. -batch Disable interactive mode + -prompt <expr> Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted + -script <file> Run the specified file as a scala script - # sbt version (default: from project/build.properties if present, else latest release) - -sbt-version <version> use the specified version of sbt + # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version) + -sbt-force-latest force the use of the latest release of sbt: $sbt_release_version + -sbt-version <version> use the specified version of sbt (default: $sbt_release_version) + -sbt-dev use the latest pre-release version of sbt: $sbt_unreleased_version -sbt-jar <path> use the specified jar as the sbt launcher - -sbt-rc use an RC version of sbt - -sbt-snapshot use a snapshot version of sbt + -sbt-launch-dir <path> directory to hold sbt launchers (default: $sbt_launch_dir) + -sbt-launch-repo <url> repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version")) + + # scala version (default: as chosen by sbt) + -28 use $latest_28 + -29 use $latest_29 + -210 use $latest_210 + -211 use $latest_211 + -212 use $latest_212 + -scala-home <path> use the scala build at the specified directory + -scala-version <version> use the specified version of scala + -binary-version <version> use the specified scala version when searching for dependencies # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) -java-home <path> alternate JAVA_HOME - # jvm options and output control - JAVA_OPTS environment variable, if unset uses "$java_opts" - SBT_OPTS environment variable, if unset uses "$default_sbt_opts" - .sbtopts if this file exists in the current directory, it is - prepended to the runner args - /etc/sbt/sbtopts if this file exists, it is prepended to the runner args - -Dkey=val pass -Dkey=val directly to the java runtime - -J-X pass option -X directly to the java runtime - (-J is stripped) - -S-X add -X to sbt's scalacOptions (-S is stripped) - -In the case of duplicated or conflicting options, the order above -shows precedence: JAVA_OPTS lowest, command line options highest. 
+ # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution + # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found + <default> $(default_jvm_opts) + JVM_OPTS environment variable holding either the jvm args directly, or + the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts') + Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument. + -jvm-opts <path> file containing jvm args (if not given, .jvmopts in project root is used if present) + -Dkey=val pass -Dkey=val directly to the jvm + -J-X pass option -X directly to the jvm (-J is stripped) + + # passing options to sbt, OR to this runner + SBT_OPTS environment variable holding either the sbt args directly, or + the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts') + Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument. + -sbt-opts <path> file containing sbt args (if not given, .sbtopts in project root is used if present) + -S-X add -X to sbt's scalacOptions (-S is stripped) EOM } +process_args () { + require_arg () { + local type="$1" + local opt="$2" + local arg="$3" - -process_my_args () { + if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then + die "$opt requires <$type> argument" + fi + } while [[ $# -gt 0 ]]; do case "$1" in - -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;; - -no-share) addJava "$noshare_opts" && shift ;; - -no-global) addJava "-Dsbt.global.base=$(pwd)/project/.sbtboot" && shift ;; - -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; - -sbt-dir) require_arg path "$1" "$2" && addJava "-Dsbt.global.base=$2" && shift 2 ;; - -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; - -batch) exec </dev/null && shift ;; - - -sbt-create) sbt_create=true && shift ;; - - *) addResidual "$1" && shift ;; + -h|-help) usage; exit 1 ;; + -v) verbose=true && shift ;; + -d) addSbt "--debug" && shift ;; + -w) addSbt "--warn" && shift ;; + -q) addSbt "--error" && shift ;; + -x) debugUs=true && shift ;; + -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;; + -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; + -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;; + -no-share) noshare=true && shift ;; + -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; + -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;; + -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; + -offline) addSbt "set offline in Global := true" && shift ;; + -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;; + -batch) batch=true && shift ;; + -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;; + -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;; + + -sbt-create) sbt_create=true && shift ;; + -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; + -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;; + -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;; + -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;; + -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;; + -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;; + -scala-version) require_arg version 
"$1" "$2" && setScalaVersion "$2" && shift 2 ;; + -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; + -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; + -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; + -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; + -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; + + -D*) addJava "$1" && shift ;; + -J*) addJava "${1:2}" && shift ;; + -S*) addScalac "${1:2}" && shift ;; + -28) setScalaVersion "$latest_28" && shift ;; + -29) setScalaVersion "$latest_29" && shift ;; + -210) setScalaVersion "$latest_210" && shift ;; + -211) setScalaVersion "$latest_211" && shift ;; + -212) setScalaVersion "$latest_212" && shift ;; + new) sbt_new=true && sbt_explicit_version="$sbt_release_version" && addResidual "$1" && shift ;; + *) addResidual "$1" && shift ;; esac done +} + +# process the direct command line arguments +process_args "$@" - # Now, ensure sbt version is used. - [[ "${sbt_version}XXX" != "XXX" ]] && addJava "-Dsbt.version=$sbt_version" +# skip #-styled comments and blank lines +readConfigFile() { + local end=false + until $end; do + read || end=true + [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY" + done < "$1" } -loadConfigFile() { - for line in $(cat "$1" | sed '/^\#/d'); do - eval echo $line - done +# if there are file/environment sbt_opts, process again so we +# can supply args to this runner +if [[ -r "$sbt_opts_file" ]]; then + vlog "Using sbt options defined in file $sbt_opts_file" + while read opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file") +elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then + vlog "Using sbt options defined in variable \$SBT_OPTS" + extra_sbt_opts=( $SBT_OPTS ) +else + vlog "No extra sbt options have been defined" +fi + +[[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}" + +# reset "$@" to the residual args +set -- "${residual_args[@]}" +argumentCount=$# + +# set sbt version +set_sbt_version + +checkJava + +# only exists in 0.12+ +setTraceLevel() { + case "$sbt_version" in + "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;; + *) setThisBuild traceLevel $trace_level ;; + esac } -# TODO - Pull in config based on operating system... (MSYS + cygwin should pull in txt file). -# Here we pull in the global settings configuration. 
-[[ -f "$etc_sbt_opts_file" ]] && set -- $(loadConfigFile "$etc_sbt_opts_file") "$@" -# -- Windows behavior stub'd -# JAVA_OPTS=$(cat "$WDIR/sbtconfig.txt" | sed -e 's/\r//g' -e 's/^#.*$//g' | sed ':a;N;$!ba;s/\n/ /g') +# set scalacOptions if we were given any -S opts +[[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[@]}\"" + +# Update build.properties on disk to set explicit version - sbt gives us no choice +[[ -n "$sbt_explicit_version" && -z "$sbt_new" ]] && update_build_props_sbt "$sbt_explicit_version" +vlog "Detected sbt version $sbt_version" + +if [[ -n "$sbt_script" ]]; then + residual_args=( $sbt_script ${residual_args[@]} ) +else + # no args - alert them there's stuff in here + (( argumentCount > 0 )) || { + vlog "Starting $script_name: invoke with -help for other options" + residual_args=( shell ) + } +fi + +# verify this is an sbt dir, -create was given or user attempts to run a scala script +[[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" || -n "$sbt_new" ]] || { + cat <<EOM +$(pwd) doesn't appear to be an sbt project. +If you want to start sbt anyway, run: + $0 -sbt-create +EOM + exit 1 +} + +# pick up completion if present; todo +[[ -r .sbt_completion.sh ]] && source .sbt_completion.sh -# Pull in the project-level config file, if it exists. -[[ -f "$sbt_opts_file" ]] && set -- $(loadConfigFile "$sbt_opts_file") "$@" +# directory to store sbt launchers +[[ -d "$sbt_launch_dir" ]] || mkdir -p "$sbt_launch_dir" +[[ -w "$sbt_launch_dir" ]] || sbt_launch_dir="$(mktemp -d -t sbt_extras_launchers.XXXXXX)" + +# no jar? download it. +[[ -r "$sbt_jar" ]] || acquire_sbt_jar || { + # still no jar? uh-oh. + echo "Download failed. Obtain the jar manually and place it at $sbt_jar" + exit 1 +} + +if [[ -n "$noshare" ]]; then + for opt in ${noshare_opts}; do + addJava "$opt" + done +else + case "$sbt_version" in + "0.7."* | "0.10."* | "0.11."* | "0.12."* ) + [[ -n "$sbt_dir" ]] || { + sbt_dir="$HOME/.sbt/$sbt_version" + vlog "Using $sbt_dir as sbt dir, -sbt-dir to override." + } + ;; + esac + + if [[ -n "$sbt_dir" ]]; then + addJava "-Dsbt.global.base=$sbt_dir" + fi +fi + +if [[ -r "$jvm_opts_file" ]]; then + vlog "Using jvm options defined in file $jvm_opts_file" + while read opt; do extra_jvm_opts+=("$opt"); done < <(readConfigFile "$jvm_opts_file") +elif [[ -n "$JVM_OPTS" && ! ("$JVM_OPTS" =~ ^@.*) ]]; then + vlog "Using jvm options defined in \$JVM_OPTS variable" + extra_jvm_opts=( $JVM_OPTS ) +else + vlog "Using default jvm options" + extra_jvm_opts=( $(default_jvm_opts) ) +fi + +# traceLevel is 0.12+ +[[ -n "$trace_level" ]] && setTraceLevel + +main () { + execRunner "$java_cmd" \ + "${extra_jvm_opts[@]}" \ + "${java_args[@]}" \ + -jar "$sbt_jar" \ + "${sbt_commands[@]}" \ + "${residual_args[@]}" +} + +# sbt inserts this string on certain lines when formatting is enabled: +# val OverwriteLine = "\r\u001BM\u001B[2K" +# ...in order not to spam the console with a million "Resolving" lines. +# Unfortunately that makes it that much harder to work with when +# we're not going to print those lines anyway. We strip that bit of +# line noise, but leave the other codes to preserve color. +mainFiltered () { + local ansiOverwrite='\r\x1BM\x1B[2K' + local excludeRegex=$(egrep -v '^#|^$' ~/.sbtignore | paste -sd'|' -) + + echoLine () { + local line="$1" + local line1="$(echo "$line" | sed 's/\r\x1BM\x1B\[2K//g')" # This strips the OverwriteLine code. 
+ local line2="$(echo "$line1" | sed 's/\x1B\[[0-9;]*[JKmsu]//g')" # This strips all codes - we test regexes against this. + + if [[ $line2 =~ $excludeRegex ]]; then + [[ -n $debugUs ]] && echo "[X] $line1" + else + [[ -n $debugUs ]] && echo " $line1" || echo "$line1" + fi + } + + echoLine "Starting sbt with output filtering enabled." + main | while read -r line; do echoLine "$line"; done +} +# Only filter if there's a filter file and we don't see a known interactive command. +# Obviously this is super ad hoc but I don't know how to improve on it. Testing whether +# stdin is a terminal is useless because most of my use cases for this filtering are +# exactly when I'm at a terminal, running sbt non-interactively. +shouldFilter () { [[ -f ~/.sbtignore ]] && ! egrep -q '\b(shell|console|consoleProject)\b' <<<"${residual_args[@]}"; } -run "$@" +# run sbt +if shouldFilter; then mainFiltered; else main; fi http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/sbt/sbt-launch-lib.bash ---------------------------------------------------------------------- diff --git a/sbt/sbt-launch-lib.bash b/sbt/sbt-launch-lib.bash deleted file mode 100644 index 10eb7fe..0000000 --- a/sbt/sbt-launch-lib.bash +++ /dev/null @@ -1,244 +0,0 @@ -#!/usr/bin/env bash -# - -# A library to simplify using the SBT launcher from other packages. -# Note: This should be used by tools like giter8/conscript etc. - -# TODO - Should we merge the main SBT script with this library? - -if test -z "$HOME"; then - declare -r script_dir="$(dirname $script_path)" -else - declare -r script_dir="$HOME/.sbt" -fi - -declare -a residual_args -declare -a java_args -declare -a scalac_args -declare -a sbt_commands -declare java_cmd=java -declare java_version -declare -r sbt_bin_dir="$(dirname "$(realpath "$0")")" -declare -r sbt_home="$(dirname "$sbt_bin_dir")" - -echoerr () { - echo 1>&2 "$@" -} -vlog () { - [[ $verbose || $debug ]] && echoerr "$@" -} -dlog () { - [[ $debug ]] && echoerr "$@" -} - -jar_file () { - echo "$(cygwinpath "${sbt_home}/bin/sbt-launch.jar")" -} - -acquire_sbt_jar () { - SBT_VERSION=`awk -F "=" '/sbt\\.version/ {print $2}' $sbt_bin_dir/../project/build.properties` - URL1=http://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/${SBT_VERSION}/sbt-launch.jar - URL2=http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/${SBT_VERSION}/sbt-launch.jar - JAR=$sbt_bin_dir/sbt-launch-${SBT_VERSION}.jar - - sbt_jar=$JAR - - if [[ ! -f "$sbt_jar" ]]; then - # Download sbt launch jar if it hasn't been downloaded yet - if [ ! -f ${JAR} ]; then - # Download - printf "Attempting to fetch sbt\n" - JAR_DL=${JAR}.part - if hash curl 2>/dev/null; then - (curl -L --verbose --progress-bar ${URL1} > ${JAR_DL} || curl -L --verbose --progress-bar ${URL2} > ${JAR_DL}) && mv ${JAR_DL} ${JAR} - elif hash wget 2>/dev/null; then - (wget --verbose --progress=bar ${URL1} -O ${JAR_DL} || wget --verbose --progress=bar ${URL2} -O ${JAR_DL}) && mv ${JAR_DL} ${JAR} - else - printf "You do not have curl or wget installed, please install sbt manually from http://www.scala-sbt.org/\n" - exit -1 - fi - fi - if [ ! -f ${JAR} ]; then - # We failed to download - printf "Our attempt to download sbt locally to ${JAR} failed. 
Please install sbt manually from http://www.scala-sbt.org/\n" - exit -1 - fi - printf "Launching sbt from ${JAR}\n" - fi -} - -execRunner () { - # print the arguments one to a line, quoting any containing spaces - [[ $verbose || $debug ]] && echo "# Executing command line:" && { - for arg; do - if printf "%s\n" "$arg" | grep -q ' '; then - printf "\"%s\"\n" "$arg" - else - printf "%s\n" "$arg" - fi - done - echo "" - } - - # THis used to be exec, but we loose the ability to re-hook stty then - # for cygwin... Maybe we should flag the feature here... - "$@" -} - -addJava () { - dlog "[addJava] arg = '$1'" - java_args=( "${java_args[@]}" "$1" ) -} -addSbt () { - dlog "[addSbt] arg = '$1'" - sbt_commands=( "${sbt_commands[@]}" "$1" ) -} -addResidual () { - dlog "[residual] arg = '$1'" - residual_args=( "${residual_args[@]}" "$1" ) -} -addDebugger () { - addJava "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1" -} - -get_mem_opts () { - # if we detect any of these settings in ${java_opts} we need to NOT output our settings. - # The reason is the Xms/Xmx, if they don't line up, cause errors. - if [[ "${java_opts}" == *-Xmx* ]] || [[ "${java_opts}" == *-Xms* ]] || [[ "${java_opts}" == *-XX:MaxPermSize* ]] || [[ "${java_opts}" == *-XX:ReservedCodeCacheSize* ]]; then - echo "" - else - # a ham-fisted attempt to move some memory settings in concert - # so they need not be messed around with individually. - local mem=${1:-1024} - local codecache=$(( $mem / 8 )) - (( $codecache > 128 )) || codecache=128 - (( $codecache < 512 )) || codecache=512 - - local common_opts="-Xms${mem}m -Xmx${mem}m -XX:ReservedCodeCacheSize=${codecache}m" - if [[ "$java_version" < "1.8" ]]; then - local perm=$(( $codecache * 2 )) - echo "$common_opts -XX:MaxPermSize=${perm}m" - else - echo "$common_opts" - fi - fi -} - -require_arg () { - local type="$1" - local opt="$2" - local arg="$3" - if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then - echo "$opt requires <$type> argument" - exit 1 - fi -} - -is_function_defined() { - declare -f "$1" > /dev/null -} - -process_args () { - while [[ $# -gt 0 ]]; do - case "$1" in - -h|-help) usage; exit 1 ;; - -v|-verbose) verbose=1 && shift ;; - -d|-debug) debug=1 && shift ;; - - -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; - -mem) require_arg integer "$1" "$2" && sbt_mem="$2" && shift 2 ;; - -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;; - -batch) exec </dev/null && shift ;; - - -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; - -sbt-version) require_arg version "$1" "$2" && sbt_version="$2" && shift 2 ;; - -java-home) require_arg path "$1" "$2" && java_cmd="$2/bin/java" && shift 2 ;; - - -D*) addJava "$1" && shift ;; - -J*) addJava "${1:2}" && shift ;; - *) addResidual "$1" && shift ;; - esac - done - - is_function_defined process_my_args && { - myargs=("${residual_args[@]}") - residual_args=() - process_my_args "${myargs[@]}" - } - - java_version=$("$java_cmd" -version 2>&1 | awk -F '"' '/version/ {print $2}') - vlog "[process_args] java_version = '$java_version'" -} - -# Detect that we have java installed. -checkJava() { - local required_version="$1" - # Now check to see if it's a good enough version - if [[ "$java_version" == "" ]]; then - echo - echo No java installations was detected. - echo Please go to http://www.java.com/getjava/ and download - echo - exit 1 - elif [[ ! 
"$java_version" > "$required_version" ]]; then - echo - echo The java installation you have is not up to date - echo $script_name requires at least version $required_version+, you have - echo version $java_version - echo - echo Please go to http://www.java.com/getjava/ and download - echo a valid Java Runtime and install before running $script_name. - echo - exit 1 - fi -} - - -run() { - # no jar? download it. - [[ -f "$sbt_jar" ]] || acquire_sbt_jar "$sbt_version" || { - # still no jar? uh-oh. - echo "Download failed. Obtain the sbt-launch.jar manually and place it at $sbt_jar" - exit 1 - } - - # process the combined args, then reset "$@" to the residuals - process_args "$@" - set -- "${residual_args[@]}" - argumentCount=$# - - # TODO - java check should be configurable... - checkJava "1.6" - - #If we're in cygwin, we should use the windows config, and terminal hacks - if [[ "$CYGWIN_FLAG" == "true" ]]; then - stty -icanon min 1 -echo > /dev/null 2>&1 - addJava "-Djline.terminal=jline.UnixTerminal" - addJava "-Dsbt.cygwin=true" - fi - - # run sbt - execRunner "$java_cmd" \ - ${SBT_OPTS:-$default_sbt_opts} \ - $(get_mem_opts $sbt_mem) \ - ${java_opts} \ - ${java_args[@]} \ - -jar "$sbt_jar" \ - "${sbt_commands[@]}" \ - "${residual_args[@]}" - - exit_code=$? - - # Clean up the terminal from cygwin hacks. - if [[ "$CYGWIN_FLAG" == "true" ]]; then - stty icanon echo > /dev/null 2>&1 - fi - exit $exit_code -} - -runAlternateBoot() { - local bootpropsfile="$1" - shift - addJava "-Dsbt.boot.properties=$bootpropsfile" - run $@ -} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/Dockerfile ---------------------------------------------------------------------- diff --git a/tests/Dockerfile b/tests/Dockerfile index fda6682..970ebba 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -15,46 +15,18 @@ # limitations under the License. 
# -FROM ubuntu:xenial +FROM predictionio/pio ENV SPARK_VERSION 1.4.0 ENV ELASTICSEARCH_VERSION 1.4.4 ENV HBASE_VERSION 1.0.0 -RUN apt-get update && apt-get install -y \ - wget curl \ - python-pip \ - python3-pip \ - postgresql-client \ - openjdk-8-jdk \ - openssh-client openssh-server \ - git - -RUN pip install predictionio -RUN pip3 install --upgrade \ - pip \ - xmlrunner \ - requests \ - urllib3 - -ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64/jre - ADD docker-files/spark-${SPARK_VERSION}-bin-hadoop2.6.tgz /vendors ENV SPARK_HOME /vendors/spark-${SPARK_VERSION}-bin-hadoop2.6 -ENV ELASTICSEARCH_HOME /vendors/elasticsearch-${ELASTICSEARCH_VERSION} - -ENV HBASE_HOME /vendors/hbase-${HBASE_VERSION} - COPY docker-files/postgresql-9.4-1204.jdbc41.jar /drivers - -ENV PIO_HOME /PredictionIO -ENV PATH ${PIO_HOME}/bin/:${PATH} -COPY dist ${PIO_HOME} - COPY docker-files/init.sh init.sh -COPY docker-files/env-conf/spark-env.sh ${SPARK_HOME}/conf/spark-env.sh -COPY docker-files/env-conf/hbase-site.xml ${HBASE_HOME}/conf/hbase-site.xml +COPY docker-files/env-conf/hbase-site.xml ${PIO_HOME}/conf/hbase-site.xml COPY docker-files/env-conf/pio-env.sh /pio-env.sh COPY docker-files/wait-for-postgres.sh /wait-for-postgres.sh COPY docker-files/pgpass /root/.pgpass http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/Dockerfile.base ---------------------------------------------------------------------- diff --git a/tests/Dockerfile.base b/tests/Dockerfile.base new file mode 100644 index 0000000..4dc3911 --- /dev/null +++ b/tests/Dockerfile.base @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. AT +# THIS POINT, THIS IS ONLY INTENDED FOR USE IN AUTOMATED TESTS. IF YOU +# ARE LOOKING TO DEPLOY PREDICTIONIO WITH DOCKER, PLEASE REFER TO +# http://predictionio.incubator.apache.org/community/projects/#docker-installation-for-predictionio + +# Tests do not like the musl libc :(, and we need Python 3.5 +FROM ubuntu:xenial + +# Install OpenJDK 8 and Python 3.5 +RUN apt-get update && apt-get install -y \ + openjdk-8-jdk \ + wget curl \ + python-pip \ + python3-pip \ + postgresql-client \ + openssh-client openssh-server \ + git + +RUN pip install predictionio && pip3 install --upgrade \ + pip \ + xmlrunner \ + requests \ + urllib3 + +ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64/jre http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/build-docker.sh ---------------------------------------------------------------------- diff --git a/tests/build-docker.sh b/tests/build-docker.sh index ae78cac..1c1b9fe 100755 --- a/tests/build-docker.sh +++ b/tests/build-docker.sh @@ -27,4 +27,13 @@ if [ ! 
-f $DIR/docker-files/postgresql-9.4-1204.jdbc41.jar ]; then mv postgresql-9.4-1204.jdbc41.jar $DIR/docker-files/ fi +docker pull predictionio/pio-testing-base +docker build -t predictionio/pio-testing-base -f Dockerfile.base . +pushd $DIR/.. +./make-distribution.sh +sbt/sbt clean +mkdir assembly +cp dist/lib/*.jar assembly/ +docker build -t predictionio/pio . +popd docker build -t predictionio/pio-testing $DIR http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/docker-compose.yml ---------------------------------------------------------------------- diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml index bd2dd6d..ba3ce59 100644 --- a/tests/docker-compose.yml +++ b/tests/docker-compose.yml @@ -42,5 +42,5 @@ services: - hbase - postgres volumes: - - ./pio_tests:/tests/pio_tests - ~/.ivy2:/root/.ivy2 + - ~/.sbt:/root/.sbt http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/docker-files/env-conf/hbase-site.xml ---------------------------------------------------------------------- diff --git a/tests/docker-files/env-conf/hbase-site.xml b/tests/docker-files/env-conf/hbase-site.xml index 3c4e24c..3d5363e 100644 --- a/tests/docker-files/env-conf/hbase-site.xml +++ b/tests/docker-files/env-conf/hbase-site.xml @@ -17,16 +17,6 @@ See the License for the specific language governing permissions and limitations under the License. --> <configuration> - <!-- - <property> - <name>hbase.rootdir</name> - <value>file:///hbase-files/data</value> - </property> - <property> - <name>hbase.zookeeper.property.dataDir</name> - <value>/hbase-files/zookeeper</value> - </property> - --> <property> <name>hbase.zookeeper.quorum</name> <value>hbase</value> http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/docker-files/env-conf/pio-env.sh ---------------------------------------------------------------------- diff --git a/tests/docker-files/env-conf/pio-env.sh b/tests/docker-files/env-conf/pio-env.sh index 5417d24..e1076ba 100644 --- a/tests/docker-files/env-conf/pio-env.sh +++ b/tests/docker-files/env-conf/pio-env.sh @@ -39,7 +39,7 @@ MYSQL_JDBC_DRIVER= # HBASE_CONF_DIR: You must configure this if you intend to run PredictionIO # with HBase on a remote cluster. -HBASE_CONF_DIR=$HBASE_HOME/conf +HBASE_CONF_DIR=$PIO_HOME/conf # Filesystem paths where PredictionIO uses as block storage. 
PIO_FS_BASEDIR=$HOME/.pio_store @@ -88,7 +88,7 @@ PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch #PIO_STORAGE_SOURCES_ELASTICSEARCH_CLUSTERNAME=pio PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=elasticsearch PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300 -PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$ELASTICSEARCH_HOME +#PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$ELASTICSEARCH_HOME # Local File System Example PIO_STORAGE_SOURCES_LOCALFS_TYPE=localfs @@ -96,7 +96,7 @@ PIO_STORAGE_SOURCES_LOCALFS_PATH=$PIO_FS_BASEDIR/local_models # HBase Example PIO_STORAGE_SOURCES_HBASE_TYPE=hbase -PIO_STORAGE_SOURCES_HBASE_HOME=$HBASE_HOME +#PIO_STORAGE_SOURCES_HBASE_HOME=$HBASE_HOME # HDFS config PIO_STORAGE_SOURCES_HDFS_TYPE=hdfs http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/docker-files/env-conf/spark-defaults.conf ---------------------------------------------------------------------- diff --git a/tests/docker-files/env-conf/spark-defaults.conf b/tests/docker-files/env-conf/spark-defaults.conf deleted file mode 100644 index fcb1b15..0000000 --- a/tests/docker-files/env-conf/spark-defaults.conf +++ /dev/null @@ -1,13 +0,0 @@ -# Default system properties included when running spark-submit. -# This is useful for setting default environmental settings. - -# Example: -# spark.master spark://master:7077 -# spark.eventLog.enabled true -# spark.eventLog.dir hdfs://namenode:8021/directory -# spark.serializer org.apache.spark.serializer.KryoSerializer -spark.driver.memory 10g -spark.executor.memory 10g -spark.driver.cores 4 -spark.ui.port 4040 -# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/docker-files/env-conf/spark-env.sh ---------------------------------------------------------------------- diff --git a/tests/docker-files/env-conf/spark-env.sh b/tests/docker-files/env-conf/spark-env.sh deleted file mode 100755 index 22e7a9c..0000000 --- a/tests/docker-files/env-conf/spark-env.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash - -# Options read when launching programs locally with -# ./bin/run-example or ./bin/spark-submit -# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files -# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node -# - SPARK_PUBLIC_DNS, to set the public dns name of the driver program -# - SPARK_CLASSPATH, default classpath entries to append - -# Options read by executors and drivers running inside the cluster -# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node -# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program -# - SPARK_CLASSPATH, default classpath entries to append -# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data -# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos - -# Options read in YARN client mode -# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files -# - SPARK_EXECUTOR_INSTANCES, Number of workers to start (Default: 2) -# - SPARK_EXECUTOR_CORES, Number of cores for the workers (Default: 1). -# - SPARK_EXECUTOR_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G) -# - SPARK_DRIVER_MEMORY, Memory for Master (e.g. 
1000M, 2G) (Default: 1G) -# - SPARK_YARN_APP_NAME, The name of your application (Default: Spark) -# - SPARK_YARN_QUEUE, The hadoop queue to use for allocation requests (Default: âdefaultâ) -# - SPARK_YARN_DIST_FILES, Comma separated list of files to be distributed with the job. -# - SPARK_YARN_DIST_ARCHIVES, Comma separated list of archives to be distributed with the job. - -# Options for the daemons used in the standalone deploy mode -# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname -# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master -# - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y") -# - SPARK_WORKER_CORES, to set the number of cores to use on this machine -# - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g) -# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker -# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node -# - SPARK_WORKER_DIR, to set the working directory of worker processes -# - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y") -# - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g). -# - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y") -# - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y") -# - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y") -# - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers - -# Generic options for the daemons used in the standalone deploy mode -# - SPARK_CONF_DIR Alternate conf dir. (Default: ${SPARK_HOME}/conf) -# - SPARK_LOG_DIR Where log files are stored. (Default: ${SPARK_HOME}/logs) -# - SPARK_PID_DIR Where the pid file is stored. (Default: /tmp) -# - SPARK_IDENT_STRING A string representing this instance of spark. (Default: $USER) -# - SPARK_NICENESS The scheduling priority for daemons. (Default: 0) http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/script.travis.sh ---------------------------------------------------------------------- diff --git a/tests/script.travis.sh b/tests/script.travis.sh index f7f81ac..0f2af4c 100755 --- a/tests/script.travis.sh +++ b/tests/script.travis.sh @@ -16,25 +16,10 @@ # limitations under the License. # -set -e - if [[ $BUILD_TYPE == Unit ]]; then - # Run license check - ./tests/check_license.sh - - # Prepare pio environment variables - set -a - source conf/pio-env.sh.travis - set +a - - # Run stylecheck - sbt scalastyle - # Run all unit tests - sbt test - + ./tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \ + '/PredictionIO/tests/unit.sh' else - REPO=`pwd` - ./tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \ - 'python3 /tests/pio_tests/tests.py' + 'python3 /PredictionIO/tests/pio_tests/tests.py' fi http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/c6c1ac3b/tests/unit.sh ---------------------------------------------------------------------- diff --git a/tests/unit.sh b/tests/unit.sh new file mode 100755 index 0000000..324b87f --- /dev/null +++ b/tests/unit.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Run license check +pushd /PredictionIO + +./tests/check_license.sh + +# Prepare pio environment variables +set -a +source conf/pio-env.sh +set +a + +# Run stylecheck +sbt/sbt scalastyle +# Run all unit tests +sbt/sbt test + +popd
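For reference, a local reproduction of the new Docker-based unit test run might look like the sketch below. It assumes Docker and docker-compose are installed on the host and relies on the existing tests/run_docker.sh helper (referenced by tests/script.travis.sh but not part of this diff); the three repository arguments mirror the call in tests/script.travis.sh, and leaving them empty for the Unit build type is an assumption rather than something this commit documents.

    # Build predictionio/pio-testing-base, predictionio/pio and
    # predictionio/pio-testing from the current source tree.
    ./tests/build-docker.sh

    # Run the unit suite inside the container, as tests/script.travis.sh
    # does when BUILD_TYPE=Unit.
    ./tests/run_docker.sh "$METADATA_REP" "$EVENTDATA_REP" "$MODELDATA_REP" \
        '/PredictionIO/tests/unit.sh'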
