[PIO-57] Add SBT Native Packager

Closes #359
Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/d0d2570b Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/d0d2570b Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/d0d2570b Branch: refs/heads/livedoc Commit: d0d2570b3ec602d1abaad544b5759c016432b733 Parents: dfb01e3 Author: Shinsuke Sugaya <[email protected]> Authored: Thu Mar 30 15:11:01 2017 -0700 Committer: Donald Szeto <[email protected]> Committed: Thu Mar 30 15:11:01 2017 -0700 ---------------------------------------------------------------------- .gitignore | 7 +- assembly/build.sbt | 93 ++++++++++++++++++ assembly/src/debian/DEBIAN/postrm | 43 +++++++++ assembly/src/debian/DEBIAN/preinst | 36 +++++++ assembly/src/rpm/scriptlets/postun | 41 ++++++++ assembly/src/rpm/scriptlets/preinst | 35 +++++++ bin/compute-classpath.sh | 2 +- bin/pio | 44 +++++---- bin/pio-class | 17 +++- bin/pio-daemon | 35 ++++++- bin/pio-shell | 34 ++++++- bin/pio-start-all | 99 +++++++++++--------- bin/pio-stop-all | 50 ++++++---- build.sbt | 3 + conf/log4j.properties | 2 +- make-distribution.sh | 22 ++++- project/plugins.sbt | 2 + storage/elasticsearch/build.sbt | 3 +- storage/elasticsearch1/build.sbt | 3 +- storage/hbase/build.sbt | 3 +- storage/hdfs/build.sbt | 3 +- storage/jdbc/build.sbt | 3 +- storage/localfs/build.sbt | 3 +- tests/build_docker.sh | 3 +- tools/build.sbt | 5 +- .../org/apache/predictionio/tools/Runner.scala | 2 + .../predictionio/tools/commands/Engine.scala | 2 + 27 files changed, 496 insertions(+), 99 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/.gitignore ---------------------------------------------------------------------- diff --git a/.gitignore b/.gitignore index 68ad5c4..d6dfe21 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,6 @@ core/data examples/data/ml-* fs/ supervisord.conf -assembly/ /dist pio.log *.tar.gz @@ -20,6 +19,10 @@ pio.log # Ignore source files whose name prefixed with "Private" Private*.scala quickstartapp/ +# Eclipse +.project +.classpath +.settings/ # IntelliJ *.iml .idea/ @@ -31,3 +34,5 @@ apache-rat-0.11.jar tests/dist tests/docker-files/*.jar tests/docker-files/*.tgz +assembly/*.jar +assembly/src/universal/ http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/assembly/build.sbt ---------------------------------------------------------------------- diff --git a/assembly/build.sbt b/assembly/build.sbt new file mode 100644 index 0000000..79fc3ab --- /dev/null +++ b/assembly/build.sbt @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import NativePackagerHelper._ +import RpmConstants._ +import com.typesafe.sbt.packager.linux.LinuxSymlink + +enablePlugins(RpmPlugin, DebianPlugin) + +name := "predictionio" + +maintainer in Linux := "Apache Software Foundation" +packageSummary in Linux := "Apache PredictionIO" +packageDescription := "Apache PredictionIO is an open source Machine Learning Server " + + "built on top of state-of-the-art open source stack for developers " + + "and data scientists create predictive engines for any machine learning task." + +version in Rpm := version.value.replace("-", "_") +rpmRelease := "1" +rpmVendor := "apache" +rpmGroup := Some("Applications/System") +rpmUrl := Some("http://predictionio.incubator.apache.org/") +rpmLicense := Some("Apache License Version 2.0") + +maintainerScripts in Rpm := maintainerScriptsAppendFromFile((maintainerScripts in Rpm).value)( + Pre -> (sourceDirectory.value / "rpm" / "scriptlets" / "preinst"), + Postun -> (sourceDirectory.value / "rpm" / "scriptlets" / "postun") +) + +mappings in Universal ++= { + val releaseFile = baseDirectory.value / ".." / "RELEASE.md" + val buildPropFile = baseDirectory.value / ".." / "project" / "build.properties" + val sbtFile = baseDirectory.value / ".." / "sbt" / "sbt" + Seq(releaseFile -> "RELEASE", + buildPropFile -> "project/build.properties", + sbtFile -> "sbt/sbt") +} + +mappings in Universal ++= { + val files = IO.listFiles(baseDirectory.value / ".." / "conf") + files filterNot { f => f.getName.endsWith(".travis") } map { + case f if f.getName equals "pio-env.sh.template" => f -> "conf/pio-env.sh" + case f => f -> s"conf/${f.getName}" + } toSeq +} + +mappings in Universal ++= { + val files = IO.listFiles(baseDirectory.value / ".." / "bin") + files map { f => f -> s"bin/${f.getName}" } toSeq +} + +linuxPackageMappings := { + val mappings = linuxPackageMappings.value + mappings map { linuxPackage => + val linuxFileMappings = linuxPackage.mappings map { + case (f, n) if f.getName equals "conf" => f -> s"/etc/${name.value}" + case (f, n) if f.getName equals "pio-env.sh.template" => f -> s"/etc/${name.value}/pio-env.sh" + case (f, n) if f.getParent endsWith "conf" => f -> s"/etc/${name.value}/${f.getName}" + case (f, n) if f.getName equals "log" => f -> s"/var/log/${name.value}" + case (f, n) if f.getName equals "pio.log" => f -> s"/var/log/${name.value}/pio.log" + case (f, n) => f -> n + } + + val fileData = linuxPackage.fileData.copy( + user = s"${name.value}", + group = s"${name.value}" + ) + + linuxPackage.copy( + mappings = linuxFileMappings, + fileData = fileData + ) + } +} + +linuxPackageSymlinks := { + Seq(LinuxSymlink("/usr/bin/pio", s"/usr/share/${name.value}/bin/pio"), + LinuxSymlink("/usr/bin/pio-daemon", s"/usr/share/${name.value}/bin/pio-daemon")) +} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/assembly/src/debian/DEBIAN/postrm ---------------------------------------------------------------------- diff --git a/assembly/src/debian/DEBIAN/postrm b/assembly/src/debian/DEBIAN/postrm new file mode 100644 index 0000000..ed9d71c --- /dev/null +++ b/assembly/src/debian/DEBIAN/postrm @@ -0,0 +1,43 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +REMOVE_USER_AND_GROUP=false + +case "$1" in + remove) + ;; + purge) + REMOVE_USER_AND_GROUP=true + ;; + failed-upgrade|abort-install|abort-upgrade|disappear|upgrade|disappear) + ;; + *) + echo "post remove script called with unknown argument \`$1'" >&2 + exit 1 + ;; +esac + +if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then + if id "predictionio" > /dev/null 2>&1 ; then + userdel "predictionio" + fi + + if getent group "predictionio" > /dev/null 2>&1 ; then + groupdel "predictionio" + fi +fi + http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/assembly/src/debian/DEBIAN/preinst ---------------------------------------------------------------------- diff --git a/assembly/src/debian/DEBIAN/preinst b/assembly/src/debian/DEBIAN/preinst new file mode 100644 index 0000000..da60ce3 --- /dev/null +++ b/assembly/src/debian/DEBIAN/preinst @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +if ! getent group "predictionio" > /dev/null 2>&1 ; then + echo -n "Creating predictionio group..." + addgroup --quiet --system "predictionio" + echo " OK" +fi + +if ! id predictionio > /dev/null 2>&1 ; then + echo -n "Creating predictionio user..." + adduser --quiet \ + --system \ + --no-create-home \ + --ingroup "predictionio" \ + --disabled-password \ + --shell /bin/false \ + --home "/usr/share/predictionio" \ + "predictionio" + echo " OK" +fi + http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/assembly/src/rpm/scriptlets/postun ---------------------------------------------------------------------- diff --git a/assembly/src/rpm/scriptlets/postun b/assembly/src/rpm/scriptlets/postun new file mode 100644 index 0000000..206fb86 --- /dev/null +++ b/assembly/src/rpm/scriptlets/postun @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +REMOVE_USER_AND_GROUP=false + +case "$1" in + 0) + REMOVE_USER_AND_GROUP=true + ;; + 1) + ;; + *) + echo "post remove script called with unknown argument \`$1'" >&2 + exit 1 + ;; +esac + +if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then + if id "predictionio" > /dev/null 2>&1 ; then + userdel "predictionio" + fi + + if getent group "predictionio" > /dev/null 2>&1 ; then + groupdel "predictionio" + fi +fi + http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/assembly/src/rpm/scriptlets/preinst ---------------------------------------------------------------------- diff --git a/assembly/src/rpm/scriptlets/preinst b/assembly/src/rpm/scriptlets/preinst new file mode 100644 index 0000000..cb66b46 --- /dev/null +++ b/assembly/src/rpm/scriptlets/preinst @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +if ! getent group "predictionio" > /dev/null 2>&1 ; then + echo -n "Creating predictionio group..." + groupadd -r "predictionio" + echo " OK" +fi + +if ! id predictionio > /dev/null 2>&1 ; then + echo -n "Creating predictionio user..." + useradd --system \ + -M \ + --gid "predictionio" \ + --shell /sbin/nologin \ + --comment "fess user" \ + -d "/usr/share/predictionio" \ + "predictionio" + echo " OK" +fi + http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/compute-classpath.sh ---------------------------------------------------------------------- diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh index 69cbb25..032d111 100755 --- a/bin/compute-classpath.sh +++ b/bin/compute-classpath.sh @@ -25,7 +25,7 @@ FWDIR="$(cd `dirname $0`/..; pwd)" . ${FWDIR}/bin/load-pio-env.sh # Build up classpath -CLASSPATH="${FWDIR}/conf" +CLASSPATH="${PIO_CONF_DIR}" CLASSPATH="$CLASSPATH:${FWDIR}/plugins/*:${FWDIR}/lib/spark/*" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/pio ---------------------------------------------------------------------- diff --git a/bin/pio b/bin/pio index fca48f6..01e73ea 100755 --- a/bin/pio +++ b/bin/pio @@ -32,28 +32,38 @@ search() { echo ${i} } -PIO_FILE=$(readlink -f $0 2>/dev/null) -if [ $? 
= 0 ] ; then - export PIO_HOME="$(cd $(dirname $PIO_FILE)/..; pwd)" -else - CURRENT_DIR=`pwd` - TARGET_FILE="$0" - cd "$(dirname "$TARGET_FILE")" - TARGET_FILE=$(basename "$TARGET_FILE") - - while [ -L "$TARGET_FILE" ] - do - TARGET_FILE=$(readlink "$TARGET_FILE") +if [ -z $PIO_HOME ] ; then + PIO_FILE=$(readlink -f $0 2>/dev/null) + if [ $? = 0 ] ; then + export PIO_HOME="$(cd $(dirname $PIO_FILE)/..; pwd)" + else + CURRENT_DIR=`pwd` + TARGET_FILE="$0" cd "$(dirname "$TARGET_FILE")" TARGET_FILE=$(basename "$TARGET_FILE") - done - export PIO_HOME="$(cd $(dirname "$TARGET_FILE")/..; pwd -P)" - cd "$CURRENT_DIR" -fi + while [ -L "$TARGET_FILE" ] + do + TARGET_FILE=$(readlink "$TARGET_FILE") + cd "$(dirname "$TARGET_FILE")" + TARGET_FILE=$(basename "$TARGET_FILE") + done + export PIO_HOME="$(cd $(dirname "$TARGET_FILE")/..; pwd -P)" + cd "$CURRENT_DIR" + fi +fi -export PIO_CONF_DIR="${PIO_HOME}/conf" +if [ -z $PIO_CONF_DIR ] ; then + export PIO_CONF_DIR="${PIO_HOME}/conf" + if [ ! -d $PIO_CONF_DIR ] ; then + export PIO_CONF_DIR="/etc/predictionio" + if [ ! -d $PIO_CONF_DIR ] ; then + echo "PIO_CONF_DIR is not found." + exit 1 + fi + fi +fi FIRST_SEP=$(search "--" $@) http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/pio-class ---------------------------------------------------------------------- diff --git a/bin/pio-class b/bin/pio-class index 002da5c..8bde407 100755 --- a/bin/pio-class +++ b/bin/pio-class @@ -40,8 +40,8 @@ if [ -z "$1" ]; then fi # Warn if log4j.properties is not present -if [ ! -f "$FWDIR/conf/log4j.properties" ]; then - echo -e "\033[0;35mWarning: log4j.properties is missing from $FWDIR/conf\033[0m" +if [ ! -f "$PIO_CONF_DIR/log4j.properties" ]; then + echo -e "\033[0;35mWarning: log4j.properties is missing from $PIO_CONF_DIR\033[0m" fi # Make sure the Apache Spark version meets the prerequisite if it is a binary @@ -84,6 +84,19 @@ else CLASSPATH=${classpath_output} fi +if [ -z $PIO_LOG_DIR ] ; then + PIO_LOG_DIR=$PIO_HOME/log + touch $PIO_LOG_DIR/pio.log > /dev/null 2>&1 + if [ $? != 0 ] ; then + PIO_LOG_DIR=/var/log/predictionio + touch $PIO_LOG_DIR/pio.log > /dev/null 2>&1 + if [ $? != 0 ] ; then + PIO_LOG_DIR=$HOME + fi + fi +fi + export CLASSPATH +export JAVA_OPTS="$JAVA_OPTS -Dpio.log.dir=$PIO_LOG_DIR" exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/pio-daemon ---------------------------------------------------------------------- diff --git a/bin/pio-daemon b/bin/pio-daemon index 62704e1..630b087 100755 --- a/bin/pio-daemon +++ b/bin/pio-daemon @@ -22,7 +22,7 @@ search() { local needle=$1; shift for str in $@; do - if [ "$str" = "$needle" ]; then + if [ "${str}" = "$needle" ]; then echo ${i} return else @@ -32,9 +32,38 @@ search() { echo ${i} } -export PIO_HOME="$(cd `dirname $0`/..; pwd)" +if [ -z $PIO_HOME ] ; then + PIO_FILE=$(readlink -f $0 2>/dev/null) + if [ $? = 0 ] ; then + export PIO_HOME="$(cd $(dirname $PIO_FILE)/..; pwd)" + else + CURRENT_DIR=`pwd` + TARGET_FILE="$0" + cd "$(dirname "$TARGET_FILE")" + TARGET_FILE=$(basename "$TARGET_FILE") -export PIO_CONF_DIR="$PIO_HOME/conf" + while [ -L "$TARGET_FILE" ] + do + TARGET_FILE=$(readlink "$TARGET_FILE") + cd "$(dirname "$TARGET_FILE")" + TARGET_FILE=$(basename "$TARGET_FILE") + done + + export PIO_HOME="$(cd $(dirname "$TARGET_FILE")/..; pwd -P)" + cd "$CURRENT_DIR" + fi +fi + +if [ -z $PIO_CONF_DIR ] ; then + export PIO_CONF_DIR="${PIO_HOME}/conf" + if [ ! 
-d $PIO_CONF_DIR ] ; then + export PIO_CONF_DIR="/etc/predictionio" + if [ ! -d $PIO_CONF_DIR ] ; then + echo "PIO_CONF_DIR is not found." + exit 1 + fi + fi +fi PIDFILE=$1 http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/pio-shell ---------------------------------------------------------------------- diff --git a/bin/pio-shell b/bin/pio-shell index bb57ddb..d47008f 100755 --- a/bin/pio-shell +++ b/bin/pio-shell @@ -17,7 +17,39 @@ # limitations under the License. # -export PIO_HOME="$(cd `dirname $0`/..; pwd)" +if [ -z $PIO_HOME ] ; then + PIO_FILE=$(readlink -f $0 2>/dev/null) + if [ $? = 0 ] ; then + export PIO_HOME="$(cd $(dirname $PIO_FILE)/..; pwd)" + else + CURRENT_DIR=`pwd` + TARGET_FILE="$0" + cd "$(dirname "$TARGET_FILE")" + TARGET_FILE=$(basename "$TARGET_FILE") + + while [ -L "$TARGET_FILE" ] + do + TARGET_FILE=$(readlink "$TARGET_FILE") + cd "$(dirname "$TARGET_FILE")" + TARGET_FILE=$(basename "$TARGET_FILE") + done + + export PIO_HOME="$(cd $(dirname "$TARGET_FILE")/..; pwd -P)" + cd "$CURRENT_DIR" + fi +fi + +if [ -z $PIO_CONF_DIR ] ; then + export PIO_CONF_DIR="${PIO_HOME}/conf" + if [ ! -d $PIO_CONF_DIR ] ; then + export PIO_CONF_DIR="/etc/predictionio" + if [ ! -d $PIO_CONF_DIR ] ; then + echo "PIO_CONF_DIR is not found." + exit 1 + fi + fi +fi + . ${PIO_HOME}/bin/load-pio-env.sh if [[ "$1" == "--with-spark" ]] http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/pio-start-all ---------------------------------------------------------------------- diff --git a/bin/pio-start-all b/bin/pio-start-all index a78b0d2..15ac1a6 100755 --- a/bin/pio-start-all +++ b/bin/pio-start-all @@ -25,63 +25,78 @@ export PIO_HOME="$(cd `dirname $0`/..; pwd)" . ${PIO_HOME}/bin/load-pio-env.sh +SOURCE_TYPE=$PIO_STORAGE_REPOSITORIES_METADATA_SOURCE +SOURCE_TYPE=$SOURCE_TYPE$PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE +SOURCE_TYPE=$SOURCE_TYPE$PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE + # Elasticsearch -echo "Starting Elasticsearch..." -if [ -n "$PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME" ]; then - if [ -n "$JAVA_HOME" ]; then - JPS=`$JAVA_HOME/bin/jps` - else - JPS=`jps` +if [ `echo $SOURCE_TYPE | grep -i elasticsearch | wc -l` != 0 ] ; then + echo "Starting Elasticsearch..." + if [ -n "$PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME" ]; then + ELASTICSEARCH_HOME=$PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME + elif [ -n "$PIO_STORAGE_SOURCES_ELASTICSEARCH5_HOME" ]; then + ELASTICSEARCH_HOME=$PIO_STORAGE_SOURCES_ELASTICSEARCH5_HOME fi - if [[ ${JPS} =~ "Elasticsearch" ]]; then - echo -e "\033[0;31mElasticsearch is already running. Please use pio-stop-all to try stopping it first.\033[0m" - echo -e "\033[0;31mNote: If you started Elasticsearch manually, you will need to kill it manually.\033[0m" - echo -e "\033[0;31mAborting...\033[0m" - exit 1 + if [ -n "$ELASTICSEARCH_HOME" ]; then + if [ -n "$JAVA_HOME" ]; then + JPS=`$JAVA_HOME/bin/jps` + else + JPS=`jps` + fi + if [[ ${JPS} =~ "Elasticsearch" ]]; then + echo -e "\033[0;31mElasticsearch is already running. 
Please use pio-stop-all to try stopping it first.\033[0m" + echo -e "\033[0;31mNote: If you started Elasticsearch manually, you will need to kill it manually.\033[0m" + echo -e "\033[0;31mAborting...\033[0m" + exit 1 + else + $ELASTICSEARCH_HOME/bin/elasticsearch -d -p $PIO_HOME/es.pid + fi else - $PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME/bin/elasticsearch -d -p $PIO_HOME/es.pid + echo -e "\033[0;31mPlease set PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME in conf/pio-env.sh, or in your environment.\033[0m" + echo -e "\033[0;31mCannot start Elasticsearch. Aborting...\033[0m" + exit 1 fi -else - echo -e "\033[0;31mPlease set PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME in conf/pio-env.sh, or in your environment.\033[0m" - echo -e "\033[0;31mCannot start Elasticsearch. Aborting...\033[0m" - exit 1 fi # HBase -echo "Starting HBase..." -if [ -n "$PIO_STORAGE_SOURCES_HBASE_HOME" ]; then - $PIO_STORAGE_SOURCES_HBASE_HOME/bin/start-hbase.sh -else - echo -e "\033[0;31mPlease set PIO_STORAGE_SOURCES_HBASE_HOME in conf/pio-env.sh, or in your environment.\033[0m" - # Kill everything for cleanliness - echo -e "\033[0;31mCannot start HBase. Aborting...\033[0m" - sleep 3 - ${PIO_HOME}/bin/pio-stop-all - exit 1 +if [ `echo $SOURCE_TYPE | grep -i hbase | wc -l` != 0 ] ; then + echo "Starting HBase..." + if [ -n "$PIO_STORAGE_SOURCES_HBASE_HOME" ]; then + $PIO_STORAGE_SOURCES_HBASE_HOME/bin/start-hbase.sh + else + echo -e "\033[0;31mPlease set PIO_STORAGE_SOURCES_HBASE_HOME in conf/pio-env.sh, or in your environment.\033[0m" + # Kill everything for cleanliness + echo -e "\033[0;31mCannot start HBase. Aborting...\033[0m" + sleep 3 + ${PIO_HOME}/bin/pio-stop-all + exit 1 + fi fi #PGSQL -pgsqlStatus="$(ps auxwww | grep postgres | wc -l)" -if [[ "$pgsqlStatus" < 5 ]]; then - # Detect OS - OS=`uname` - if [[ "$OS" = "Darwin" ]]; then - pg_cmd=`which pg_ctl` - if [[ "$pg_cmd" != "" ]]; then - pg_ctl -D /usr/local/var/postgres -l /usr/local/var/postgres/server.log start +if [ `echo $SOURCE_TYPE | grep -i pgsql | wc -l` != 0 ] ; then + pgsqlStatus="$(ps auxwww | grep postgres | wc -l)" + if [[ "$pgsqlStatus" < 5 ]]; then + # Detect OS + OS=`uname` + if [[ "$OS" = "Darwin" ]]; then + pg_cmd=`which pg_ctl` + if [[ "$pg_cmd" != "" ]]; then + pg_ctl -D /usr/local/var/postgres -l /usr/local/var/postgres/server.log start + fi + elif [[ "$OS" = "Linux" ]]; then + sudo service postgresql start + else + echo -e "\033[1;31mYour OS $OS is not yet supported for automatic postgresql startup:(\033[0m" + echo -e "\033[1;31mPlease do a manual startup!\033[0m" + ${PIO_HOME}/bin/pio-stop-all + exit 1 fi - elif [[ "$OS" = "Linux" ]]; then - sudo service postgresql start - else - echo -e "\033[1;31mYour OS $OS is not yet supported for automatic postgresql startup:(\033[0m" - echo -e "\033[1;31mPlease do a manual startup!\033[0m" - ${PIO_HOME}/bin/pio-stop-all - exit 1 fi fi # PredictionIO Event Server -echo "Waiting 10 seconds for HBase to fully initialize..." +echo "Waiting 10 seconds for Storage Repositories to fully initialize..." sleep 10 echo "Starting PredictionIO Event Server..." ${PIO_HOME}/bin/pio-daemon ${PIO_HOME}/eventserver.pid eventserver --ip 0.0.0.0 http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/bin/pio-stop-all ---------------------------------------------------------------------- diff --git a/bin/pio-stop-all b/bin/pio-stop-all index 4aab5a3..dabad5d 100755 --- a/bin/pio-stop-all +++ b/bin/pio-stop-all @@ -25,6 +25,10 @@ export PIO_HOME="$(cd `dirname $0`/..; pwd)" . 
${PIO_HOME}/bin/load-pio-env.sh +SOURCE_TYPE=$PIO_STORAGE_REPOSITORIES_METADATA_SOURCE +SOURCE_TYPE=$SOURCE_TYPE$PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE +SOURCE_TYPE=$SOURCE_TYPE$PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE + # PredictionIO Event Server echo "Stopping PredictionIO Event Server..." PIDFILE=${PIO_HOME}/eventserver.pid @@ -34,30 +38,38 @@ if [ -e ${PIDFILE} ]; then fi # HBase -echo "Stopping HBase..." -if [ -n "$PIO_STORAGE_SOURCES_HBASE_HOME" ]; then - $PIO_STORAGE_SOURCES_HBASE_HOME/bin/stop-hbase.sh +if [ `echo $SOURCE_TYPE | grep -i hbase | wc -l` != 0 ] ; then + echo "Stopping HBase..." + if [ -n "$PIO_STORAGE_SOURCES_HBASE_HOME" ]; then + $PIO_STORAGE_SOURCES_HBASE_HOME/bin/stop-hbase.sh + fi fi # Elasticsearch -echo "Stopping Elasticsearch..." -PIDFILE=${PIO_HOME}/es.pid -if [ -e ${PIDFILE} ]; then - cat ${PIDFILE} | xargs kill - rm ${PIDFILE} +if [ `echo $SOURCE_TYPE | grep -i elasticsearch | wc -l` != 0 ] ; then + echo "Stopping Elasticsearch..." + PIDFILE=${PIO_HOME}/es.pid + if [ -e ${PIDFILE} ]; then + cat ${PIDFILE} | xargs kill + rm ${PIDFILE} + fi fi #PGSQL -OS=`uname` -if [[ "$OS" = "Darwin" ]]; then - pg_cmd=`which pg_ctl` - if [[ "$pg_cmd" != "" ]]; then - pg_ctl -D /usr/local/var/postgres stop -s -m fast +if [ `echo $SOURCE_TYPE | grep -i pgsql | wc -l` != 0 ] ; then + if [ -n "$PIO_STORAGE_SOURCES_PGSQL_TYPE" ]; then + OS=`uname` + if [[ "$OS" = "Darwin" ]]; then + pg_cmd=`which pg_ctl` + if [[ "$pg_cmd" != "" ]]; then + pg_ctl -D /usr/local/var/postgres stop -s -m fast + fi + elif [[ "$OS" = "Linux" ]]; then + sudo service postgresql stop + else + echo -e "\033[1;31mYour OS $OS is not yet supported for automatic postgresql startup:(\033[0m" + echo -e "\033[1;31mPlease do a manual shutdown!\033[0m" + exit 1 + fi fi -elif [[ "$OS" = "Linux" ]]; then - sudo service postgresql stop -else - echo -e "\033[1;31mYour OS $OS is not yet supported for automatic postgresql startup:(\033[0m" - echo -e "\033[1;31mPlease do a manual shutdown!\033[0m" - exit 1 fi http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/build.sbt ---------------------------------------------------------------------- diff --git a/build.sbt b/build.sbt index 6c4cb7c..06030cb 100644 --- a/build.sbt +++ b/build.sbt @@ -185,6 +185,9 @@ val storage = (project in file("storage")) .aggregate(storageSubprojects map Project.projectToRef: _*) .disablePlugins(sbtassembly.AssemblyPlugin) +val assembly = (project in file("assembly")). + settings(commonSettings: _*) + val root = (project in file(".")). settings(commonSettings: _*). enablePlugins(ScalaUnidocPlugin). 
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/conf/log4j.properties ---------------------------------------------------------------------- diff --git a/conf/log4j.properties b/conf/log4j.properties index a9e9d8e..77a7c60 100644 --- a/conf/log4j.properties +++ b/conf/log4j.properties @@ -25,7 +25,7 @@ log4j.appender.console.layout.ConversionPattern=[%p] [%c{1}] %m%n%throwable{0} # file appender log4j.appender.file=org.apache.log4j.FileAppender -log4j.appender.file.File=./pio.log +log4j.appender.file.File=${pio.log.dir}/pio.log log4j.appender.file.layout=org.apache.log4j.EnhancedPatternLayout log4j.appender.file.layout.ConversionPattern=%d %-5p %c [%t] - %m%n http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/make-distribution.sh ---------------------------------------------------------------------- diff --git a/make-distribution.sh b/make-distribution.sh index e92178f..03c15e8 100755 --- a/make-distribution.sh +++ b/make-distribution.sh @@ -24,6 +24,9 @@ usage () echo "Usage: $0 [-h|--help]" echo "" echo " -h|--help Show usage" + echo "" + echo " --with-rpm Build distribution for RPM package" + echo " --with-deb Build distribution for DEB package" } JAVA_PROPS=() @@ -40,6 +43,14 @@ case $i in JAVA_PROPS+=("$i") shift ;; + --with-rpm) + RPM_BUILD=true + shift + ;; + --with-deb) + DEB_BUILD=true + shift + ;; *) usage exit 1 @@ -59,6 +70,13 @@ set -x sbt/sbt "${JAVA_PROPS[@]}" clean sbt/sbt "${JAVA_PROPS[@]}" printBuildInfo sbt/sbt "${JAVA_PROPS[@]}" publishLocal assembly storage/assembly +sbt/sbt "${JAVA_PROPS[@]}" assembly/clean assembly/universal:packageBin assembly/universal:packageZipTarball +if [ x$RPM_BUILD = "xtrue" ] ; then + sbt/sbt "${JAVA_PROPS[@]}" assembly/rpm:packageBin +fi +if [ x$DEB_BUILD = "xtrue" ] ; then + sbt/sbt "${JAVA_PROPS[@]}" assembly/debian:packageBin +fi set +x cd ${FWDIR} @@ -75,8 +93,8 @@ cp ${FWDIR}/bin/* ${DISTDIR}/bin || : cp ${FWDIR}/conf/* ${DISTDIR}/conf cp ${FWDIR}/project/build.properties ${DISTDIR}/project cp ${FWDIR}/sbt/sbt ${DISTDIR}/sbt -cp ${FWDIR}/assembly/*assembly*jar ${DISTDIR}/lib -cp ${FWDIR}/assembly/spark/*jar ${DISTDIR}/lib/spark +cp ${FWDIR}/assembly/src/universal/lib/*assembly*jar ${DISTDIR}/lib +cp ${FWDIR}/assembly/src/universal/lib/spark/*jar ${DISTDIR}/lib/spark rm -f ${DISTDIR}/lib/*javadoc.jar rm -f ${DISTDIR}/lib/*sources.jar http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/project/plugins.sbt ---------------------------------------------------------------------- diff --git a/project/plugins.sbt b/project/plugins.sbt index 2f21e00..f13cafd 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,3 +11,5 @@ addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0") resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases/" addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.3.5") + +addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.2.0-M8") http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/storage/elasticsearch/build.sbt ---------------------------------------------------------------------- diff --git a/storage/elasticsearch/build.sbt b/storage/elasticsearch/build.sbt index 091b354..50292c5 100644 --- a/storage/elasticsearch/build.sbt +++ b/storage/elasticsearch/build.sbt @@ -44,4 +44,5 @@ assemblyShadeRules in assembly := Seq( test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile.getParentFile / - "assembly" / 
"spark" / s"pio-data-elasticsearch-assembly-${version.value}.jar" + "assembly" / "src" / "universal" / "lib" / "spark" / + s"pio-data-elasticsearch-assembly-${version.value}.jar" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/storage/elasticsearch1/build.sbt ---------------------------------------------------------------------- diff --git a/storage/elasticsearch1/build.sbt b/storage/elasticsearch1/build.sbt index 6ed4df0..5e72f91 100644 --- a/storage/elasticsearch1/build.sbt +++ b/storage/elasticsearch1/build.sbt @@ -34,4 +34,5 @@ assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeSca test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile.getParentFile / - "assembly" / "spark" / s"pio-data-elasticsearch1-assembly-${version.value}.jar" + "assembly" / "src" / "universal" / "lib" / "spark" / + s"pio-data-elasticsearch1-assembly-${version.value}.jar" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/storage/hbase/build.sbt ---------------------------------------------------------------------- diff --git a/storage/hbase/build.sbt b/storage/hbase/build.sbt index c33db2d..1e904fa 100644 --- a/storage/hbase/build.sbt +++ b/storage/hbase/build.sbt @@ -45,4 +45,5 @@ assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeSca test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile.getParentFile / - "assembly" / "spark" / s"pio-data-hbase-assembly-${version.value}.jar" + "assembly" / "src" / "universal" / "lib" / "spark" / + s"pio-data-hbase-assembly-${version.value}.jar" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/storage/hdfs/build.sbt ---------------------------------------------------------------------- diff --git a/storage/hdfs/build.sbt b/storage/hdfs/build.sbt index f8a2bd5..26a3122 100644 --- a/storage/hdfs/build.sbt +++ b/storage/hdfs/build.sbt @@ -35,4 +35,5 @@ assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeSca test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile.getParentFile / - "assembly" / "spark" / ("pio-data-hdfs-assembly-" + version.value + ".jar") + "assembly" / "src" / "universal" / "lib" / "spark" / + ("pio-data-hdfs-assembly-" + version.value + ".jar") http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/storage/jdbc/build.sbt ---------------------------------------------------------------------- diff --git a/storage/jdbc/build.sbt b/storage/jdbc/build.sbt index 3c5026a..c5bcb12 100644 --- a/storage/jdbc/build.sbt +++ b/storage/jdbc/build.sbt @@ -36,4 +36,5 @@ assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeSca test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile.getParentFile / - "assembly" / "spark" / s"pio-data-jdbc-assembly-${version.value}.jar" + "assembly" / "src" / "universal" / "lib" / "spark" / + s"pio-data-jdbc-assembly-${version.value}.jar" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/storage/localfs/build.sbt ---------------------------------------------------------------------- diff --git a/storage/localfs/build.sbt b/storage/localfs/build.sbt index 3413e97..2306d24 100644 --- a/storage/localfs/build.sbt +++ b/storage/localfs/build.sbt @@ -33,4 +33,5 @@ assemblyOption in assembly := (assemblyOption in 
assembly).value.copy(includeSca test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile.getParentFile / - "assembly" / "spark" / s"pio-data-localfs-assembly-${version.value}.jar" + "assembly" / "src" / "universal" / "lib" / "spark" / + s"pio-data-localfs-assembly-${version.value}.jar" http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/tests/build_docker.sh ---------------------------------------------------------------------- diff --git a/tests/build_docker.sh b/tests/build_docker.sh index 27fb744..d467e97 100755 --- a/tests/build_docker.sh +++ b/tests/build_docker.sh @@ -38,8 +38,7 @@ fi -Dhadoop.version=$PIO_HADOOP_VERSION \ -Delasticsearch.version=$PIO_ELASTICSEARCH_VERSION sbt/sbt clean storage/clean -rm -rf assembly -mkdir assembly +rm -rf assembly/*.jar cp dist/lib/*.jar assembly/ rm -rf lib/spark mkdir -p lib/spark http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/tools/build.sbt ---------------------------------------------------------------------- diff --git a/tools/build.sbt b/tools/build.sbt index 23bb71a..86c4b6d 100644 --- a/tools/build.sbt +++ b/tools/build.sbt @@ -53,8 +53,9 @@ assemblyShadeRules in assembly := Seq( test in assembly := {} assemblyOutputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile / - "assembly" / s"pio-assembly-${version.value}.jar" + "assembly" / "src" / "universal" / "lib" / s"pio-assembly-${version.value}.jar" -cleanFiles <+= baseDirectory { base => base.getParentFile / "assembly" } +cleanFiles <+= baseDirectory { base => base.getParentFile / + "assembly" / "src" / "universal" / "lib" } pomExtra := childrenPomExtra.value http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala index 662dbbf..70e3837 100644 --- a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala +++ b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala @@ -189,6 +189,7 @@ object Runner extends EitherLogging { } val verboseArg = if (verbose) Seq("--verbose") else Nil + val pioLogDir = Option(System.getProperty("pio.log.dir")).getOrElse(s"${pioHome}/log") val sparkSubmit = Seq( sparkSubmitCommand, @@ -198,6 +199,7 @@ object Runner extends EitherLogging { sparkSubmitFiles, sparkSubmitExtraClasspaths, sparkSubmitKryo, + Seq("--driver-java-options", s"-Dpio.log.dir=${pioLogDir}"), Seq(mainJar), detectFilePaths(fs, sa.scratchUri, classArgs), Seq("--env", pioEnvVars), http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d0d2570b/tools/src/main/scala/org/apache/predictionio/tools/commands/Engine.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/org/apache/predictionio/tools/commands/Engine.scala b/tools/src/main/scala/org/apache/predictionio/tools/commands/Engine.scala index 3924117..e49c3fc 100644 --- a/tools/src/main/scala/org/apache/predictionio/tools/commands/Engine.scala +++ b/tools/src/main/scala/org/apache/predictionio/tools/commands/Engine.scala @@ -294,6 +294,7 @@ object Engine extends EitherLogging { val jarPluginFiles = jarFilesForSpark(pioHome) jarPluginFiles foreach { f => info(s"Found JAR: ${f.getName}") } val allJarFiles = jarFiles.map(_.getCanonicalPath) ++ 
jarPluginFiles.map(_.getCanonicalPath) + val pioLogDir = Option(System.getProperty("pio.log.dir")).getOrElse(s"${pioHome}/log") val cmd = s"${getSparkHome(sparkArgs.sparkHome)}/bin/spark-submit --jars " + s"${allJarFiles.mkString(",")} " + @@ -302,6 +303,7 @@ object Engine extends EitherLogging { } else { "" }) + + "--driver-java-options -Dpio.log.dir=${pioLogDir} " + "--class " + s"${mainClass} ${sparkArgs.sparkPassThrough.mkString(" ")} " + coreAssembly(pioHome) + " " +
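For anyone trying this change out locally, here is a rough sketch of how the new
packaging targets can be exercised. The flags and sbt task names come from the
make-distribution.sh and assembly/build.sbt changes above; the native tasks
additionally assume the usual system tooling (rpmbuild for RPM, dpkg-deb/fakeroot
for DEB) is available on the build machine.

    # Universal tarball plus native packages via the new flags
    ./make-distribution.sh --with-rpm --with-deb

    # Or drive the sbt-native-packager tasks directly
    sbt/sbt assembly/universal:packageBin assembly/universal:packageZipTarball
    sbt/sbt assembly/rpm:packageBin
    sbt/sbt assembly/debian:packageBin

Per the linuxPackageMappings and linuxPackageSymlinks settings, the resulting
packages place configuration under /etc/predictionio, logs under
/var/log/predictionio, and symlink /usr/bin/pio and /usr/bin/pio-daemon.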
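The updated bin/ scripts also resolve their directories from the environment, so a
packaged install and a source checkout can share the same scripts. A sketch of the
lookup order added above ("pio status" is only an illustrative subcommand):

    # PIO_CONF_DIR: explicit env var, else $PIO_HOME/conf, else /etc/predictionio
    # PIO_LOG_DIR:  explicit env var, else $PIO_HOME/log, else /var/log/predictionio, else $HOME
    PIO_CONF_DIR=/etc/predictionio PIO_LOG_DIR=/var/log/predictionio pio status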
