http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/App.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/io/prediction/tools/console/App.scala b/tools/src/main/scala/io/prediction/tools/console/App.scala deleted file mode 100644 index 2056f9d..0000000 --- a/tools/src/main/scala/io/prediction/tools/console/App.scala +++ /dev/null @@ -1,537 +0,0 @@ -/** Copyright 2015 TappingStone, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.prediction.tools.console - -import io.prediction.data.storage - -import grizzled.slf4j.Logging - -case class AppArgs( - id: Option[Int] = None, - name: String = "", - channel: String = "", - dataDeleteChannel: Option[String] = None, - all: Boolean = false, - force: Boolean = false, - description: Option[String] = None) - -object App extends Logging { - def create(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps() - // get the client in the beginning so error exit right away if can't access client - val events = storage.Storage.getLEvents() - apps.getByName(ca.app.name) map { app => - error(s"App ${ca.app.name} already exists. Aborting.") - 1 - } getOrElse { - ca.app.id.map { id => - apps.get(id) map { app => - error( - s"App ID ${id} already exists and maps to the app '${app.name}'. 
" + - "Aborting.") - return 1 - } - } - val appid = apps.insert(storage.App( - id = ca.app.id.getOrElse(0), - name = ca.app.name, - description = ca.app.description)) - appid map { id => - val dbInit = events.init(id) - val r = if (dbInit) { - info(s"Initialized Event Store for this app ID: ${id}.") - val accessKeys = storage.Storage.getMetaDataAccessKeys - val accessKey = accessKeys.insert(storage.AccessKey( - key = ca.accessKey.accessKey, - appid = id, - events = Seq())) - accessKey map { k => - info("Created new app:") - info(s" Name: ${ca.app.name}") - info(s" ID: ${id}") - info(s"Access Key: ${k}") - 0 - } getOrElse { - error(s"Unable to create new access key.") - 1 - } - } else { - error(s"Unable to initialize Event Store for this app ID: ${id}.") - // revert back the meta data change - try { - apps.delete(id) - 0 - } catch { - case e: Exception => - error(s"Failed to revert back the App meta-data change.", e) - error(s"The app ${ca.app.name} CANNOT be used!") - error(s"Please run 'pio app delete ${ca.app.name}' " + - "to delete this app!") - 1 - } - } - events.close() - r - } getOrElse { - error(s"Unable to create new app.") - 1 - } - } - } - - def list(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps.getAll().sortBy(_.name) - val accessKeys = storage.Storage.getMetaDataAccessKeys - val title = "Name" - val ak = "Access Key" - info(f"$title%20s | ID | $ak%64s | Allowed Event(s)") - apps foreach { app => - val keys = accessKeys.getByAppid(app.id) - keys foreach { k => - val events = - if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)" - info(f"${app.name}%20s | ${app.id}%4d | ${k.key}%64s | $events%s") - } - } - info(s"Finished listing ${apps.size} app(s).") - 0 - } - - def show(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps - val accessKeys = storage.Storage.getMetaDataAccessKeys - val channels = storage.Storage.getMetaDataChannels - apps.getByName(ca.app.name) map { app => - info(s" App Name: 
${app.name}") - info(s" App ID: ${app.id}") - info(s" Description: ${app.description.getOrElse("")}") - val keys = accessKeys.getByAppid(app.id) - - var firstKey = true - keys foreach { k => - val events = - if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)" - if (firstKey) { - info(f" Access Key: ${k.key}%s | ${events}%s") - firstKey = false - } else { - info(f" ${k.key}%s | ${events}%s") - } - } - - val chans = channels.getByAppid(app.id) - var firstChan = true - val titleName = "Channel Name" - val titleID = "Channel ID" - chans.foreach { ch => - if (firstChan) { - info(f" Channels: ${titleName}%16s | ${titleID}%10s ") - firstChan = false - } - info(f" ${ch.name}%16s | ${ch.id}%10s") - } - 0 - } getOrElse { - error(s"App ${ca.app.name} does not exist. Aborting.") - 1 - } - } - - def delete(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps - val accesskeys = storage.Storage.getMetaDataAccessKeys - val channels = storage.Storage.getMetaDataChannels - val events = storage.Storage.getLEvents() - val status = apps.getByName(ca.app.name) map { app => - info(s"The following app (including all channels) will be deleted. 
Are you sure?") - info(s" App Name: ${app.name}") - info(s" App ID: ${app.id}") - info(s" Description: ${app.description.getOrElse("")}") - val chans = channels.getByAppid(app.id) - var firstChan = true - val titleName = "Channel Name" - val titleID = "Channel ID" - chans.foreach { ch => - if (firstChan) { - info(f" Channels: ${titleName}%16s | ${titleID}%10s ") - firstChan = false - } - info(f" ${ch.name}%16s | ${ch.id}%10s") - } - - val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ") - choice match { - case "YES" => { - // delete channels - val delChannelStatus: Seq[Int] = chans.map { ch => - if (events.remove(app.id, Some(ch.id))) { - info(s"Removed Event Store of the channel ID: ${ch.id}") - try { - channels.delete(ch.id) - info(s"Deleted channel ${ch.name}") - 0 - } catch { - case e: Exception => - error(s"Error deleting channel ${ch.name}.", e) - 1 - } - } else { - error(s"Error removing Event Store of the channel ID: ${ch.id}.") - return 1 - } - } - - if (delChannelStatus.exists(_ != 0)) { - error("Error occurred while deleting channels. Aborting.") - return 1 - } - - try { - events.remove(app.id) - info(s"Removed Event Store for this app ID: ${app.id}") - } catch { - case e: Exception => - error(s"Error removing Event Store for this app. Aborting.", e) - return 1 - } - - accesskeys.getByAppid(app.id) foreach { key => - try { - accesskeys.delete(key.key) - info(s"Removed access key ${key.key}") - } catch { - case e: Exception => - error(s"Error removing access key ${key.key}. Aborting.", e) - return 1 - } - } - - try { - apps.delete(app.id) - info(s"Deleted app ${app.name}.") - } catch { - case e: Exception => - error(s"Error deleting app ${app.name}. Aborting.", e) - return 1 - } - - info("Done.") - 0 - } - case _ => - info("Aborted.") - 0 - } - } getOrElse { - error(s"App ${ca.app.name} does not exist. 
Aborting.") - 1 - } - events.close() - status - } - - def dataDelete(ca: ConsoleArgs): Int = { - if (ca.app.all) { - dataDeleteAll(ca) - } else { - dataDeleteOne(ca) - } - } - - def dataDeleteOne(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps - val channels = storage.Storage.getMetaDataChannels - apps.getByName(ca.app.name) map { app => - - val channelId = ca.app.dataDeleteChannel.map { ch => - val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap - if (!channelMap.contains(ch)) { - error(s"Unable to delete data for channel.") - error(s"Channel ${ch} doesn't exist.") - return 1 - } - - channelMap(ch) - } - - if (channelId.isDefined) { - info(s"Data of the following channel will be deleted. Are you sure?") - info(s"Channel Name: ${ca.app.dataDeleteChannel.get}") - info(s" Channel ID: ${channelId.get}") - info(s" App Name: ${app.name}") - info(s" App ID: ${app.id}") - info(s" Description: ${app.description}") - } else { - info(s"Data of the following app (default channel only) will be deleted. 
Are you sure?") - info(s" App Name: ${app.name}") - info(s" App ID: ${app.id}") - info(s" Description: ${app.description}") - } - - val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ") - - choice match { - case "YES" => { - val events = storage.Storage.getLEvents() - // remove table - val r1 = if (events.remove(app.id, channelId)) { - if (channelId.isDefined) { - info(s"Removed Event Store for this channel ID: ${channelId.get}") - } else { - info(s"Removed Event Store for this app ID: ${app.id}") - } - 0 - } else { - if (channelId.isDefined) { - error(s"Error removing Event Store for this channel.") - } else { - error(s"Error removing Event Store for this app.") - } - 1 - } - // re-create table - val dbInit = events.init(app.id, channelId) - val r2 = if (dbInit) { - if (channelId.isDefined) { - info(s"Initialized Event Store for this channel ID: ${channelId.get}.") - } else { - info(s"Initialized Event Store for this app ID: ${app.id}.") - } - 0 - } else { - if (channelId.isDefined) { - error(s"Unable to initialize Event Store for this channel ID:" + - s" ${channelId.get}.") - } else { - error(s"Unable to initialize Event Store for this appId:" + - s" ${app.id}.") - } - 1 - } - events.close() - info("Done.") - r1 + r2 - } - case _ => - info("Aborted.") - 0 - } - } getOrElse { - error(s"App ${ca.app.name} does not exist. Aborting.") - 1 - } - } - - def dataDeleteAll(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps - val channels = storage.Storage.getMetaDataChannels - val events = storage.Storage.getLEvents() - val status = apps.getByName(ca.app.name) map { app => - info(s"All data of the app (including default and all channels) will be deleted." 
+ - " Are you sure?") - info(s" App Name: ${app.name}") - info(s" App ID: ${app.id}") - info(s" Description: ${app.description}") - val chans = channels.getByAppid(app.id) - var firstChan = true - val titleName = "Channel Name" - val titleID = "Channel ID" - chans.foreach { ch => - if (firstChan) { - info(f" Channels: ${titleName}%16s | ${titleID}%10s ") - firstChan = false - } - info(f" ${ch.name}%16s | ${ch.id}%10s") - } - - val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ") - choice match { - case "YES" => { - // delete channels - val delChannelStatus: Seq[Int] = chans.map { ch => - val r1 = if (events.remove(app.id, Some(ch.id))) { - info(s"Removed Event Store of the channel ID: ${ch.id}") - 0 - } else { - error(s"Error removing Event Store of the channel ID: ${ch.id}.") - 1 - } - // re-create table - val dbInit = events.init(app.id, Some(ch.id)) - val r2 = if (dbInit) { - info(s"Initialized Event Store of the channel ID: ${ch.id}") - 0 - } else { - error(s"Unable to initialize Event Store of the channel ID: ${ch.id}.") - 1 - } - r1 + r2 - } - - if (delChannelStatus.filter(_ != 0).isEmpty) { - val r1 = if (events.remove(app.id)) { - info(s"Removed Event Store for this app ID: ${app.id}") - 0 - } else { - error(s"Error removing Event Store for this app.") - 1 - } - - val dbInit = events.init(app.id) - val r2 = if (dbInit) { - info(s"Initialized Event Store for this app ID: ${app.id}.") - 0 - } else { - error(s"Unable to initialize Event Store for this appId: ${app.id}.") - 1 - } - info("Done.") - r1 + r2 - } else 1 - } - case _ => - info("Aborted.") - 0 - } - } getOrElse { - error(s"App ${ca.app.name} does not exist. 
Aborting.") - 1 - } - events.close() - status - } - - def channelNew(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps - val channels = storage.Storage.getMetaDataChannels - val events = storage.Storage.getLEvents() - val newChannel = ca.app.channel - val status = apps.getByName(ca.app.name) map { app => - val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap - if (channelMap.contains(newChannel)) { - error(s"Unable to create new channel.") - error(s"Channel ${newChannel} already exists.") - 1 - } else if (!storage.Channel.isValidName(newChannel)) { - error(s"Unable to create new channel.") - error(s"The channel name ${newChannel} is invalid.") - error(s"${storage.Channel.nameConstraint}") - 1 - } else { - - val channelId = channels.insert(storage.Channel( - id = 0, // new id will be assigned - appid = app.id, - name = newChannel - )) - channelId.map { chanId => - info(s"Updated Channel meta-data.") - // initialize storage - val dbInit = events.init(app.id, Some(chanId)) - if (dbInit) { - info(s"Initialized Event Store for the channel: ${newChannel}.") - info(s"Created new channel:") - info(s" Channel Name: ${newChannel}") - info(s" Channel ID: ${chanId}") - info(s" App ID: ${app.id}") - 0 - } else { - error(s"Unable to create new channel.") - error(s"Failed to initalize Event Store.") - // reverted back the meta data - try { - channels.delete(chanId) - 0 - } catch { - case e: Exception => - error(s"Failed to revert back the Channel meta-data change.", e) - error(s"The channel ${newChannel} CANNOT be used!") - error(s"Please run 'pio app channel-delete ${app.name} ${newChannel}' " + - "to delete this channel!") - 1 - } - } - }.getOrElse { - error(s"Unable to create new channel.") - error(s"Failed to update Channel meta-data.") - 1 - } - } - } getOrElse { - error(s"App ${ca.app.name} does not exist. 
Aborting.") - 1 - } - events.close() - status - } - - def channelDelete(ca: ConsoleArgs): Int = { - val apps = storage.Storage.getMetaDataApps - val channels = storage.Storage.getMetaDataChannels - val events = storage.Storage.getLEvents() - val deleteChannel = ca.app.channel - val status = apps.getByName(ca.app.name) map { app => - val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap - if (!channelMap.contains(deleteChannel)) { - error(s"Unable to delete channel.") - error(s"Channel ${deleteChannel} doesn't exist.") - 1 - } else { - info(s"The following channel will be deleted. Are you sure?") - info(s" Channel Name: ${deleteChannel}") - info(s" Channel ID: ${channelMap(deleteChannel)}") - info(s" App Name: ${app.name}") - info(s" App ID: ${app.id}") - val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ") - choice match { - case "YES" => { - // NOTE: remove storage first before remove meta data (in case remove storage failed) - val dbRemoved = events.remove(app.id, Some(channelMap(deleteChannel))) - if (dbRemoved) { - info(s"Removed Event Store for this channel: ${deleteChannel}") - try { - channels.delete(channelMap(deleteChannel)) - info(s"Deleted channel: ${deleteChannel}.") - 0 - } catch { - case e: Exception => - error(s"Unable to delete channel.", e) - error(s"Failed to update Channel meta-data.") - error(s"The channel ${deleteChannel} CANNOT be used!") - error(s"Please run 'pio app channel-delete ${app.name} ${deleteChannel}' " + - "to delete this channel again!") - 1 - } - } else { - error(s"Unable to delete channel.") - error(s"Error removing Event Store for this channel.") - 1 - } - } - case _ => - info("Aborted.") - 0 - } - } - } getOrElse { - error(s"App ${ca.app.name} does not exist. Aborting.") - 1 - } - events.close() - status - } - -}
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Console.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/io/prediction/tools/console/Console.scala b/tools/src/main/scala/io/prediction/tools/console/Console.scala deleted file mode 100644 index 81e2d7a..0000000 --- a/tools/src/main/scala/io/prediction/tools/console/Console.scala +++ /dev/null @@ -1,1277 +0,0 @@ -/** Copyright 2015 TappingStone, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.prediction.tools.console - -import java.io.File -import java.net.URI - -import grizzled.slf4j.Logging -import io.prediction.controller.Utils -import io.prediction.core.BuildInfo -import io.prediction.data.api.EventServer -import io.prediction.data.api.EventServerConfig -import io.prediction.data.storage -import io.prediction.data.storage.EngineManifest -import io.prediction.data.storage.EngineManifestSerializer -import io.prediction.data.storage.hbase.upgrade.Upgrade_0_8_3 -import io.prediction.tools.RegisterEngine -import io.prediction.tools.RunServer -import io.prediction.tools.RunWorkflow -import io.prediction.tools.admin.AdminServer -import io.prediction.tools.admin.AdminServerConfig -import io.prediction.tools.dashboard.Dashboard -import io.prediction.tools.dashboard.DashboardConfig -import io.prediction.workflow.JsonExtractorOption -import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption -import io.prediction.workflow.WorkflowUtils -import org.apache.commons.io.FileUtils -import org.json4s._ -import org.json4s.native.JsonMethods._ -import org.json4s.native.Serialization.read -import org.json4s.native.Serialization.write -import semverfi._ - -import scala.collection.JavaConversions._ -import scala.io.Source -import scala.sys.process._ -import scala.util.Random -import scalaj.http.Http - -case class ConsoleArgs( - common: CommonArgs = CommonArgs(), - build: BuildArgs = BuildArgs(), - app: AppArgs = AppArgs(), - accessKey: AccessKeyArgs = AccessKeyArgs(), - deploy: DeployArgs = DeployArgs(), - eventServer: EventServerArgs = EventServerArgs(), - adminServer: AdminServerArgs = AdminServerArgs(), - dashboard: DashboardArgs = DashboardArgs(), - upgrade: UpgradeArgs = UpgradeArgs(), - template: TemplateArgs = TemplateArgs(), - export: ExportArgs = ExportArgs(), - imprt: ImportArgs = ImportArgs(), - commands: Seq[String] = Seq(), - metricsClass: Option[String] = None, - metricsParamsJsonPath: Option[String] = None, - paramsPath: String 
= "params", - engineInstanceId: Option[String] = None, - mainClass: Option[String] = None) - -case class CommonArgs( - batch: String = "", - sparkPassThrough: Seq[String] = Seq(), - driverPassThrough: Seq[String] = Seq(), - pioHome: Option[String] = None, - sparkHome: Option[String] = None, - engineId: Option[String] = None, - engineVersion: Option[String] = None, - engineFactory: Option[String] = None, - engineParamsKey: Option[String] = None, - evaluation: Option[String] = None, - engineParamsGenerator: Option[String] = None, - variantJson: File = new File("engine.json"), - manifestJson: File = new File("manifest.json"), - stopAfterRead: Boolean = false, - stopAfterPrepare: Boolean = false, - skipSanityCheck: Boolean = false, - verbose: Boolean = false, - verbosity: Int = 0, - sparkKryo: Boolean = false, - scratchUri: Option[URI] = None, - jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both) - -case class BuildArgs( - sbt: Option[File] = None, - sbtExtra: Option[String] = None, - sbtAssemblyPackageDependency: Boolean = true, - sbtClean: Boolean = false, - uberJar: Boolean = false, - forceGeneratePIOSbt: Boolean = false) - -case class DeployArgs( - ip: String = "0.0.0.0", - port: Int = 8000, - logUrl: Option[String] = None, - logPrefix: Option[String] = None) - -case class EventServerArgs( - enabled: Boolean = false, - ip: String = "0.0.0.0", - port: Int = 7070, - stats: Boolean = false) - -case class AdminServerArgs( -ip: String = "127.0.0.1", -port: Int = 7071) - -case class DashboardArgs( - ip: String = "127.0.0.1", - port: Int = 9000) - -case class UpgradeArgs( - from: String = "0.0.0", - to: String = "0.0.0", - oldAppId: Int = 0, - newAppId: Int = 0 -) - -object Console extends Logging { - def main(args: Array[String]): Unit = { - val parser = new scopt.OptionParser[ConsoleArgs]("pio") { - override def showUsageOnError: Boolean = false - head("PredictionIO Command Line Interface Console", BuildInfo.version) - help("") - note("Note that it is 
possible to supply pass-through arguments at\n" + - "the end of the command by using a '--' separator, e.g.\n\n" + - "pio train --params-path params -- --master spark://mycluster:7077\n" + - "\nIn the example above, the '--master' argument will be passed to\n" + - "underlying spark-submit command. Please refer to the usage section\n" + - "for each command for more information.\n\n" + - "The following options are common to all commands:\n") - opt[String]("pio-home") action { (x, c) => - c.copy(common = c.common.copy(pioHome = Some(x))) - } text("Root directory of a PredictionIO installation.\n" + - " Specify this if automatic discovery fail.") - opt[String]("spark-home") action { (x, c) => - c.copy(common = c.common.copy(sparkHome = Some(x))) - } text("Root directory of an Apache Spark installation.\n" + - " If not specified, will try to use the SPARK_HOME\n" + - " environmental variable. If this fails as well, default to\n" + - " current directory.") - opt[String]("engine-id") abbr("ei") action { (x, c) => - c.copy(common = c.common.copy(engineId = Some(x))) - } text("Specify an engine ID. Usually used by distributed deployment.") - opt[String]("engine-version") abbr("ev") action { (x, c) => - c.copy(common = c.common.copy(engineVersion = Some(x))) - } text("Specify an engine version. Usually used by distributed " + - "deployment.") - opt[File]("variant") abbr("v") action { (x, c) => - c.copy(common = c.common.copy(variantJson = x)) - } - opt[File]("manifest") abbr("m") action { (x, c) => - c.copy(common = c.common.copy(manifestJson = x)) - } - opt[File]("sbt") action { (x, c) => - c.copy(build = c.build.copy(sbt = Some(x))) - } validate { x => - if (x.exists) { - success - } else { - failure(s"${x.getCanonicalPath} does not exist.") - } - } text("Path to sbt. 
Default: sbt") - opt[Unit]("verbose") action { (x, c) => - c.copy(common = c.common.copy(verbose = true)) - } - opt[Unit]("spark-kryo") abbr("sk") action { (x, c) => - c.copy(common = c.common.copy(sparkKryo = true)) - } - opt[String]("scratch-uri") action { (x, c) => - c.copy(common = c.common.copy(scratchUri = Some(new URI(x)))) - } - note("") - cmd("version"). - text("Displays the version of this command line console."). - action { (_, c) => - c.copy(commands = c.commands :+ "version") - } - note("") - cmd("help").action { (_, c) => - c.copy(commands = c.commands :+ "help") - } children( - arg[String]("<command>") optional() - action { (x, c) => - c.copy(commands = c.commands :+ x) - } - ) - note("") - cmd("build"). - text("Build an engine at the current directory."). - action { (_, c) => - c.copy(commands = c.commands :+ "build") - } children( - opt[String]("sbt-extra") action { (x, c) => - c.copy(build = c.build.copy(sbtExtra = Some(x))) - } text("Extra command to pass to SBT when it builds your engine."), - opt[Unit]("clean") action { (x, c) => - c.copy(build = c.build.copy(sbtClean = true)) - } text("Clean build."), - opt[Unit]("no-asm") action { (x, c) => - c.copy(build = c.build.copy(sbtAssemblyPackageDependency = false)) - } text("Skip building external dependencies assembly."), - opt[Unit]("uber-jar") action { (x, c) => - c.copy(build = c.build.copy(uberJar = true)) - }, - opt[Unit]("generate-pio-sbt") action { (x, c) => - c.copy(build = c.build.copy(forceGeneratePIOSbt = true)) - } - ) - note("") - cmd("unregister"). - text("Unregister an engine at the current directory."). - action { (_, c) => - c.copy(commands = c.commands :+ "unregister") - } - note("") - cmd("train"). - text("Kick off a training using an engine. This will produce an\n" + - "engine instance. This command will pass all pass-through\n" + - "arguments to its underlying spark-submit command."). 
- action { (_, c) => - c.copy(commands = c.commands :+ "train") - } children( - opt[String]("batch") action { (x, c) => - c.copy(common = c.common.copy(batch = x)) - } text("Batch label of the run."), - opt[String]("params-path") action { (x, c) => - c.copy(paramsPath = x) - } text("Directory to lookup parameters JSON files. Default: params"), - opt[String]("metrics-params") abbr("mp") action { (x, c) => - c.copy(metricsParamsJsonPath = Some(x)) - } text("Metrics parameters JSON file. Will try to use\n" + - " metrics.json in the base path."), - opt[Unit]("skip-sanity-check") abbr("ssc") action { (x, c) => - c.copy(common = c.common.copy(skipSanityCheck = true)) - }, - opt[Unit]("stop-after-read") abbr("sar") action { (x, c) => - c.copy(common = c.common.copy(stopAfterRead = true)) - }, - opt[Unit]("stop-after-prepare") abbr("sap") action { (x, c) => - c.copy(common = c.common.copy(stopAfterPrepare = true)) - }, - opt[Unit]("uber-jar") action { (x, c) => - c.copy(build = c.build.copy(uberJar = true)) - }, - opt[Int]("verbosity") action { (x, c) => - c.copy(common = c.common.copy(verbosity = x)) - }, - opt[String]("engine-factory") action { (x, c) => - c.copy(common = c.common.copy(engineFactory = Some(x))) - }, - opt[String]("engine-params-key") action { (x, c) => - c.copy(common = c.common.copy(engineParamsKey = Some(x))) - }, - opt[String]("json-extractor") action { (x, c) => - c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x))) - } validate { x => - if (JsonExtractorOption.values.map(_.toString).contains(x)) { - success - } else { - val validOptions = JsonExtractorOption.values.mkString("|") - failure(s"$x is not a valid json-extractor option [$validOptions]") - } - } - ) - note("") - cmd("eval"). - text("Kick off an evaluation using an engine. This will produce an\n" + - "engine instance. This command will pass all pass-through\n" + - "arguments to its underlying spark-submit command."). 
- action { (_, c) => - c.copy(commands = c.commands :+ "eval") - } children( - arg[String]("<evaluation-class>") action { (x, c) => - c.copy(common = c.common.copy(evaluation = Some(x))) - }, - arg[String]("[<engine-parameters-generator-class>]") optional() action { (x, c) => - c.copy(common = c.common.copy(engineParamsGenerator = Some(x))) - } text("Optional engine parameters generator class, overriding the first argument"), - opt[String]("batch") action { (x, c) => - c.copy(common = c.common.copy(batch = x)) - } text("Batch label of the run."), - opt[String]("json-extractor") action { (x, c) => - c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x))) - } validate { x => - if (JsonExtractorOption.values.map(_.toString).contains(x)) { - success - } else { - val validOptions = JsonExtractorOption.values.mkString("|") - failure(s"$x is not a valid json-extractor option [$validOptions]") - } - } - ) - note("") - cmd("deploy"). - text("Deploy an engine instance as a prediction server. This\n" + - "command will pass all pass-through arguments to its underlying\n" + - "spark-submit command."). - action { (_, c) => - c.copy(commands = c.commands :+ "deploy") - } children( - opt[String]("batch") action { (x, c) => - c.copy(common = c.common.copy(batch = x)) - } text("Batch label of the deployment."), - opt[String]("engine-instance-id") action { (x, c) => - c.copy(engineInstanceId = Some(x)) - } text("Engine instance ID."), - opt[String]("ip") action { (x, c) => - c.copy(deploy = c.deploy.copy(ip = x)) - }, - opt[Int]("port") action { (x, c) => - c.copy(deploy = c.deploy.copy(port = x)) - } text("Port to bind to. 
Default: 8000"), - opt[Unit]("feedback") action { (_, c) => - c.copy(eventServer = c.eventServer.copy(enabled = true)) - } text("Enable feedback loop to event server."), - opt[String]("event-server-ip") action { (x, c) => - c.copy(eventServer = c.eventServer.copy(ip = x)) - }, - opt[Int]("event-server-port") action { (x, c) => - c.copy(eventServer = c.eventServer.copy(port = x)) - } text("Event server port. Default: 7070"), - opt[Int]("admin-server-port") action { (x, c) => - c.copy(adminServer = c.adminServer.copy(port = x)) - } text("Admin server port. Default: 7071"), - opt[String]("admin-server-port") action { (x, c) => - c.copy(adminServer = c.adminServer.copy(ip = x)) - } text("Admin server IP. Default: localhost"), - opt[String]("accesskey") action { (x, c) => - c.copy(accessKey = c.accessKey.copy(accessKey = x)) - } text("Access key of the App where feedback data will be stored."), - opt[Unit]("uber-jar") action { (x, c) => - c.copy(build = c.build.copy(uberJar = true)) - }, - opt[String]("log-url") action { (x, c) => - c.copy(deploy = c.deploy.copy(logUrl = Some(x))) - }, - opt[String]("log-prefix") action { (x, c) => - c.copy(deploy = c.deploy.copy(logPrefix = Some(x))) - }, - opt[String]("json-extractor") action { (x, c) => - c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x))) - } validate { x => - if (JsonExtractorOption.values.map(_.toString).contains(x)) { - success - } else { - val validOptions = JsonExtractorOption.values.mkString("|") - failure(s"$x is not a valid json-extractor option [$validOptions]") - } - } - ) - note("") - cmd("undeploy"). - text("Undeploy an engine instance as a prediction server."). - action { (_, c) => - c.copy(commands = c.commands :+ "undeploy") - } children( - opt[String]("ip") action { (x, c) => - c.copy(deploy = c.deploy.copy(ip = x)) - }, - opt[Int]("port") action { (x, c) => - c.copy(deploy = c.deploy.copy(port = x)) - } text("Port to unbind from. 
Default: 8000") - ) - note("") - cmd("dashboard"). - text("Launch a dashboard at the specific IP and port."). - action { (_, c) => - c.copy(commands = c.commands :+ "dashboard") - } children( - opt[String]("ip") action { (x, c) => - c.copy(dashboard = c.dashboard.copy(ip = x)) - }, - opt[Int]("port") action { (x, c) => - c.copy(dashboard = c.dashboard.copy(port = x)) - } text("Port to bind to. Default: 9000") - ) - note("") - cmd("eventserver"). - text("Launch an Event Server at the specific IP and port."). - action { (_, c) => - c.copy(commands = c.commands :+ "eventserver") - } children( - opt[String]("ip") action { (x, c) => - c.copy(eventServer = c.eventServer.copy(ip = x)) - }, - opt[Int]("port") action { (x, c) => - c.copy(eventServer = c.eventServer.copy(port = x)) - } text("Port to bind to. Default: 7070"), - opt[Unit]("stats") action { (x, c) => - c.copy(eventServer = c.eventServer.copy(stats = true)) - } - ) - cmd("adminserver"). - text("Launch an Admin Server at the specific IP and port."). - action { (_, c) => - c.copy(commands = c.commands :+ "adminserver") - } children( - opt[String]("ip") action { (x, c) => - c.copy(adminServer = c.adminServer.copy(ip = x)) - } text("IP to bind to. Default: localhost"), - opt[Int]("port") action { (x, c) => - c.copy(adminServer = c.adminServer.copy(port = x)) - } text("Port to bind to. Default: 7071") - ) - note("") - cmd("run"). - text("Launch a driver program. This command will pass all\n" + - "pass-through arguments to its underlying spark-submit command.\n" + - "In addition, it also supports a second level of pass-through\n" + - "arguments to the driver program, e.g.\n" + - "pio run -- --master spark://localhost:7077 -- --driver-arg foo"). 
- action { (_, c) => - c.copy(commands = c.commands :+ "run") - } children( - arg[String]("<main class>") action { (x, c) => - c.copy(mainClass = Some(x)) - } text("Main class name of the driver program."), - opt[String]("sbt-extra") action { (x, c) => - c.copy(build = c.build.copy(sbtExtra = Some(x))) - } text("Extra command to pass to SBT when it builds your engine."), - opt[Unit]("clean") action { (x, c) => - c.copy(build = c.build.copy(sbtClean = true)) - } text("Clean build."), - opt[Unit]("no-asm") action { (x, c) => - c.copy(build = c.build.copy(sbtAssemblyPackageDependency = false)) - } text("Skip building external dependencies assembly.") - ) - note("") - cmd("status"). - text("Displays status information about the PredictionIO system."). - action { (_, c) => - c.copy(commands = c.commands :+ "status") - } - note("") - cmd("upgrade"). - text("Upgrade tool"). - action { (_, c) => - c.copy(commands = c.commands :+ "upgrade") - } children( - arg[String]("<from version>") action { (x, c) => - c.copy(upgrade = c.upgrade.copy(from = x)) - } text("The version upgraded from."), - arg[String]("<to version>") action { (x, c) => - c.copy(upgrade = c.upgrade.copy(to = x)) - } text("The version upgraded to."), - arg[Int]("<old App ID>") action { (x, c) => - c.copy(upgrade = c.upgrade.copy(oldAppId = x)) - } text("Old App ID."), - arg[Int]("<new App ID>") action { (x, c) => - c.copy(upgrade = c.upgrade.copy(newAppId = x)) - } text("New App ID.") - ) - note("") - cmd("app"). - text("Manage apps.\n"). - action { (_, c) => - c.copy(commands = c.commands :+ "app") - } children( - cmd("new"). - text("Create a new app key to app ID mapping."). 
- action { (_, c) => - c.copy(commands = c.commands :+ "new") - } children( - opt[Int]("id") action { (x, c) => - c.copy(app = c.app.copy(id = Some(x))) - }, - opt[String]("description") action { (x, c) => - c.copy(app = c.app.copy(description = Some(x))) - }, - opt[String]("access-key") action { (x, c) => - c.copy(accessKey = c.accessKey.copy(accessKey = x)) - }, - arg[String]("<name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } - ), - note(""), - cmd("list"). - text("List all apps."). - action { (_, c) => - c.copy(commands = c.commands :+ "list") - }, - note(""), - cmd("show"). - text("Show details of an app."). - action { (_, c) => - c.copy(commands = c.commands :+ "show") - } children ( - arg[String]("<name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } text("Name of the app to be shown.") - ), - note(""), - cmd("delete"). - text("Delete an app."). - action { (_, c) => - c.copy(commands = c.commands :+ "delete") - } children( - arg[String]("<name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } text("Name of the app to be deleted."), - opt[Unit]("force") abbr("f") action { (x, c) => - c.copy(app = c.app.copy(force = true)) - } text("Delete an app without prompting for confirmation") - ), - note(""), - cmd("data-delete"). - text("Delete data of an app"). 
- action { (_, c) => - c.copy(commands = c.commands :+ "data-delete") - } children( - arg[String]("<name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } text("Name of the app whose data to be deleted."), - opt[String]("channel") action { (x, c) => - c.copy(app = c.app.copy(dataDeleteChannel = Some(x))) - } text("Name of channel whose data to be deleted."), - opt[Unit]("all") action { (x, c) => - c.copy(app = c.app.copy(all = true)) - } text("Delete data of all channels including default"), - opt[Unit]("force") abbr("f") action { (x, c) => - c.copy(app = c.app.copy(force = true)) - } text("Delete data of an app without prompting for confirmation") - ), - note(""), - cmd("channel-new"). - text("Create a new channel for the app."). - action { (_, c) => - c.copy(commands = c.commands :+ "channel-new") - } children ( - arg[String]("<name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } text("App name."), - arg[String]("<channel>") action { (x, c) => - c.copy(app = c.app.copy(channel = x)) - } text ("Channel name to be created.") - ), - note(""), - cmd("channel-delete"). - text("Delete a channel of the app."). - action { (_, c) => - c.copy(commands = c.commands :+ "channel-delete") - } children ( - arg[String]("<name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } text("App name."), - arg[String]("<channel>") action { (x, c) => - c.copy(app = c.app.copy(channel = x)) - } text ("Channel name to be deleted."), - opt[Unit]("force") abbr("f") action { (x, c) => - c.copy(app = c.app.copy(force = true)) - } text("Delete a channel of the app without prompting for confirmation") - ) - ) - note("") - cmd("accesskey"). - text("Manage app access keys.\n"). - action { (_, c) => - c.copy(commands = c.commands :+ "accesskey") - } children( - cmd("new"). - text("Add allowed event(s) to an access key."). 
- action { (_, c) => - c.copy(commands = c.commands :+ "new") - } children( - opt[String]("key") action { (x, c) => - c.copy(accessKey = c.accessKey.copy(accessKey = x)) - }, - arg[String]("<app name>") action { (x, c) => - c.copy(app = c.app.copy(name = x)) - }, - arg[String]("[<event1> <event2> ...]") unbounded() optional() - action { (x, c) => - c.copy(accessKey = c.accessKey.copy( - events = c.accessKey.events :+ x)) - } - ), - cmd("list"). - text("List all access keys of an app."). - action { (_, c) => - c.copy(commands = c.commands :+ "list") - } children( - arg[String]("<app name>") optional() action { (x, c) => - c.copy(app = c.app.copy(name = x)) - } text("App name.") - ), - note(""), - cmd("delete"). - text("Delete an access key."). - action { (_, c) => - c.copy(commands = c.commands :+ "delete") - } children( - arg[String]("<access key>") action { (x, c) => - c.copy(accessKey = c.accessKey.copy(accessKey = x)) - } text("The access key to be deleted.") - ) - ) - cmd("template"). - action { (_, c) => - c.copy(commands = c.commands :+ "template") - } children( - cmd("get"). - action { (_, c) => - c.copy(commands = c.commands :+ "get") - } children( - arg[String]("<template ID>") required() action { (x, c) => - c.copy(template = c.template.copy(repository = x)) - }, - arg[String]("<new engine directory>") action { (x, c) => - c.copy(template = c.template.copy(directory = x)) - }, - opt[String]("version") action { (x, c) => - c.copy(template = c.template.copy(version = Some(x))) - }, - opt[String]("name") action { (x, c) => - c.copy(template = c.template.copy(name = Some(x))) - }, - opt[String]("package") action { (x, c) => - c.copy(template = c.template.copy(packageName = Some(x))) - }, - opt[String]("email") action { (x, c) => - c.copy(template = c.template.copy(email = Some(x))) - } - ), - cmd("list"). - action { (_, c) => - c.copy(commands = c.commands :+ "list") - } - ) - cmd("export"). 
- action { (_, c) => - c.copy(commands = c.commands :+ "export") - } children( - opt[Int]("appid") required() action { (x, c) => - c.copy(export = c.export.copy(appId = x)) - }, - opt[String]("output") required() action { (x, c) => - c.copy(export = c.export.copy(outputPath = x)) - }, - opt[String]("format") action { (x, c) => - c.copy(export = c.export.copy(format = x)) - }, - opt[String]("channel") action { (x, c) => - c.copy(export = c.export.copy(channel = Some(x))) - } - ) - cmd("import"). - action { (_, c) => - c.copy(commands = c.commands :+ "import") - } children( - opt[Int]("appid") required() action { (x, c) => - c.copy(imprt = c.imprt.copy(appId = x)) - }, - opt[String]("input") required() action { (x, c) => - c.copy(imprt = c.imprt.copy(inputPath = x)) - }, - opt[String]("channel") action { (x, c) => - c.copy(imprt = c.imprt.copy(channel = Some(x))) - } - ) - } - - val separatorIndex = args.indexWhere(_ == "--") - val (consoleArgs, theRest) = - if (separatorIndex == -1) { - (args, Array[String]()) - } else { - args.splitAt(separatorIndex) - } - val allPassThroughArgs = theRest.drop(1) - val secondSepIdx = allPassThroughArgs.indexWhere(_ == "--") - val (sparkPassThroughArgs, driverPassThroughArgs) = - if (secondSepIdx == -1) { - (allPassThroughArgs, Array[String]()) - } else { - val t = allPassThroughArgs.splitAt(secondSepIdx) - (t._1, t._2.drop(1)) - } - - parser.parse(consoleArgs, ConsoleArgs()) map { pca => - val ca = pca.copy(common = pca.common.copy( - sparkPassThrough = sparkPassThroughArgs, - driverPassThrough = driverPassThroughArgs)) - WorkflowUtils.modifyLogging(ca.common.verbose) - val rv: Int = ca.commands match { - case Seq("") => - System.err.println(help()) - 1 - case Seq("version") => - version(ca) - 0 - case Seq("build") => - regenerateManifestJson(ca.common.manifestJson) - build(ca) - case Seq("unregister") => - unregister(ca) - 0 - case Seq("train") => - regenerateManifestJson(ca.common.manifestJson) - train(ca) - case Seq("eval") => - 
regenerateManifestJson(ca.common.manifestJson) - train(ca) - case Seq("deploy") => - deploy(ca) - case Seq("undeploy") => - undeploy(ca) - case Seq("dashboard") => - dashboard(ca) - 0 - case Seq("eventserver") => - eventserver(ca) - 0 - case Seq("adminserver") => - adminserver(ca) - 0 - case Seq("run") => - generateManifestJson(ca.common.manifestJson) - run(ca) - case Seq("status") => - status(ca) - case Seq("upgrade") => - upgrade(ca) - 0 - case Seq("app", "new") => - App.create(ca) - case Seq("app", "list") => - App.list(ca) - case Seq("app", "show") => - App.show(ca) - case Seq("app", "delete") => - App.delete(ca) - case Seq("app", "data-delete") => - App.dataDelete(ca) - case Seq("app", "channel-new") => - App.channelNew(ca) - case Seq("app", "channel-delete") => - App.channelDelete(ca) - case Seq("accesskey", "new") => - AccessKey.create(ca) - case Seq("accesskey", "list") => - AccessKey.list(ca) - case Seq("accesskey", "delete") => - AccessKey.delete(ca) - case Seq("template", "get") => - Template.get(ca) - case Seq("template", "list") => - Template.list(ca) - case Seq("export") => - Export.eventsToFile(ca) - case Seq("import") => - Import.fileToEvents(ca) - case _ => - System.err.println(help(ca.commands)) - 1 - } - sys.exit(rv) - } getOrElse { - val command = args.toSeq.filterNot(_.startsWith("--")).head - System.err.println(help(Seq(command))) - sys.exit(1) - } - } - - def help(commands: Seq[String] = Seq()): String = { - if (commands.isEmpty) { - mainHelp - } else { - val stripped = - (if (commands.head == "help") commands.drop(1) else commands). 
- mkString("-") - helpText.getOrElse(stripped, s"Help is unavailable for ${stripped}.") - } - } - - val mainHelp = txt.main().toString - - val helpText = Map( - "" -> mainHelp, - "status" -> txt.status().toString, - "upgrade" -> txt.upgrade().toString, - "version" -> txt.version().toString, - "template" -> txt.template().toString, - "build" -> txt.build().toString, - "train" -> txt.train().toString, - "deploy" -> txt.deploy().toString, - "eventserver" -> txt.eventserver().toString, - "adminserver" -> txt.adminserver().toString, - "app" -> txt.app().toString, - "accesskey" -> txt.accesskey().toString, - "import" -> txt.imprt().toString, - "export" -> txt.export().toString, - "run" -> txt.run().toString, - "eval" -> txt.eval().toString, - "dashboard" -> txt.dashboard().toString) - - def version(ca: ConsoleArgs): Unit = println(BuildInfo.version) - - def build(ca: ConsoleArgs): Int = { - Template.verifyTemplateMinVersion(new File("template.json")) - compile(ca) - info("Looking for an engine...") - val jarFiles = jarFilesForScala - if (jarFiles.isEmpty) { - error("No engine found. Your build might have failed. 
Aborting.") - return 1 - } - jarFiles foreach { f => info(s"Found ${f.getName}")} - RegisterEngine.registerEngine( - ca.common.manifestJson, - jarFiles, - false) - info("Your engine is ready for training.") - 0 - } - - def unregister(ca: ConsoleArgs): Unit = { - RegisterEngine.unregisterEngine(ca.common.manifestJson) - } - - def train(ca: ConsoleArgs): Int = { - Template.verifyTemplateMinVersion(new File("template.json")) - withRegisteredManifest( - ca.common.manifestJson, - ca.common.engineId, - ca.common.engineVersion) { em => - RunWorkflow.newRunWorkflow(ca, em) - } - } - - def deploy(ca: ConsoleArgs): Int = { - Template.verifyTemplateMinVersion(new File("template.json")) - withRegisteredManifest( - ca.common.manifestJson, - ca.common.engineId, - ca.common.engineVersion) { em => - val variantJson = parse(Source.fromFile(ca.common.variantJson).mkString) - val variantId = variantJson \ "id" match { - case JString(s) => s - case _ => - error("Unable to read engine variant ID from " + - s"${ca.common.variantJson.getCanonicalPath}. Aborting.") - return 1 - } - val engineInstances = storage.Storage.getMetaDataEngineInstances - val engineInstance = ca.engineInstanceId map { eid => - engineInstances.get(eid) - } getOrElse { - engineInstances.getLatestCompleted(em.id, em.version, variantId) - } - engineInstance map { r => - RunServer.newRunServer(ca, em, r.id) - } getOrElse { - ca.engineInstanceId map { eid => - error( - s"Invalid engine instance ID ${ca.engineInstanceId}. Aborting.") - } getOrElse { - error( - s"No valid engine instance found for engine ${em.id} " + - s"${em.version}.\nTry running 'train' before 'deploy'. 
Aborting.") - } - 1 - } - } - } - - def dashboard(ca: ConsoleArgs): Unit = { - info(s"Creating dashboard at ${ca.dashboard.ip}:${ca.dashboard.port}") - Dashboard.createDashboard(DashboardConfig( - ip = ca.dashboard.ip, - port = ca.dashboard.port)) - } - - def eventserver(ca: ConsoleArgs): Unit = { - info( - s"Creating Event Server at ${ca.eventServer.ip}:${ca.eventServer.port}") - EventServer.createEventServer(EventServerConfig( - ip = ca.eventServer.ip, - port = ca.eventServer.port, - stats = ca.eventServer.stats)) - } - - def adminserver(ca: ConsoleArgs): Unit = { - info( - s"Creating Admin Server at ${ca.adminServer.ip}:${ca.adminServer.port}") - AdminServer.createAdminServer(AdminServerConfig( - ip = ca.adminServer.ip, - port = ca.adminServer.port - )) - } - - def undeploy(ca: ConsoleArgs): Int = { - val serverUrl = s"http://${ca.deploy.ip}:${ca.deploy.port}" - info( - s"Undeploying any existing engine instance at ${serverUrl}") - try { - val code = Http(s"${serverUrl}/stop").asString.code - code match { - case 200 => 0 - case 404 => - error(s"Another process is using ${serverUrl}. Unable to undeploy.") - 1 - case _ => - error(s"Another process is using ${serverUrl}, or an existing " + - s"engine server is not responding properly (HTTP ${code}). " + - "Unable to undeploy.") - 1 - } - } catch { - case e: java.net.ConnectException => - warn(s"Nothing at ${serverUrl}") - 0 - case _: Throwable => - error("Another process might be occupying " + - s"${ca.deploy.ip}:${ca.deploy.port}. 
Unable to undeploy.") - 1 - } - } - - def compile(ca: ConsoleArgs): Unit = { - // only add pioVersion to sbt if project/pio.sbt exists - if (new File("project", "pio-build.sbt").exists || ca.build.forceGeneratePIOSbt) { - FileUtils.writeLines( - new File("pio.sbt"), - Seq( - "// Generated automatically by pio build.", - "// Changes in this file will be overridden.", - "", - "pioVersion := \"" + BuildInfo.version + "\"")) - } - implicit val formats = Utils.json4sDefaultFormats - try { - val engineFactory = - (parse(Source.fromFile("engine.json").mkString) \ "engineFactory"). - extract[String] - WorkflowUtils.checkUpgrade("build", engineFactory) - } catch { - case e: Throwable => WorkflowUtils.checkUpgrade("build") - } - val sbt = detectSbt(ca) - info(s"Using command '${sbt}' at the current working directory to build.") - info("If the path above is incorrect, this process will fail.") - val asm = - if (ca.build.sbtAssemblyPackageDependency) { - " assemblyPackageDependency" - } else { - "" - } - val clean = if (ca.build.sbtClean) " clean" else "" - val buildCmd = s"${sbt} ${ca.build.sbtExtra.getOrElse("")}${clean} " + - (if (ca.build.uberJar) "assembly" else s"package${asm}") - val core = new File(s"pio-assembly-${BuildInfo.version}.jar") - if (ca.build.uberJar) { - info(s"Uber JAR enabled. Putting ${core.getName} in lib.") - val dst = new File("lib") - dst.mkdir() - FileUtils.copyFileToDirectory( - coreAssembly(ca.common.pioHome.get), - dst, - true) - } else { - if (new File("engine.json").exists()) { - info(s"Uber JAR disabled. Making sure lib/${core.getName} is absent.") - new File("lib", core.getName).delete() - } else { - info("Uber JAR disabled, but current working directory does not look " + - s"like an engine project directory. 
Please delete lib/${core.getName} manually.") - } - } - info(s"Going to run: ${buildCmd}") - try { - val r = - if (ca.common.verbose) { - buildCmd.!(ProcessLogger(line => info(line), line => error(line))) - } else { - buildCmd.!(ProcessLogger( - line => outputSbtError(line), - line => outputSbtError(line))) - } - if (r != 0) { - error(s"Return code of previous step is ${r}. Aborting.") - sys.exit(1) - } - info("Build finished successfully.") - } catch { - case e: java.io.IOException => - error(s"${e.getMessage}") - sys.exit(1) - } - } - - private def outputSbtError(line: String): Unit = { - """\[.*error.*\]""".r findFirstIn line foreach { _ => error(line) } - } - - def run(ca: ConsoleArgs): Int = { - compile(ca) - - val extraFiles = WorkflowUtils.thirdPartyConfFiles - - val jarFiles = jarFilesForScala - jarFiles foreach { f => info(s"Found JAR: ${f.getName}") } - val allJarFiles = jarFiles.map(_.getCanonicalPath) - val cmd = s"${getSparkHome(ca.common.sparkHome)}/bin/spark-submit --jars " + - s"${allJarFiles.mkString(",")} " + - (if (extraFiles.size > 0) { - s"--files ${extraFiles.mkString(",")} " - } else { - "" - }) + - "--class " + - s"${ca.mainClass.get} ${ca.common.sparkPassThrough.mkString(" ")} " + - coreAssembly(ca.common.pioHome.get) + " " + - ca.common.driverPassThrough.mkString(" ") - val proc = Process( - cmd, - None, - "SPARK_YARN_USER_ENV" -> sys.env.filter(kv => kv._1.startsWith("PIO_")). - map(kv => s"${kv._1}=${kv._2}").mkString(",")) - info(s"Submission command: ${cmd}") - val r = proc.! - if (r != 0) { - error(s"Return code of previous step is ${r}. Aborting.") - return 1 - } - r - } - - def status(ca: ConsoleArgs): Int = { - info("Inspecting PredictionIO...") - ca.common.pioHome map { pioHome => - info(s"PredictionIO ${BuildInfo.version} is installed at $pioHome") - } getOrElse { - error("Unable to locate PredictionIO installation. 
Aborting.") - return 1 - } - info("Inspecting Apache Spark...") - val sparkHome = getSparkHome(ca.common.sparkHome) - if (new File(s"$sparkHome/bin/spark-submit").exists) { - info(s"Apache Spark is installed at $sparkHome") - val sparkMinVersion = "1.3.0" - val sparkReleaseFile = new File(s"$sparkHome/RELEASE") - if (sparkReleaseFile.exists) { - val sparkReleaseStrings = - Source.fromFile(sparkReleaseFile).mkString.split(' ') - if (sparkReleaseStrings.length < 2) { - warn(stripMarginAndNewlines( - s"""|Apache Spark version information cannot be found (RELEASE file - |is empty). This is a known issue for certain vendors (e.g. - |Cloudera). Please make sure you are using a version of at least - |$sparkMinVersion.""")) - } else { - val sparkReleaseVersion = sparkReleaseStrings(1) - val parsedMinVersion = Version.apply(sparkMinVersion) - val parsedCurrentVersion = Version.apply(sparkReleaseVersion) - if (parsedCurrentVersion >= parsedMinVersion) { - info(stripMarginAndNewlines( - s"""|Apache Spark $sparkReleaseVersion detected (meets minimum - |requirement of $sparkMinVersion)""")) - } else { - error(stripMarginAndNewlines( - s"""|Apache Spark $sparkReleaseVersion detected (does not meet - |minimum requirement. Aborting.""")) - } - } - } else { - warn(stripMarginAndNewlines( - s"""|Apache Spark version information cannot be found. If you are - |using a developmental tree, please make sure you are using a - |version of at least $sparkMinVersion.""")) - } - } else { - error("Unable to locate a proper Apache Spark installation. Aborting.") - return 1 - } - info("Inspecting storage backend connections...") - try { - storage.Storage.verifyAllDataObjects() - } catch { - case e: Throwable => - error("Unable to connect to all storage backends successfully. The " + - "following shows the error message from the storage backend.") - error(s"${e.getMessage} (${e.getClass.getName})", e) - error("Dumping configuration of initialized storage backend sources. 
" + - "Please make sure they are correct.") - storage.Storage.config.get("sources") map { src => - src foreach { case (s, p) => - error(s"Source Name: $s; Type: ${p.getOrElse("type", "(error)")}; " + - s"Configuration: ${p.getOrElse("config", "(error)")}") - } - } getOrElse { - error("No properly configured storage backend sources.") - } - return 1 - } - info("(sleeping 5 seconds for all messages to show up...)") - Thread.sleep(5000) - info("Your system is all ready to go.") - 0 - } - - def upgrade(ca: ConsoleArgs): Unit = { - (ca.upgrade.from, ca.upgrade.to) match { - case ("0.8.2", "0.8.3") => { - Upgrade_0_8_3.runMain(ca.upgrade.oldAppId, ca.upgrade.newAppId) - } - case _ => - println(s"Upgrade from version ${ca.upgrade.from} to ${ca.upgrade.to}" - + s" is not supported.") - } - } - - def coreAssembly(pioHome: String): File = { - val core = s"pio-assembly-${BuildInfo.version}.jar" - val coreDir = - if (new File(pioHome + File.separator + "RELEASE").exists) { - new File(pioHome + File.separator + "lib") - } else { - new File(pioHome + File.separator + "assembly") - } - val coreFile = new File(coreDir, core) - if (coreFile.exists) { - coreFile - } else { - error(s"PredictionIO Core Assembly (${coreFile.getCanonicalPath}) does " + - "not exist. Aborting.") - sys.exit(1) - } - } - - val manifestAutogenTag = "pio-autogen-manifest" - - def regenerateManifestJson(json: File): Unit = { - val cwd = sys.props("user.dir") - val ha = java.security.MessageDigest.getInstance("SHA-1"). - digest(cwd.getBytes).map("%02x".format(_)).mkString - if (json.exists) { - val em = readManifestJson(json) - if (em.description == Some(manifestAutogenTag) && ha != em.version) { - warn("This engine project directory contains an auto-generated " + - "manifest that has been copied/moved from another location. ") - warn("Regenerating the manifest to reflect the updated location. 
" + - "This will dissociate with all previous engine instances.") - generateManifestJson(json) - } else { - info(s"Using existing engine manifest JSON at ${json.getCanonicalPath}") - } - } else { - generateManifestJson(json) - } - } - - def generateManifestJson(json: File): Unit = { - val cwd = sys.props("user.dir") - implicit val formats = Utils.json4sDefaultFormats + - new EngineManifestSerializer - val rand = Random.alphanumeric.take(32).mkString - val ha = java.security.MessageDigest.getInstance("SHA-1"). - digest(cwd.getBytes).map("%02x".format(_)).mkString - val em = EngineManifest( - id = rand, - version = ha, - name = new File(cwd).getName, - description = Some(manifestAutogenTag), - files = Seq(), - engineFactory = "") - try { - FileUtils.writeStringToFile(json, write(em), "ISO-8859-1") - } catch { - case e: java.io.IOException => - error(s"Cannot generate ${json} automatically (${e.getMessage}). " + - "Aborting.") - sys.exit(1) - } - } - - def readManifestJson(json: File): EngineManifest = { - implicit val formats = Utils.json4sDefaultFormats + - new EngineManifestSerializer - try { - read[EngineManifest](Source.fromFile(json).mkString) - } catch { - case e: java.io.FileNotFoundException => - error(s"${json.getCanonicalPath} does not exist. 
Aborting.") - sys.exit(1) - case e: MappingException => - error(s"${json.getCanonicalPath} has invalid content: " + - e.getMessage) - sys.exit(1) - } - } - - def withRegisteredManifest( - json: File, - engineId: Option[String], - engineVersion: Option[String])( - op: EngineManifest => Int): Int = { - val ej = readManifestJson(json) - val id = engineId getOrElse ej.id - val version = engineVersion getOrElse ej.version - storage.Storage.getMetaDataEngineManifests.get(id, version) map { - op - } getOrElse { - error(s"Engine ${id} ${version} cannot be found in the system.") - error("Possible reasons:") - error("- the engine is not yet built by the 'build' command;") - error("- the meta data store is offline.") - 1 - } - } - - def jarFilesAt(path: File): Array[File] = recursiveListFiles(path) filter { - _.getName.toLowerCase.endsWith(".jar") - } - - def jarFilesForScala: Array[File] = { - val libFiles = jarFilesForScalaFilter(jarFilesAt(new File("lib"))) - val targetFiles = jarFilesForScalaFilter(jarFilesAt(new File("target" + - File.separator + s"scala-${scalaVersionNoPatch}"))) - // Use libFiles is target is empty. 
- if (targetFiles.size > 0) targetFiles else libFiles - } - - def jarFilesForScalaFilter(jars: Array[File]): Array[File] = - jars.filterNot { f => - f.getName.toLowerCase.endsWith("-javadoc.jar") || - f.getName.toLowerCase.endsWith("-sources.jar") - } - - def recursiveListFiles(f: File): Array[File] = { - Option(f.listFiles) map { these => - these ++ these.filter(_.isDirectory).flatMap(recursiveListFiles) - } getOrElse Array[File]() - } - - def getSparkHome(sparkHome: Option[String]): String = { - sparkHome getOrElse { - sys.env.getOrElse("SPARK_HOME", ".") - } - } - - def versionNoPatch(fullVersion: String): String = { - val v = """^(\d+\.\d+)""".r - val versionNoPatch = for { - v(np) <- v findFirstIn fullVersion - } yield np - versionNoPatch.getOrElse(fullVersion) - } - - def scalaVersionNoPatch: String = versionNoPatch(BuildInfo.scalaVersion) - - def detectSbt(ca: ConsoleArgs): String = { - ca.build.sbt map { - _.getCanonicalPath - } getOrElse { - val f = new File(Seq(ca.common.pioHome.get, "sbt", "sbt").mkString( - File.separator)) - if (f.exists) f.getCanonicalPath else "sbt" - } - } - - def stripMarginAndNewlines(string: String): String = - string.stripMargin.replaceAll("\n", " ") -} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Export.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/io/prediction/tools/console/Export.scala b/tools/src/main/scala/io/prediction/tools/console/Export.scala deleted file mode 100644 index 55540cf..0000000 --- a/tools/src/main/scala/io/prediction/tools/console/Export.scala +++ /dev/null @@ -1,42 +0,0 @@ -/** Copyright 2015 TappingStone, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.prediction.tools.console - -import io.prediction.tools.Runner - -case class ExportArgs( - appId: Int = 0, - channel: Option[String] = None, - outputPath: String = "", - format: String = "json") - -object Export { - def eventsToFile(ca: ConsoleArgs): Int = { - val channelArg = ca.export.channel - .map(ch => Seq("--channel", ch)).getOrElse(Nil) - Runner.runOnSpark( - "io.prediction.tools.export.EventsToFile", - Seq( - "--appid", - ca.export.appId.toString, - "--output", - ca.export.outputPath, - "--format", - ca.export.format) ++ channelArg, - ca, - Nil) - } -} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Import.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/io/prediction/tools/console/Import.scala b/tools/src/main/scala/io/prediction/tools/console/Import.scala deleted file mode 100644 index 18a6437..0000000 --- a/tools/src/main/scala/io/prediction/tools/console/Import.scala +++ /dev/null @@ -1,39 +0,0 @@ -/** Copyright 2015 TappingStone, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.prediction.tools.console - -import io.prediction.tools.Runner - -case class ImportArgs( - appId: Int = 0, - channel: Option[String] = None, - inputPath: String = "") - -object Import { - def fileToEvents(ca: ConsoleArgs): Int = { - val channelArg = ca.imprt.channel - .map(ch => Seq("--channel", ch)).getOrElse(Nil) - Runner.runOnSpark( - "io.prediction.tools.imprt.FileToEvents", - Seq( - "--appid", - ca.imprt.appId.toString, - "--input", - ca.imprt.inputPath) ++ channelArg, - ca, - Nil) - } -} http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Template.scala ---------------------------------------------------------------------- diff --git a/tools/src/main/scala/io/prediction/tools/console/Template.scala b/tools/src/main/scala/io/prediction/tools/console/Template.scala deleted file mode 100644 index a0b4376..0000000 --- a/tools/src/main/scala/io/prediction/tools/console/Template.scala +++ /dev/null @@ -1,429 +0,0 @@ -/** Copyright 2015 TappingStone, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.prediction.tools.console - -import java.io.BufferedInputStream -import java.io.BufferedOutputStream -import java.io.File -import java.io.FileInputStream -import java.io.FileOutputStream -import java.net.ConnectException -import java.net.URI -import java.util.zip.ZipInputStream - -import grizzled.slf4j.Logging -import io.prediction.controller.Utils -import io.prediction.core.BuildInfo -import org.apache.commons.io.FileUtils -import org.json4s._ -import org.json4s.native.JsonMethods._ -import org.json4s.native.Serialization.read -import org.json4s.native.Serialization.write -import semverfi._ - -import scala.io.Source -import scala.sys.process._ -import scalaj.http._ - -case class TemplateArgs( - directory: String = "", - repository: String = "", - version: Option[String] = None, - name: Option[String] = None, - packageName: Option[String] = None, - email: Option[String] = None) - -case class GitHubTag( - name: String, - zipball_url: String, - tarball_url: String, - commit: GitHubCommit) - -case class GitHubCommit( - sha: String, - url: String) - -case class GitHubCache( - headers: Map[String, String], - body: String) - -case class TemplateEntry( - repo: String) - -case class TemplateMetaData( - pioVersionMin: Option[String] = None) - -object Template extends Logging { - implicit val formats = Utils.json4sDefaultFormats - - def templateMetaData(templateJson: File): TemplateMetaData = { - if (!templateJson.exists) { - warn(s"$templateJson does not exist. Template metadata will not be available. " + - "(This is safe to ignore if you are not working on a template.)") - TemplateMetaData() - } else { - val jsonString = Source.fromFile(templateJson)(scala.io.Codec.ISO8859).mkString - val json = try { - parse(jsonString) - } catch { - case e: org.json4s.ParserUtil.ParseException => - warn(s"$templateJson cannot be parsed. 
Template metadata will not be available.")
          return TemplateMetaData()
      }
      val pioVersionMin = json \ "pio" \ "version" \ "min"
      pioVersionMin match {
        case JString(s) => TemplateMetaData(pioVersionMin = Some(s))
        case _ => TemplateMetaData()
      }
    }
  }

  /** Creates a wrapper that provides the functionality of scalaj.http.Http()
    * with automatic proxy settings handling. The proxy settings will first
    * come from "git" followed by system properties "http.proxyHost" and
    * "http.proxyPort".
    *
    * @param url URL to be connected
    * @return an HttpRequest for url, routed through the detected proxy if any
    */
  def httpOptionalProxy(url: String): HttpRequest = {
    // Ask git for a global proxy first. headOption (instead of toList(0))
    // avoids an IndexOutOfBoundsException when git prints no output.
    val gitProxy = try {
      Process("git config --global http.proxy").lines.headOption
    } catch {
      case e: Throwable => None // git may be missing entirely; best-effort
    }

    val (host, port) = gitProxy map { p =>
      val proxyUri = new URI(p)
      (Option(proxyUri.getHost),
        if (proxyUri.getPort == -1) None else Some(proxyUri.getPort))
    } getOrElse {
      // Fall back to JVM system properties; an unparsable port is ignored.
      (sys.props.get("http.proxyHost"),
        sys.props.get("http.proxyPort").flatMap { p =>
          try {
            Some(p.toInt)
          } catch {
            case e: NumberFormatException => None
          }
        })
    }

    (host, port) match {
      case (Some(h), Some(p)) => Http(url).proxy(h, p)
      case _ => Http(url)
    }
  }

  /** Fetches GitHub API results for repos, using repoFilename as an
    * ETag-aware local cache so unchanged responses (HTTP 304) are served
    * from disk.
    *
    * @param repos GitHub repositories ("owner/name") to query
    * @param apiType GitHub API endpoint suffix, e.g. "tags"
    * @param repoFilename path of the JSON cache file to read and update
    * @return the updated cache, keyed by repository name
    */
  def getGitHubRepos(
    repos: Seq[String],
    apiType: String,
    repoFilename: String): Map[String, GitHubCache] = {
    // A missing or unreadable cache file simply means an empty cache.
    val reposCache = try {
      val src = Source.fromFile(repoFilename)(scala.io.Codec.ISO8859)
      try {
        read[Map[String, GitHubCache]](src.mkString)
      } finally {
        src.close() // was leaked before; Source.fromFile holds an open stream
      }
    } catch {
      case e: Throwable => Map[String, GitHubCache]()
    }
    val newReposCache = reposCache ++ (try {
      repos.map { repo =>
        val url = s"https://api.github.com/repos/$repo/$apiType"
        val http = httpOptionalProxy(url)
        // Send If-None-Match when a cached ETag exists so GitHub can reply 304.
        val response = reposCache.get(repo).map { cache =>
          cache.headers.get("ETag").map { etag =>
            http.header("If-None-Match", etag).asString
          } getOrElse {
            http.asString
          }
        } getOrElse {
          http.asString
        }

        val body = if (response.code == 304) {
          reposCache(repo).body // not modified; reuse cached body
        } else {
          response.body
        }

        repo -> GitHubCache(headers = response.headers, body = body)
      }.toMap
    } catch {
      case e: ConnectException =>
        githubConnectErrorMessage(e)
        Map()
    })
    FileUtils.writeStringToFile(
      new File(repoFilename),
      write(newReposCache),
      "ISO-8859-1")
    newReposCache
  }

  /** Best-effort subscription to template update notifications.
    * Failures are reported but never abort the caller.
    */
  def sub(repo: String, name: String, email: String, org: String): Unit = {
    val data = Map(
      "repo" -> repo,
      "name" -> name,
      "email" -> email,
      "org" -> org)
    try {
      httpOptionalProxy("https://update.prediction.io/templates.subscribe").
        postData("json=" + write(data)).asString
    } catch {
      // Deliberate best-effort: subscription must never fail the download.
      case e: Throwable => error("Unable to subscribe.")
    }
  }

  /** Best-effort anonymous usage ping for template downloads; failures are
    * only logged at debug level.
    */
  def meta(repo: String, name: String, org: String): Unit = {
    try {
      httpOptionalProxy(
        s"https://meta.prediction.io/templates/$repo/$org/$name").asString
    } catch {
      case e: Throwable => debug("Template metadata unavailable.")
    }
  }

  /** Prints all template IDs registered on the PredictionIO Template Gallery.
    *
    * @return 0 on success, 1 if the gallery index cannot be fetched or parsed
    */
  def list(ca: ConsoleArgs): Int = {
    val templatesUrl = "https://templates.prediction.io/index.json"
    try {
      // Read fully, then close the underlying stream (was leaked before).
      val src = Source.fromURL(templatesUrl)
      val templatesJson = try src.mkString("") finally src.close()
      val templates = read[List[TemplateEntry]](templatesJson)
      println("The following is a list of template IDs registered on " +
        "PredictionIO Template Gallery:")
      println()
      templates.sortBy(_.repo.toLowerCase).foreach { template =>
        println(template.repo)
      }
      println()
      // Fixed user-facing typo: "possible use" -> "possible to use".
      println("Notice that it is possible to use any GitHub repository as " +
        "your engine template ID (e.g. YourOrg/YourTemplate).")
      0
    } catch {
      case e: Throwable =>
        error(s"Unable to list templates from $templatesUrl " +
          s"(${e.getMessage}). Aborting.")
        1
    }
  }

  /** Logs a standard error message for GitHub connectivity failures. */
  def githubConnectErrorMessage(e: ConnectException): Unit = {
    error(s"Unable to connect to GitHub (Reason: ${e.getMessage}). " +
      "Please check your network configuration and proxy settings.")
  }

  /** Downloads an engine template from GitHub and materializes it in the
    * target directory, optionally rewriting the Scala package name to the
    * user-supplied organization.
    *
    * @return 0 on success, 1 on any unrecoverable error
    */
  def get(ca: ConsoleArgs): Int = {
    val repos =
      getGitHubRepos(Seq(ca.template.repository), "tags", ".templates-cache")

    // Validation pass only: ensure the repository exists and its tag list
    // parses. The parsed value is discarded here and re-read below.
    repos.get(ca.template.repository).map { repo =>
      try {
        read[List[GitHubTag]](repo.body)
      } catch {
        case e: MappingException =>
          error(s"Either ${ca.template.repository} is not a valid GitHub " +
            "repository, or it does not have any tag. Aborting.")
          return 1
      }
    } getOrElse {
      error(s"Failed to retrieve ${ca.template.repository}. Aborting.")
      return 1
    }

    // Author identity: CLI args win, then git config, then interactive prompt.
    val name = ca.template.name getOrElse {
      try {
        Process("git config --global user.name").lines.toList(0)
      } catch {
        case e: Throwable =>
          readLine("Please enter author's name: ")
      }
    }

    val organization = ca.template.packageName getOrElse {
      readLine(
        "Please enter the template's Scala package name (e.g. com.mycompany): ")
    }

    val email = ca.template.email getOrElse {
      try {
        Process("git config --global user.email").lines.toList(0)
      } catch {
        case e: Throwable =>
          readLine("Please enter author's e-mail address: ")
      }
    }

    println(s"Author's name: $name")
    println(s"Author's e-mail: $email")
    println(s"Author's organization: $organization")

    // Prompt loop: repeat until the user answers y/Y/n/N (empty means yes).
    var subscribe = readLine("Would you like to be informed about new bug " +
      "fixes and security updates of this template? (Y/n) ")
    var valid = false

    do {
      subscribe match {
        case "" | "Y" | "y" =>
          sub(ca.template.repository, name, email, organization)
          valid = true
        case "n" | "N" =>
          meta(ca.template.repository, name, organization)
          valid = true
        case _ =>
          println("Please answer 'y' or 'n'")
          subscribe = readLine("(Y/n)? ")
      }
    } while (!valid)

    val repo = repos(ca.template.repository)

    println(s"Retrieving ${ca.template.repository}")
    val tags = read[List[GitHubTag]](repo.body)
    println(s"There are ${tags.size} tags")

    if (tags.isEmpty) {
      println(s"${ca.template.repository} does not have any tag. Aborting.")
      return 1
    }

    // Requested version must match a tag exactly; default to the newest tag.
    val tag = ca.template.version.map { v =>
      tags.find(_.name == v).getOrElse {
        println(s"${ca.template.repository} does not have tag $v. Aborting.")
        return 1
      }
    } getOrElse tags.head

    println(s"Using tag ${tag.name}")
    val url =
      s"https://github.com/${ca.template.repository}/archive/${tag.name}.zip"
    println(s"Going to download $url")
    val trial = try {
      httpOptionalProxy(url).asBytes
    } catch {
      case e: ConnectException =>
        githubConnectErrorMessage(e)
        return 1
    }
    // GitHub archive URLs redirect once to codeload; follow a single hop.
    val finalTrial = try {
      trial.location.map { loc =>
        println(s"Redirecting to $loc")
        httpOptionalProxy(loc).asBytes
      } getOrElse trial
    } catch {
      case e: ConnectException =>
        githubConnectErrorMessage(e)
        return 1
    }
    val zipFilename =
      s"${ca.template.repository.replace('/', '-')}-${tag.name}.zip"
    FileUtils.writeByteArrayToFile(
      new File(zipFilename),
      finalTrial.body)
    val zis = new ZipInputStream(
      new BufferedInputStream(new FileInputStream(zipFilename)))
    val bufferSize = 4096
    val filesToModify = collection.mutable.ListBuffer[String]()
    var ze = zis.getNextEntry
    while (ze != null) {
      // ZIP entry names always use '/', regardless of platform, so do not
      // split on File.separatorChar (that breaks extraction on Windows).
      // NOTE(review): entry names are not sanitized; a malicious archive
      // could escape the target directory via "../" (zip-slip). Consider
      // verifying destFilename stays under ca.template.directory.
      val filenameSegments = ze.getName.split('/')
      // Drop the archive's top-level "<repo>-<tag>/" folder, re-rooting
      // entries under the requested target directory.
      val destFilename = (ca.template.directory +: filenameSegments.tail).
        mkString(File.separator)
      if (ze.isDirectory) {
        new File(destFilename).mkdirs
      } else {
        val os = new BufferedOutputStream(
          new FileOutputStream(destFilename),
          bufferSize)
        val data = Array.ofDim[Byte](bufferSize)
        var count = zis.read(data, 0, bufferSize)
        while (count != -1) {
          os.write(data, 0, count)
          count = zis.read(data, 0, bufferSize)
        }
        os.flush()
        os.close()

        val nameOnly = new File(destFilename).getName

        // Remember source files that may carry the template's package name.
        if (organization != "" &&
          (nameOnly.endsWith(".scala") ||
            nameOnly == "build.sbt" ||
            nameOnly == "engine.json")) {
          filesToModify += destFilename
        }
      }
      ze = zis.getNextEntry
    }
    zis.close()
    new File(zipFilename).delete

    val engineJsonFile =
      new File(ca.template.directory, "engine.json")

    val engineJson = try {
      val src = Source.fromFile(engineJsonFile)
      try {
        Some(parse(src.mkString))
      } finally {
        src.close() // was leaked before
      }
    } catch {
      case e: java.io.IOException =>
        error("Unable to read engine.json. Skipping automatic package " +
          "name replacement.")
        None
      case e: MappingException =>
        error("Unable to parse engine.json. Skipping automatic package " +
          "name replacement.")
        None
    }

    val engineFactory = engineJson.map { ej =>
      (ej \ "engineFactory").extractOpt[String]
    } getOrElse None

    engineFactory.map { ef =>
      // The package name is everything before the factory's class name.
      val pkgName = ef.split('.').dropRight(1).mkString(".")
      println(s"Replacing $pkgName with $organization...")

      filesToModify.foreach { ftm =>
        println(s"Processing $ftm...")
        // Materialize all lines and close the handle BEFORE overwriting the
        // same file below; the original kept a lazy iterator open while
        // writing, leaking the handle (and failing on Windows).
        val src = Source.fromFile(ftm)
        val fileContent = try src.getLines().toList finally src.close()
        val processedLines =
          fileContent.map(_.replaceAllLiterally(pkgName, organization))
        FileUtils.writeStringToFile(
          new File(ftm),
          processedLines.mkString("\n"))
      }
    } getOrElse {
      error("engineFactory is not found in engine.json. Skipping automatic " +
        "package name replacement.")
    }

    verifyTemplateMinVersion(new File(ca.template.directory, "template.json"))

    println(s"Engine template ${ca.template.repository} is now ready at " +
      ca.template.directory)

    0
  }

  /** Exits the process with status 1 if the template declares a minimum
    * PredictionIO version (pio.version.min in template.json) newer than
    * this build.
    */
  def verifyTemplateMinVersion(templateJsonFile: File): Unit = {
    val metadata = templateMetaData(templateJsonFile)

    metadata.pioVersionMin.foreach { pvm =>
      if (Version(BuildInfo.version) < Version(pvm)) {
        error(s"This engine template requires at least PredictionIO $pvm. " +
          s"The template may not work with PredictionIO ${BuildInfo.version}.")
        sys.exit(1)
      }
    }
  }

}
