felixcheung commented on a change in pull request #3331: [Zeppelin-4049] Hadoop 
Submarine (Machine Learning) interpreter
URL: https://github.com/apache/zeppelin/pull/3331#discussion_r265334390
 
 

 ##########
 File path: 
submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineInterpreter.java
 ##########
 @@ -0,0 +1,275 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.submarine;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.zeppelin.display.ui.OptionInput.ParamOption;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.submarine.commons.SubmarineCommand;
+import org.apache.zeppelin.submarine.commons.SubmarineConstants;
+import org.apache.zeppelin.submarine.job.SubmarineJob;
+import org.apache.zeppelin.submarine.commons.SubmarineUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.List;
+import java.util.Properties;
+
+import static 
org.apache.zeppelin.submarine.commons.SubmarineCommand.CLEAN_RUNTIME_CACHE;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.CHECKPOINT_PATH;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.CLEAN_CHECKPOINT;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_CLEAN;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_JOB_RUN;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_JOB_SHOW;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_TYPE;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_USAGE;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.INPUT_PATH;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.MACHINELEARING_DISTRIBUTED_ENABLE;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.OPERATION_TYPE;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.PS_LAUNCH_CMD;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_CHECKPOINT_PATH;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.USERNAME_SYMBOL;
+import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.WORKER_LAUNCH_CMD;
+import static org.apache.zeppelin.submarine.commons.SubmarineUtils.unifyKey;
+
+/**
+ * SubmarineInterpreter of Hadoop Submarine implementation.
+ * Support for Hadoop Submarine cli. All the commands documented here
+ * 
https://github.com/apache/hadoop/tree/trunk/hadoop-submarine/hadoop-submarine-core
+ * /src/site/markdown/QuickStart.md is supported.
+ */
+public class SubmarineInterpreter extends Interpreter {
+  private Logger LOGGER = LoggerFactory.getLogger(SubmarineInterpreter.class);
+
+  // Number of submarines executed in parallel for each interpreter instance
+  protected int concurrentExecutedMax = 1;
+
+  private boolean needUpdateConfig = true;
+  private String currentReplName = "";
+
+  SubmarineContext submarineContext = null;
+
+  public SubmarineInterpreter(Properties properties) {
+    super(properties);
+
+    String concurrentMax = 
getProperty(SubmarineConstants.SUBMARINE_CONCURRENT_MAX, "1");
+    concurrentExecutedMax = Integer.parseInt(concurrentMax);
+
+    submarineContext = SubmarineContext.getInstance();
+  }
+
  /**
   * Called when the interpreter starts. No eager setup is performed here;
   * submarine jobs are created lazily via SubmarineContext in interpret().
   */
  @Override
  public void open() {
    LOGGER.info("SubmarineInterpreter open()");
  }
+
  /**
   * Called when the interpreter shuts down. Stops every submarine job tracked
   * by the shared context so no YARN jobs are left orphaned by this instance.
   */
  @Override
  public void close() {
    submarineContext.stopAllSubmarineJob();
  }
+
+  private void setParagraphConfig(InterpreterContext context) {
+    String replName = context.getReplName();
+    if (StringUtils.equals(currentReplName, replName)) {
+      currentReplName = context.getReplName();
+      needUpdateConfig = true;
+    }
+    if (needUpdateConfig) {
+      needUpdateConfig = false;
+      if (currentReplName.equals("submarine") || currentReplName.isEmpty()) {
+        context.getConfig().put("editorHide", true);
+        context.getConfig().put("title", false);
+      } else {
+        context.getConfig().put("editorHide", false);
+        context.getConfig().put("title", true);
+      }
+    }
+  }
+
+  @Override
+  public InterpreterResult interpret(String script, InterpreterContext 
context) {
+    try {
+      // algorithm & checkpoint path support replaces ${username} with real 
user name
+      String algorithmPath = 
properties.getProperty(SUBMARINE_ALGORITHM_HDFS_PATH, "");
+      if (algorithmPath.contains(USERNAME_SYMBOL)) {
+        algorithmPath = algorithmPath.replace(USERNAME_SYMBOL, userName);
+        properties.setProperty(SUBMARINE_ALGORITHM_HDFS_PATH, algorithmPath);
+      }
+      String checkpointPath = properties.getProperty(TF_CHECKPOINT_PATH, "");
+      if (checkpointPath.contains(USERNAME_SYMBOL)) {
+        checkpointPath = checkpointPath.replace(USERNAME_SYMBOL, userName);
+        properties.setProperty(TF_CHECKPOINT_PATH, checkpointPath);
+      }
+
+      SubmarineJob submarineJob = 
submarineContext.addOrGetSubmarineJob(properties, context);
+
+      setParagraphConfig(context);
+
+      LOGGER.debug("Run shell command '" + script + "'");
+      String command = "", operation = "", cleanCheckpoint = "";
+      String inputPath = "", chkPntPath = "", psLaunchCmd = "", 
workerLaunchCmd = "";
+      String noteId = context.getNoteId();
+      String noteName = context.getNoteName();
+
+      if (script.equalsIgnoreCase(COMMAND_CLEAN)) {
+        // Clean Registry Angular Object
+        command = CLEAN_RUNTIME_CACHE.getCommand();
+      } else {
+        operation = SubmarineUtils.getAgulObjValue(context, OPERATION_TYPE);
+        if (!StringUtils.isEmpty(operation)) {
+          SubmarineUtils.removeAgulObjValue(context, OPERATION_TYPE);
+          command = operation;
+        } else {
+          command = SubmarineUtils.getAgulObjValue(context, COMMAND_TYPE);
+        }
+      }
+
+      String distributed = 
this.properties.getProperty(MACHINELEARING_DISTRIBUTED_ENABLE, "false");
+      SubmarineUtils.setAgulObjValue(context, 
unifyKey(MACHINELEARING_DISTRIBUTED_ENABLE),
+          distributed);
+
+      inputPath = SubmarineUtils.getAgulObjValue(context, INPUT_PATH);
+      cleanCheckpoint = SubmarineUtils.getAgulObjValue(context, 
CLEAN_CHECKPOINT);
+      chkPntPath = submarineJob.getJobDefaultCheckpointPath();
+      SubmarineUtils.setAgulObjValue(context, CHECKPOINT_PATH, chkPntPath);
+      psLaunchCmd = SubmarineUtils.getAgulObjValue(context, PS_LAUNCH_CMD);
+      workerLaunchCmd = SubmarineUtils.getAgulObjValue(context, 
WORKER_LAUNCH_CMD);
+      properties.put(INPUT_PATH, inputPath != null ? inputPath : "");
+      properties.put(CHECKPOINT_PATH, chkPntPath != null ? chkPntPath : "");
+      properties.put(PS_LAUNCH_CMD, psLaunchCmd != null ? psLaunchCmd : "");
+      properties.put(WORKER_LAUNCH_CMD, workerLaunchCmd != null ? 
workerLaunchCmd : "");
+
+      SubmarineCommand submarineCmd = SubmarineCommand.fromCommand(command);
+      switch (submarineCmd) {
+        case USAGE:
+          submarineJob.showUsage();
+          break;
+        case JOB_RUN:
+          if (StringUtils.equals(cleanCheckpoint, "true")) {
+            submarineJob.cleanJobDefaultCheckpointPath();
+          }
+          submarineJob.runJob();
+          break;
+        case JOB_STOP:
+          String jobName = SubmarineUtils.getJobName(userName, noteId);
+          submarineJob.deleteJob(jobName);
+          break;
+        case TENSORBOARD_RUN:
+          submarineJob.runTensorBoard();
+          break;
+        case TENSORBOARD_STOP:
+          String user = context.getAuthenticationInfo().getUser();
+          String tensorboardName = SubmarineUtils.getTensorboardName(user);
+          submarineJob.deleteJob(tensorboardName);
+          break;
+        case OLD_UI:
+          createOldGUI(context);
+          break;
+        case CLEAN_RUNTIME_CACHE:
+          submarineJob.cleanRuntimeCache();
+          break;
+        default:
+          submarineJob.onDashboard();
+          break;
+      }
+    } catch (Exception e) {
+      LOGGER.error(e.getMessage(), e);
+      return new InterpreterResult(InterpreterResult.Code.ERROR, 
e.getMessage());
+    }
+
+    return new InterpreterResult(InterpreterResult.Code.SUCCESS);
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    SubmarineJob submarineJob = 
submarineContext.addOrGetSubmarineJob(properties, context);
+    String userName = context.getAuthenticationInfo().getUser();
+    String noteId = context.getNoteId();
+    String jobName = SubmarineUtils.getJobName(userName, noteId);
+    submarineJob.deleteJob(jobName);
+  }
+
  /**
   * Uses Zeppelin's simple (text-template) dynamic-form type for this
   * interpreter's paragraphs.
   */
  @Override
  public FormType getFormType() {
    return FormType.SIMPLE;
  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    return 0;
 
 Review comment:
   can this support progress in the future?

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to