cuspymd commented on a change in pull request #4098:
URL: https://github.com/apache/zeppelin/pull/4098#discussion_r615706250



##########
File path: 
zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/remote/ExecRemoteInterpreterProcess.java
##########
@@ -215,6 +215,14 @@ public void onProcessComplete(int exitValue) {
           notifyAll();
         }
       }
+      if (getEnv().getOrDefault("ZEPPELIN_FLINK_YARN_APPLICATION", 
"false").equalsIgnoreCase("true")

Review comment:
       Wouldn't an `else if` be clearer here?

##########
File path: 
flink/interpreter/src/main/java/org/apache/zeppelin/flink/YarnApplicationExecutionEnvironment.java
##########
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.flink;
+
+import org.apache.flink.api.common.JobExecutionResult;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.configuration.ConfigUtils;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.DeploymentOptions;
+import org.apache.flink.configuration.PipelineOptions;
+import org.apache.flink.core.execution.JobClient;
+import org.apache.flink.core.execution.PipelineExecutorServiceLoader;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.apache.flink.util.Preconditions.checkState;
+
+
+/**
+ * ExecutionEnvironment used for yarn application mode.
+ * Need to add jars of scala shell before submitting jobs.
+ */
+public class YarnApplicationExecutionEnvironment extends ExecutionEnvironment {
+
+  private FlinkILoop flinkILoop;
+
+  public YarnApplicationExecutionEnvironment(PipelineExecutorServiceLoader 
executorServiceLoader,
+                                             Configuration configuration,
+                                             ClassLoader userClassloader,
+                                             FlinkILoop flinkILoop) {
+    super(executorServiceLoader,configuration,userClassloader);
+    this.flinkILoop = flinkILoop;
+  }
+
+  @Override
+  public JobClient executeAsync(String jobName) throws Exception {
+    updateDependencies();
+    return super.executeAsync(jobName);
+  }
+
+  @Override
+  public JobExecutionResult execute() throws Exception {
+    updateDependencies();
+    return super.execute();
+  }
+
+  private void updateDependencies() throws Exception {
+    final Configuration configuration = getConfiguration();
+    checkState(
+            configuration.getBoolean(DeploymentOptions.ATTACHED),
+            "Only ATTACHED mode is supported by the scala shell.");
+
+    final List<URL> updatedJarFiles = getUpdatedJarFiles();
+    ConfigUtils.encodeCollectionToConfig(
+            configuration, PipelineOptions.JARS, updatedJarFiles, 
URL::toString);
+  }
+
+  private List<URL> getUpdatedJarFiles() throws MalformedURLException {
+    final URL jarUrl = 
flinkILoop.writeFilesToDisk().getAbsoluteFile().toURI().toURL();
+    final List<URL> allJarFiles = new ArrayList<>();
+    allJarFiles.add(jarUrl);
+    return allJarFiles;
+  }

Review comment:
       It's not a simple getter but a function that produces side effects. 
Wouldn't `updateJarFiles()` be a more accurate name?

##########
File path: 
flink/interpreter/src/main/java/org/apache/zeppelin/flink/HadoopUtils.java
##########
@@ -55,6 +65,18 @@ public static String getYarnAppTrackingUrl(ClusterClient 
clusterClient) throws I
     return yarnClient.getApplicationReport(yarnAppId).getTrackingUrl();
   }
 
+  public static int getFlinkRestPort(String yarnAppId) throws IOException, 
YarnException {
+    YarnClient yarnClient = YarnClient.createYarnClient();
+    YarnConfiguration yarnConf = new YarnConfiguration();
+    // disable timeline service as we only query yarn app here.
+    // Otherwise we may hit this kind of ERROR:
+    // java.lang.ClassNotFoundException: 
com.sun.jersey.api.client.config.ClientConfig
+    yarnConf.set("yarn.timeline-service.enabled", "false");
+    yarnClient.init(yarnConf);
+    yarnClient.start();
+    return 
yarnClient.getApplicationReport(ConverterUtils.toApplicationId(yarnAppId)).getRpcPort();

Review comment:
       It is almost the same as the function above; it would be good to 
extract the shared logic into a separate `getApplicationReport()` function.

##########
File path: 
zeppelin-plugins/launcher/flink/src/main/java/org/apache/zeppelin/interpreter/launcher/FlinkInterpreterLauncher.java
##########
@@ -55,6 +64,60 @@ public FlinkInterpreterLauncher(ZeppelinConfiguration zConf, 
RecoveryStorage rec
     }
     envs.put("FLINK_LIB_DIR", flinkHome + "/lib");
     envs.put("FLINK_PLUGINS_DIR", flinkHome + "/plugins");
+
+    // yarn application mode specific logic
+    if (context.getProperties().getProperty("flink.execution.mode")
+            .equalsIgnoreCase("yarn_application")) {
+      envs.put("ZEPPELIN_FLINK_YARN_APPLICATION", "true");
+
+      StringBuilder flinkYarnApplicationConfBuilder = new StringBuilder();
+
+      // Extract yarn.ship-files, add hive-site.xml automatically if hive is 
enabled
+      // and HIVE_CONF_DIR is specified
+      String hiveConfDirProperty = 
context.getProperties().getProperty("HIVE_CONF_DIR");
+      List<String> yarnShipFiles = new ArrayList<>();
+      if (StringUtils.isNotBlank(hiveConfDirProperty) &&
+              Boolean.parseBoolean(context.getProperties()
+                      .getProperty("zeppelin.flink.enableHive", "false"))) {
+        File hiveSiteFile = new File(hiveConfDirProperty, "hive-site.xml");
+        if (hiveSiteFile.isFile() && hiveSiteFile.exists()) {
+          yarnShipFiles.add(hiveSiteFile.getAbsolutePath());
+        } else {
+          LOGGER.warn("Hive site file: {} doesn't exist or is not a 
directory", hiveSiteFile);
+        }
+      }
+      if (context.getProperties().containsKey("yarn.ship-files")) {
+        
yarnShipFiles.add(context.getProperties().getProperty("yarn.ship-files"));
+      }
+      if (!yarnShipFiles.isEmpty()) {
+        flinkYarnApplicationConfBuilder.append(
+                " -D yarn.ship-files=" + 
yarnShipFiles.stream().collect(Collectors.joining(",")));
+      }
+
+      // specify yarn.application.name
+      String yarnAppName = 
context.getProperties().getProperty("flink.yarn.appName");
+      if (StringUtils.isNotBlank(yarnAppName)) {
+        // flink run command can not contains whitespace, so replace it with _
+        flinkYarnApplicationConfBuilder.append(
+                " -D yarn.application.name=" + yarnAppName.replaceAll(" ", 
"_") + "");
+      }
+
+      // add other yarn and python configuration.
+      for (Map.Entry<Object, Object> entry : 
context.getProperties().entrySet()) {
+        if (!entry.getKey().toString().equalsIgnoreCase("yarn.ship-files") &&
+            !entry.getKey().toString().equalsIgnoreCase("flink.yarn.appName")) 
{
+          if 
(CharMatcher.whitespace().matchesAnyOf(entry.getValue().toString())) {
+            LOGGER.warn("flink configuration key {} is skipped because it 
contains white space",
+                    entry.getValue().toString());
+          } else {
+            flinkYarnApplicationConfBuilder.append(
+                    " -D " + entry.getKey().toString() + "=" + 
entry.getValue().toString() + "");
+          }
+        }
+      }
+      envs.put("ZEPPELIN_FLINK_YANR_APPLICATION_CONF", 
flinkYarnApplicationConfBuilder.toString());
+    }

Review comment:
       Extracting the added logic into a separate function would make it 
easier to maintain.

##########
File path: 
flink/flink1.12-shims/src/main/java/org/apache/zeppelin/flink/Flink112Shims.java
##########
@@ -173,13 +173,26 @@ public Object createCatalogManager(Object config) {
 
   @Override
   public String getPyFlinkPythonPath(Properties properties) throws IOException 
{
+    if 
("yarn_application".equalsIgnoreCase(properties.getProperty("flink.execution.mode")))
 {
+      // for yarn application mode, FLINK_HOME is container working directory
+      String flinkHome = new File(".").getAbsolutePath();
+      List<File> depFiles = null;
+      depFiles = Arrays.asList(new File(flinkHome + 
"/lib/python").listFiles());
+      StringBuilder builder = new StringBuilder();
+      for (File file : depFiles) {
+        LOGGER.info("Adding extracted file {} to PYTHONPATH", 
file.getAbsolutePath());
+        builder.append(file.getAbsolutePath() + ":");
+      }
+      return builder.toString();

Review comment:
       It is almost the same as the code at lines 191~198, so it would be good 
to extract the shared logic into a function.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Reply via email to