ChengJie1053 commented on code in PR #3952:
URL: 
https://github.com/apache/incubator-streampark/pull/3952#discussion_r1712934180


##########
streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationManageServiceImpl.java:
##########
@@ -294,90 +283,83 @@ public boolean create(SparkApplication appParam) {
 
         if (save(appParam)) {
             if (appParam.isSparkSqlJob()) {
-                FlinkSql flinkSql = new FlinkSql(appParam);
-                flinkSqlService.create(flinkSql);
+                SparkSql sparkSql = new SparkSql(appParam);
+                sparkSqlService.create(sparkSql);
             }
             if (appParam.getConfig() != null) {
-                // configService.create(appParam, true);
+                configService.create(appParam, true);
             }
             return true;
         } else {
             throw new ApiAlertException("create application failed");
         }
     }
 
-    private boolean existsByJobName(String jobName) {
+    private boolean existsByAppName(String jobName) {
         return baseMapper.exists(
-            new 
LambdaQueryWrapper<SparkApplication>().eq(SparkApplication::getJobName, 
jobName));
+            new 
LambdaQueryWrapper<SparkApplication>().eq(SparkApplication::getAppName, 
jobName));
     }
 
     @SuppressWarnings("checkstyle:WhitespaceAround")
     @Override
     @SneakyThrows
     public Long copy(SparkApplication appParam) {
-        boolean existsByJobName = this.existsByJobName(appParam.getJobName());
+        boolean existsByAppName = this.existsByAppName(appParam.getAppName());
         ApiAlertException.throwIfFalse(
-            !existsByJobName,
+            !existsByAppName,
             "[StreamPark] Application names can't be repeated, copy 
application failed.");
 
         SparkApplication oldApp = getById(appParam.getId());
         SparkApplication newApp = new SparkApplication();
-        String jobName = appParam.getJobName();
-
-        newApp.setJobName(jobName);
-        newApp.setClusterId(jobName);
-        newApp.setArgs(appParam.getArgs() != null ? appParam.getArgs() : 
oldApp.getArgs());
-        newApp.setVersionId(oldApp.getVersionId());
 
-        newApp.setSparkClusterId(oldApp.getSparkClusterId());
-        newApp.setRestartSize(oldApp.getRestartSize());
+        newApp.setTeamId(oldApp.getTeamId());
         newApp.setJobType(oldApp.getJobType());
-        newApp.setOptions(oldApp.getOptions());
-        newApp.setDynamicProperties(oldApp.getDynamicProperties());
-        newApp.setResolveOrder(oldApp.getResolveOrder());
-        newApp.setExecutionMode(oldApp.getExecutionMode());
-        newApp.setSparkImage(oldApp.getSparkImage());
-        newApp.setK8sNamespace(oldApp.getK8sNamespace());
-        newApp.setK8sRestExposedType(oldApp.getK8sRestExposedType());
-        newApp.setK8sPodTemplate(oldApp.getK8sPodTemplate());
-        newApp.setK8sJmPodTemplate(oldApp.getK8sJmPodTemplate());
-        newApp.setK8sTmPodTemplate(oldApp.getK8sTmPodTemplate());
-        newApp.setK8sHadoopIntegration(oldApp.getK8sHadoopIntegration());
-        newApp.setDescription(oldApp.getDescription());
-        newApp.setAlertId(oldApp.getAlertId());
-        newApp.setCpFailureAction(oldApp.getCpFailureAction());
-        newApp.setCpFailureRateInterval(oldApp.getCpFailureRateInterval());
-        newApp.setCpMaxFailureInterval(oldApp.getCpMaxFailureInterval());
-        newApp.setMainClass(oldApp.getMainClass());
         newApp.setAppType(oldApp.getAppType());
+        newApp.setVersionId(oldApp.getVersionId());
+        newApp.setAppName(appParam.getAppName());
+        newApp.setExecutionMode(oldApp.getExecutionMode());
         newApp.setResourceFrom(oldApp.getResourceFrom());
         newApp.setProjectId(oldApp.getProjectId());
         newApp.setModule(oldApp.getModule());
-        newApp.setUserId(ServiceHelper.getUserId());
-        newApp.setState(FlinkAppStateEnum.ADDED.getValue());
-        newApp.setRelease(ReleaseStateEnum.NEED_RELEASE.get());
-        newApp.setOptionState(OptionStateEnum.NONE.getValue());
-        newApp.setHotParams(oldApp.getHotParams());
-
+        newApp.setMainClass(oldApp.getMainClass());
         newApp.setJar(oldApp.getJar());
         newApp.setJarCheckSum(oldApp.getJarCheckSum());
-        newApp.setTags(oldApp.getTags());
-        newApp.setTeamId(oldApp.getTeamId());
+        newApp.setAppProperties(oldApp.getAppProperties());
+        newApp.setAppArgs(appParam.getAppArgs() != null ? 
appParam.getAppArgs() : oldApp.getAppArgs());
+        newApp.setYarnQueue(oldApp.getYarnQueue());
+        newApp.resolveYarnQueue();
+        newApp.setK8sMasterUrl(oldApp.getK8sMasterUrl());
+        newApp.setK8sContainerImage(oldApp.getK8sContainerImage());
+        newApp.setK8sImagePullPolicy(oldApp.getK8sImagePullPolicy());
+        newApp.setK8sServiceAccount(oldApp.getK8sServiceAccount());
+        newApp.setK8sNamespace(oldApp.getK8sNamespace());
+
         newApp.setHadoopUser(oldApp.getHadoopUser());
+        newApp.setRestartSize(oldApp.getRestartSize());
+        newApp.setState(SparkAppStateEnum.ADDED.getValue());
+        newApp.setOptions(oldApp.getOptions());
+        newApp.setOptionState(OptionStateEnum.NONE.getValue());
+        newApp.setUserId(ServiceHelper.getUserId());
+        newApp.setDescription(oldApp.getDescription());
+        newApp.setRelease(ReleaseStateEnum.NEED_RELEASE.get());
+        newApp.setAlertId(oldApp.getAlertId());
+        newApp.setCreateTime(new Date());
+        newApp.setModifyTime(new Date());

Review Comment:
   Could the same `Date` object be reused for both `setCreateTime` and `setModifyTime`, instead of constructing two separate `new Date()` instances?
   



##########
streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/SparkApplicationActionServiceImpl.java:
##########
@@ -291,39 +285,43 @@ public void start(SparkApplication appParam, boolean 
auto) throws Exception {
 
         Map<String, Object> extraParameter = new HashMap<>(0);
         if (application.isSparkSqlJob()) {
-            FlinkSql flinkSql = 
flinkSqlService.getEffective(application.getId(), true);
+            SparkSql sparkSql = 
sparkSqlService.getEffective(application.getId(), true);
             // Get the sql of the replaced placeholder
-            String realSql = 
variableService.replaceVariable(application.getTeamId(), flinkSql.getSql());
-            flinkSql.setSql(DeflaterUtils.zipString(realSql));
-            extraParameter.put(ConfigKeys.KEY_FLINK_SQL(null), 
flinkSql.getSql());
+            String realSql = 
variableService.replaceVariable(application.getTeamId(), sparkSql.getSql());
+            sparkSql.setSql(DeflaterUtils.zipString(realSql));
+            extraParameter.put(ConfigKeys.KEY_SPARK_SQL(null), 
sparkSql.getSql());
         }
 
         Tuple2<String, String> userJarAndAppConf = 
getUserJarAndAppConf(sparkEnv, application);
-        String flinkUserJar = userJarAndAppConf.f0;
+        String sparkUserJar = userJarAndAppConf.f0;
         String appConf = userJarAndAppConf.f1;
 
         BuildResult buildResult = buildPipeline.getBuildResult();
-        if (SparkExecutionMode.YARN_CLUSTER == 
application.getSparkExecutionMode()
-            || SparkExecutionMode.YARN_CLIENT == 
application.getSparkExecutionMode()) {
-            buildResult = new ShadedBuildResponse(null, flinkUserJar, true);
-            application.setJobManagerUrl(YarnUtils.getRMWebAppURL(true));
+        if 
(SparkExecutionMode.isYarnMode(application.getSparkExecutionMode())) {
+            buildResult = new ShadedBuildResponse(null, sparkUserJar, true);
+            if (StringUtils.isNotBlank(application.getYarnQueueName())) {
+                extraParameter.put("yarnQueueName", 
application.getYarnQueueName());
+            }
+            if (StringUtils.isNotBlank(application.getYarnQueueLabel())) {
+                extraParameter.put("yarnQueueLabel", 
application.getYarnQueueLabel());

Review Comment:
   Should the keys `"yarnQueueName"` and `"yarnQueueLabel"` be extracted as named constants rather than repeated as string literals?
   



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to