lsyldliu commented on code in PR #25754:
URL: https://github.com/apache/flink/pull/25754#discussion_r1904938662


##########
flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClientFactory.java:
##########
@@ -78,6 +80,12 @@ private YarnClusterDescriptor 
getClusterDescriptor(Configuration configuration)
         final YarnConfiguration yarnConfiguration =
                 Utils.getYarnAndHadoopConfiguration(configuration);
 
+        if (System.getenv().get("IN_TESTS") != null) {

Review Comment:
   What is the aim of this change?



##########
flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/cli/CliOptions.java:
##########
@@ -35,16 +36,16 @@ public class CliOptions {
 
     private final boolean isPrintHelp;
     private final String sessionId;
-    private final URL initFile;
-    private final URL sqlFile;
+    private final URI initFile;

Review Comment:
   I'm just a little curious: why do we change it to the `URI` type?



##########
flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/cli/CliStrings.java:
##########
@@ -213,6 +213,8 @@ public AttributedString build() {
 
     public static final String MESSAGE_EXECUTE_STATEMENT = "Execute statement 
succeeded.";
 
+    public static final String MESSAGE_DEPLOY_SCRIPT = "Deploy script in 
application mode: ";

Review Comment:
   How about "Deploy SQL script in application mode: "? 
   BTW, is the content after the colon an SQL script?



##########
flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/SqlClient.java:
##########
@@ -140,7 +136,11 @@ private void openCli(Executor executor) {
 
         try (CliClient cli = new CliClient(terminalFactory, executor, 
historyFilePath)) {
             if (options.getInitFile() != null) {
-                boolean success = 
cli.executeInitialization(readFromURL(options.getInitFile()));
+                if (isApplicationMode(executor.getSessionConfig())) {
+                    throw new SqlClientException(
+                            "Sql Client doesn't support to run init files when 
deploying script into cluster.");
+                }
+                boolean success = 
cli.executeInitialization(options.getInitFile());

Review Comment:
   I'm confused about what the content of an init file is — is it an SQL query, SQL DDL, or 
config options? After seeing the error messages at line 147 and line 153, I guess it is 
an SQL query.



##########
flink-yarn-tests/src/test/java/org/apache/flink/yarn/SqlYARNApplicationITCase.java:
##########
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.yarn;
+
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.DeploymentOptions;
+import org.apache.flink.configuration.JobManagerOptions;
+import org.apache.flink.configuration.RpcOptions;
+import org.apache.flink.configuration.TaskManagerOptions;
+import org.apache.flink.util.FileUtils;
+import org.apache.flink.yarn.configuration.YarnConfigOptions;
+import org.apache.flink.yarn.configuration.YarnDeploymentTarget;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static 
org.apache.flink.yarn.configuration.YarnConfigOptions.CLASSPATH_INCLUDE_USER_JAR;
+import static org.assertj.core.api.Assertions.assertThat;
+
+/** Tests to deploy script into mini yarn cluster. */
+public class SqlYARNApplicationITCase extends YarnTestBase {
+
+    private static final Logger LOG = 
LoggerFactory.getLogger(SqlYARNApplicationITCase.class);
+
+    private static final Duration yarnAppTerminateTimeout = 
Duration.ofSeconds(30);
+    private static final int sleepIntervalInMS = 100;
+    private static @TempDir Path workDir;
+    private static File script;
+
+    @BeforeAll
+    static void setup() throws Exception {
+        YARN_CONFIGURATION.set(
+                YarnTestBase.TEST_CLUSTER_NAME_KEY, 
"flink-sql-yarn-test-application");
+        startYARNWithConfig(YARN_CONFIGURATION, true);
+        script = workDir.resolve("script.sql").toFile();
+        assertThat(script.createNewFile()).isTrue();
+        FileUtils.writeFileUtf8(
+                script,
+                "CREATE TEMPORARY TABLE sink(\n"
+                        + "  a INT\n"
+                        + ") WITH (\n"
+                        + "  'connector' = 'blackhole'\n"
+                        + ");\n"
+                        + "INSERT INTO sink VALUES (1), (2), (3);");

Review Comment:
   Can we add a test to cover the UDF jar?



##########
flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/ExecutorImpl.java:
##########
@@ -331,6 +334,19 @@ public List<String> completeStatement(String statement, 
int position) {
                 .getCandidates();
     }
 
+    @Override
+    public String deployScript(@Nullable String script, @Nullable URI uri) {
+        return getResponse(

Review Comment:
   I think it would be better if we checked that the two parameters cannot both be null 
simultaneously.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to