codope commented on code in PR #6489:
URL: https://github.com/apache/hudi/pull/6489#discussion_r967070086


##########
hudi-cli/pom.xml:
##########
@@ -132,6 +133,33 @@
 
 
   <dependencies>
+    <!--  Spring Boot  -->
+    <dependency>

Review Comment:
   Better to declare the dependencies in the root pom under 
`dependencyManagement` and then use here.



##########
hudi-cli/pom.xml:
##########
@@ -200,10 +228,21 @@
     </dependency>
 
     <!-- Logging -->
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>

Review Comment:
   Do we need this with log4j?



##########
docker/demo/compaction.commands:
##########
@@ -1,19 +1,19 @@
 
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
+//  Licensed to the Apache Software Foundation (ASF) under one

Review Comment:
  Same here and in other files. Revert if not needed.



##########
hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java:
##########
@@ -86,7 +87,7 @@
  */
 public class SparkMain {
 
-  private static final Logger LOG = Logger.getLogger(SparkMain.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkMain.class);

Review Comment:
   Why not log4j Logger?



##########
hudi-examples/hudi-examples-flink/pom.xml:
##########
@@ -263,6 +263,12 @@
             <scope>test</scope>
         </dependency>
         <!-- Junit 5 dependencies -->
+        <dependency>

Review Comment:
  Why are dependency changes required in hudi-examples-flink and 
hudi-examples-spark?



##########
hudi-cli/pom.xml:
##########
@@ -254,12 +293,24 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.code.gson</groupId>
+          <artifactId>gson</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
     </dependency>
 
+    <dependency>

Review Comment:
   Why is `gson` needed?



##########
hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java:
##########
@@ -25,25 +25,24 @@
 import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.common.table.HoodieTableVersion;
 import org.apache.hudi.common.util.StringUtils;
-
 import org.apache.spark.launcher.SparkLauncher;
-import org.springframework.shell.core.CommandMarker;
-import org.springframework.shell.core.annotation.CliCommand;
-import org.springframework.shell.core.annotation.CliOption;
-import org.springframework.stereotype.Component;
+import org.springframework.shell.standard.ShellComponent;
+import org.springframework.shell.standard.ShellMethod;
+import org.springframework.shell.standard.ShellOption;
 
 /**
  * CLI command to assist in upgrading/downgrading Hoodie table to a different 
version.
  */
-@Component
-public class UpgradeOrDowngradeCommand implements CommandMarker {
+@ShellComponent
+public class UpgradeOrDowngradeCommand {
 
-  @CliCommand(value = "upgrade table", help = "Upgrades a table")
+  @ShellMethod(key = "upgrade table", value = "Upgrades a table")
   public String upgradeHoodieTable(
-      @CliOption(key = {"toVersion"}, help = "To version of Hoodie table to be 
upgraded/downgraded to", unspecifiedDefaultValue = "") final String toVersion,
-      @CliOption(key = {"sparkProperties"}, help = "Spark Properties File 
Path") final String sparkPropertiesPath,
-      @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = 
"Spark Master") String master,
-      @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G",
+      @ShellOption(value = {"--toVersion"}, help = "To version of Hoodie table 
to be upgraded/downgraded to", defaultValue = "") final String toVersion,
+      @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties 
File Path",
+          defaultValue = ShellOption.NULL) final String sparkPropertiesPath,

Review Comment:
  I'm wondering: isn't there a mandatory/required flag? Why do we have to set 
the default to null? I would avoid null defaults.



##########
hudi-cli/pom.xml:
##########
@@ -200,10 +228,21 @@
     </dependency>
 
     <!-- Logging -->
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.logging.log4j</groupId>
       <artifactId>log4j-core</artifactId>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>

Review Comment:
   Do we need both log4j-api and log4j-1.2-api?



##########
hudi-cli/src/main/java/org/apache/hudi/cli/Main.java:
##########
@@ -18,18 +18,19 @@
 
 package org.apache.hudi.cli;
 
-import org.springframework.shell.Bootstrap;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
 
 import java.io.IOException;
 
 /**
  * Main class that delegates to Spring Shell's Bootstrap class in order to 
simplify debugging inside an IDE.
  */
+@SpringBootApplication
 public class Main {
 
   public static void main(String[] args) throws IOException {
     System.out.println("Main called");
-    new HoodieSplashScreen();
-    Bootstrap.main(args);
+    SpringApplication.run(Main.class, args);

Review Comment:
   Does this automatically generate the splash screen?



##########
pom.xml:
##########
@@ -182,7 +182,7 @@
     <spark.bundle.hive.shade.prefix/>
     <utilities.bundle.hive.scope>provided</utilities.bundle.hive.scope>
     <utilities.bundle.hive.shade.prefix/>
-    <argLine>-Xmx2g</argLine>
+    <argLine>-Xms4g -Xmx4g</argLine>

Review Comment:
  Why are we increasing the JVM memory allocation?



##########
docker/demo/compaction-bootstrap.commands:
##########
@@ -1,19 +1,19 @@
 
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
+//  Licensed to the Apache Software Foundation (ASF) under one

Review Comment:
   Why is this required? Please revert this change if not necessary.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to