CTTY commented on code in PR #17718:
URL: https://github.com/apache/hudi/pull/17718#discussion_r2648582033


##########
hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/HDFSParquetImporterUtils.java:
##########
@@ -128,7 +127,7 @@ public int dataImport(JavaSparkContext jsc) {
     FileSystem fs = HadoopFSUtils.getFs(this.targetPath, 
jsc.hadoopConfiguration());
     this.props = this.propsFilePath == null || this.propsFilePath.isEmpty() ? 
buildProperties(this.configs)
         : readConfig(fs.getConf(), new StoragePath(this.propsFilePath), 
this.configs).getProps(true);
-    LOG.info("Starting data import with configs : " + props.toString());
+    log.info("Starting data import with configs : " + props.toString());

Review Comment:
   We can apply parameterized logging here, e.g. `log.info("Starting data import with configs : {}", props)`, so the message is only formatted when the log level is enabled.



##########
hudi-spark-datasource/hudi-spark/src/test/java/HoodieJavaStreamingApp.java:
##########
@@ -307,7 +304,7 @@ public int addInputAndValidateIngestion(SparkSession spark, 
FileSystem fs, Strin
       Thread.sleep(3000);
       waitTillNCommits(fs, numExpCommits, 180, 3);
       commitInstantTime2 = HoodieDataSourceHelpers.listCommitsSince(fs, 
tablePath, commitInstantTime1).stream().sorted().findFirst().get();
-      LOG.info("Second commit at instant time :" + commitInstantTime2);
+      log.info("Second commit at instant time :" + commitInstantTime2);

Review Comment:
   We can apply parameterized logging (`{}` placeholders instead of string concatenation) in several places in this class.



##########
hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/BootstrapExecutorUtils.java:
##########
@@ -159,7 +157,7 @@ public BootstrapExecutorUtils(Config cfg, JavaSparkContext 
jssc, FileSystem fs,
       builder = 
builder.withSchema(schemaProvider.getTargetSchema().toString());
     }
     this.bootstrapConfig = builder.build();
-    LOG.info("Created bootstrap executor with configs : " + 
bootstrapConfig.getProps());
+    log.info("Created bootstrap executor with configs : " + 
bootstrapConfig.getProps());

Review Comment:
   We can apply parameterized logging here, e.g. `log.info("Created bootstrap executor with configs : {}", bootstrapConfig.getProps())`.



##########
hudi-spark-datasource/hudi-spark/src/test/java/HoodieJavaGenerateApp.java:
##########
@@ -132,7 +130,7 @@ private HoodieTestDataGenerator getDataGenerate() {
    */
   private DataFrameWriter<Row> updateHiveSyncConfig(DataFrameWriter<Row> 
writer) {
     if (enableHiveSync) {
-      LOG.info("Enabling Hive sync to " + hiveJdbcUrl);
+      log.info("Enabling Hive sync to " + hiveJdbcUrl);

Review Comment:
   Same here — this can use parameterized logging.



##########
hudi-spark-datasource/hudi-spark/src/test/java/HoodieJavaGenerateApp.java:
##########
@@ -194,6 +192,6 @@ private void insert(SparkSession spark) throws IOException {
     writer.save(tablePath); // ultimately where the dataset will be placed
     FileSystem fs = FileSystem.get(jssc.hadoopConfiguration());
     String commitInstantTime1 = HoodieDataSourceHelpers.latestCommit(fs, 
tablePath);
-    LOG.info("Commit at instant time :" + commitInstantTime1);
+    log.info("Commit at instant time :" + commitInstantTime1);

Review Comment:
   Same here — this can use parameterized logging.



##########
hudi-spark-datasource/hudi-spark/src/test/java/HoodieJavaApp.java:
##########
@@ -170,7 +168,7 @@ public void run() throws Exception {
     // new dataset if needed
     writer.save(tablePath); // ultimately where the dataset will be placed
     String commitInstantTime1 = HoodieDataSourceHelpers.latestCommit(fs, 
tablePath);
-    LOG.info("First commit at instant time :" + commitInstantTime1);
+    log.info("First commit at instant time :" + commitInstantTime1);

Review Comment:
   We can apply parameterized logging (`{}` placeholders instead of string concatenation) in several places in this class.



##########
hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestHoodieBackedMetadata.java:
##########
@@ -2317,7 +2316,7 @@ public void testMultiWriterForDoubleLocking() throws 
Exception {
 
       // Ensure all commits were synced to the Metadata Table
       HoodieTableMetaClient metadataMetaClient = 
createMetaClient(metadataTableBasePath);
-      LOG.warn("total commits in metadata table " + 
metadataMetaClient.getActiveTimeline().getCommitsTimeline().countInstants());
+      log.warn("total commits in metadata table " + 
metadataMetaClient.getActiveTimeline().getCommitsTimeline().countInstants());

Review Comment:
   We can apply parameterized logging (`{}` placeholders instead of string concatenation) in several places in this class.



##########
hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/BootstrapExecutorUtils.java:
##########
@@ -302,7 +300,14 @@ private Map<String, Object> 
extractConfigsRelatedToTimestampBasedKeyGenerator(St
     return Collections.emptyMap();
   }
 
+  /**
+   * Configuration class for Bootstrap operations.
+   * Note: Explicit setters are used instead of Lombok's @Setter annotation 
because this class is accessed from Scala code
+   * (RunBootstrapProcedure.scala). Since Scala compilation happens before 
Java compilation in the Maven build lifecycle,
+   * Lombok-generated methods would not be visible to the Scala compiler, 
causing compilation errors.
+   */

Review Comment:
   Good catch!



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to