This is an automated email from the ASF dual-hosted git repository.
xushiyan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 2a08a65f71 [MINOR] Fix typos in Spark client related classes (#6204)
2a08a65f71 is described below
commit 2a08a65f719b5c155dde85a0dc318af5033c31d5
Author: Vander <[email protected]>
AuthorDate: Mon Jul 25 12:41:42 2022 +0800
[MINOR] Fix typos in Spark client related classes (#6204)
---
.../clustering/run/strategy/SingleSparkJobExecutionStrategy.java | 2 +-
.../org/apache/hudi/client/utils/SparkInternalSchemaConverter.java | 4 ++--
.../main/java/org/apache/hudi/client/utils/SparkValidatorUtils.java | 2 +-
.../org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java | 6 +++---
4 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/clustering/run/strategy/SingleSparkJobExecutionStrategy.java b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/clustering/run/strategy/SingleSparkJobExecutionStrategy.java
index 1158d0ada4..bb6d3df5f1 100644
--- a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/clustering/run/strategy/SingleSparkJobExecutionStrategy.java
+++ b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/clustering/run/strategy/SingleSparkJobExecutionStrategy.java
@@ -136,7 +136,7 @@ public abstract class SingleSparkJobExecutionStrategy<T extends HoodieRecordPayl
/**
* Execute clustering to write inputRecords into new files as defined by
rules in strategy parameters.
* The number of new file groups created is bounded by numOutputGroups.
- * Note that commit is not done as part of strategy. commit is callers responsibility.
+ * Note that commit is not done as part of strategy. Commit is callers responsibility.
*/
public abstract Iterator<List<WriteStatus>> performClusteringWithRecordsIterator(final Iterator<HoodieRecord<T>> records,
final int numOutputGroups,
final String instantTime,
diff --git a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkInternalSchemaConverter.java b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkInternalSchemaConverter.java
index 8e086c2927..098870a60a 100644
--- a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkInternalSchemaConverter.java
+++ b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkInternalSchemaConverter.java
@@ -81,7 +81,7 @@ public class SparkInternalSchemaConverter {
public static final String HOODIE_VALID_COMMITS_LIST = "hoodie.valid.commits.list";
/**
- * Converts a spark schema to an hudi internal schema. Fields without IDs are kept and assigned fallback IDs.
+ * Convert a spark schema to an hudi internal schema. Fields without IDs are kept and assigned fallback IDs.
*
* @param sparkSchema a spark schema
* @return a matching internal schema for the provided spark schema
@@ -157,7 +157,7 @@ public class SparkInternalSchemaConverter {
}
/**
- * Converts Spark schema to Hudi internal schema, and prune fields.
+ * Convert Spark schema to Hudi internal schema, and prune fields.
* Fields without IDs are kept and assigned fallback IDs.
*
* @param sparkSchema a pruned spark schema
diff --git a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkValidatorUtils.java b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkValidatorUtils.java
index fd083f2c89..a6d03eae2b 100644
--- a/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkValidatorUtils.java
+++ b/hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/client/utils/SparkValidatorUtils.java
@@ -50,7 +50,7 @@ import java.util.stream.Stream;
import scala.collection.JavaConverters;
/**
- * Spark validator utils to verify and run any precommit validators configured.
+ * Spark validator utils to verify and run any pre-commit validators configured.
*/
public class SparkValidatorUtils {
private static final Logger LOG = LogManager.getLogger(BaseSparkCommitActionExecutor.class);
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java b/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java
index 491c6700c9..9e74d14c04 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieAvroDataBlock.java
@@ -308,7 +308,7 @@ public class HoodieAvroDataBlock extends HoodieDataBlock {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream output = new DataOutputStream(baos);
- // 2. Compress and Write schema out
+ // 1. Compress and Write schema out
byte[] schemaContent = compress(schema.toString());
output.writeInt(schemaContent.length);
output.write(schemaContent);
@@ -318,10 +318,10 @@ public class HoodieAvroDataBlock extends HoodieDataBlock {
recordItr.forEachRemaining(records::add);
}
- // 3. Write total number of records
+ // 2. Write total number of records
output.writeInt(records.size());
- // 4. Write the records
+ // 3. Write the records
Iterator<IndexedRecord> itr = records.iterator();
while (itr.hasNext()) {
IndexedRecord s = itr.next();