This is an automated email from the ASF dual-hosted git repository.
danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 1cc20882667 [HUDI-6945] Fix HoodieRowDataParquetWriter cast issue
(#9868)
1cc20882667 is described below
commit 1cc20882667d62e0e3846d36b7cf168c255cf79e
Author: YueZhang <[email protected]>
AuthorDate: Mon Oct 16 19:04:18 2023 +0800
[HUDI-6945] Fix HoodieRowDataParquetWriter cast issue (#9868)
---
.../io/storage/row/HoodieRowDataParquetWriter.java | 37 ++++------------------
1 file changed, 6 insertions(+), 31 deletions(-)
diff --git
a/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/io/storage/row/HoodieRowDataParquetWriter.java
b/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/io/storage/row/HoodieRowDataParquetWriter.java
index 17b3b6b37cf..099b0224791 100644
---
a/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/io/storage/row/HoodieRowDataParquetWriter.java
+++
b/hudi-client/hudi-flink-client/src/main/java/org/apache/hudi/io/storage/row/HoodieRowDataParquetWriter.java
@@ -18,46 +18,26 @@
package org.apache.hudi.io.storage.row;
-import org.apache.hudi.common.fs.FSUtils;
-import org.apache.hudi.common.fs.HoodieWrapperFileSystem;
-import org.apache.hudi.io.storage.HoodieParquetConfig;
-
import org.apache.flink.table.data.RowData;
import org.apache.hadoop.fs.Path;
-import org.apache.parquet.hadoop.ParquetFileWriter;
-import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.hudi.io.storage.HoodieBaseParquetWriter;
+import org.apache.hudi.io.storage.HoodieParquetConfig;
import java.io.IOException;
/**
- * Parquet's impl of {@link HoodieRowDataFileWriter} to write {@link RowData}s.
+ * Parquet's impl of {@link HoodieRowDataFileWriter} to write Flink {@link
RowData}s.
*/
-public class HoodieRowDataParquetWriter extends ParquetWriter<RowData>
+public class HoodieRowDataParquetWriter extends
HoodieBaseParquetWriter<RowData>
implements HoodieRowDataFileWriter {
- private final Path file;
- private final HoodieWrapperFileSystem fs;
- private final long maxFileSize;
private final HoodieRowDataParquetWriteSupport writeSupport;
public HoodieRowDataParquetWriter(Path file,
HoodieParquetConfig<HoodieRowDataParquetWriteSupport> parquetConfig)
throws IOException {
- super(HoodieWrapperFileSystem.convertToHoodiePath(file,
parquetConfig.getHadoopConf()),
- ParquetFileWriter.Mode.CREATE, parquetConfig.getWriteSupport(),
parquetConfig.getCompressionCodecName(),
- parquetConfig.getBlockSize(), parquetConfig.getPageSize(),
parquetConfig.getPageSize(),
- DEFAULT_IS_DICTIONARY_ENABLED, DEFAULT_IS_VALIDATING_ENABLED,
- DEFAULT_WRITER_VERSION, FSUtils.registerFileSystem(file,
parquetConfig.getHadoopConf()));
- this.file = HoodieWrapperFileSystem.convertToHoodiePath(file,
parquetConfig.getHadoopConf());
- this.fs = (HoodieWrapperFileSystem)
this.file.getFileSystem(FSUtils.registerFileSystem(file,
- parquetConfig.getHadoopConf()));
- this.maxFileSize = parquetConfig.getMaxFileSize()
- + Math.round(parquetConfig.getMaxFileSize() *
parquetConfig.getCompressionRatio());
- this.writeSupport = parquetConfig.getWriteSupport();
- }
+ super(file, parquetConfig);
- @Override
- public boolean canWrite() {
- return fs.getBytesWritten(file) < maxFileSize;
+ this.writeSupport = parquetConfig.getWriteSupport();
}
@Override
@@ -70,9 +50,4 @@ public class HoodieRowDataParquetWriter extends
ParquetWriter<RowData>
public void writeRow(RowData row) throws IOException {
super.write(row);
}
-
- @Override
- public void close() throws IOException {
- super.close();
- }
}