This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new 3a2586f6d Decide table format using outputFormat in HiveSinkConfig #2303
3a2586f6d is described below

commit 3a2586f6dc007ac465588c9f4e04dd237064ca5f
Author: Xiao Zhao <[email protected]>
AuthorDate: Mon Aug 1 17:06:45 2022 +0800

    Decide table format using outputFormat in HiveSinkConfig #2303
---
 .../seatunnel/connectors/seatunnel/hive/sink/HiveSinkConfig.java    | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/sink/HiveSinkConfig.java b/seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/sink/HiveSinkConfig.java
index f4c1c175b..837237b19 100644
--- a/seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/sink/HiveSinkConfig.java
+++ b/seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/sink/HiveSinkConfig.java
@@ -92,13 +92,13 @@ public class HiveSinkConfig implements Serializable {
 
         try {
             table = hiveMetaStoreClient.getTable(dbName, tableName);
-            String inputFormat = table.getSd().getInputFormat();
+            String outputFormat = table.getSd().getOutputFormat();
             Map<String, String> parameters = table.getSd().getSerdeInfo().getParameters();
-            if ("org.apache.hadoop.mapred.TextInputFormat".equals(inputFormat)) {
+            if ("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat".equals(outputFormat)) {
                 config = config.withValue(FILE_FORMAT, ConfigValueFactory.fromAnyRef(FileFormat.TEXT.toString()))
                         .withValue(FIELD_DELIMITER, ConfigValueFactory.fromAnyRef(parameters.get("field.delim")))
                         .withValue(ROW_DELIMITER, ConfigValueFactory.fromAnyRef(parameters.get("line.delim")));
-            } else if ("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat".equals(inputFormat)) {
+            } else if ("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat".equals(outputFormat)) {
                 config = config.withValue(FILE_FORMAT, ConfigValueFactory.fromAnyRef(FileFormat.PARQUET.toString()));
             } else {
                 throw new RuntimeException("Only support text or parquet file now");

Reply via email to