This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 3343322965d4f379d74f6864a104b708cada3b21
Author: slothever <18522955+w...@users.noreply.github.com>
AuthorDate: Tue Mar 26 11:14:30 2024 +0800

    [fix](insert) Fix conversion of Doris types to Hive types (#32735)
    
    #31442
    
    When creating a Hive table, convert Doris types to Hive types by checking the
    Doris PrimitiveType, so that every scalar type (including CHAR, VARCHAR, and
    all DECIMAL variants) maps correctly, and handle ARRAY, MAP, and STRUCT types
    recursively.
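    
    For illustration, a minimal standalone sketch of the string shapes produced
    for nested types (the class and helper names below are hypothetical and only
    mirror the recursion; the real code recurses through dorisTypeToHiveType on
    Doris Type/PrimitiveType objects rather than plain strings):
    
        public class HiveTypeStringSketch {
            // array<item>
            static String array(String itemType) {
                return "array<" + itemType + ">";
            }
            // map<key,value>
            static String map(String keyType, String valueType) {
                return "map<" + keyType + "," + valueType + ">";
            }
            // struct<name1:type1,name2:type2,...>
            static String struct(String... nameTypePairs) {
                StringBuilder sb = new StringBuilder("struct<");
                for (int i = 0; i + 1 < nameTypePairs.length; i += 2) {
                    if (i > 0) {
                        sb.append(",");
                    }
                    sb.append(nameTypePairs[i]).append(":").append(nameTypePairs[i + 1]);
                }
                return sb.append(">").toString();
            }
            public static void main(String[] args) {
                // ARRAY<DECIMAL(6,4)> -> array<decimal(6,4)>
                System.out.println(array("decimal(6,4)"));
                // MAP<BIGINT,STRUCT<name:STRING,rate:DECIMAL(3,1)>>
                //   -> map<bigint,struct<name:string,rate:decimal(3,1)>>
                System.out.println(map("bigint",
                        struct("name", "string", "rate", "decimal(3,1)")));
            }
        }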
---
 .../datasource/hive/HiveMetaStoreClientHelper.java | 117 ++++++++++++------
 .../org/apache/doris/datasource/hive/HiveUtil.java |  95 +++++++++++++++
 .../datasource/hive/ThriftHMSCachedClient.java     |  94 +--------------
 .../plans/physical/PhysicalHiveTableSink.java      |   1 +
 .../datasource/hive/HiveDDLAndDMLPlanTest.java     | 131 +++++++++++++++++++++
 5 files changed, 310 insertions(+), 128 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
index 4c002275bde..3ebce966777 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
@@ -586,42 +586,89 @@ public class HiveMetaStoreClientHelper {
      * Convert doris type to hive type.
      */
     public static String dorisTypeToHiveType(Type dorisType) {
-        if (dorisType.equals(Type.BOOLEAN)) {
-            return "boolean";
-        } else if (dorisType.equals(Type.TINYINT)) {
-            return "tinyint";
-        } else if (dorisType.equals(Type.SMALLINT)) {
-            return "smallint";
-        } else if (dorisType.equals(Type.INT)) {
-            return "int";
-        } else if (dorisType.equals(Type.BIGINT)) {
-            return "bigint";
-        } else if (dorisType.equals(Type.DATE) || dorisType.equals(Type.DATEV2)) {
-            return "date";
-        } else if (dorisType.equals(Type.DATETIME) || dorisType.equals(Type.DATETIMEV2)) {
-            return "timestamp";
-        } else if (dorisType.equals(Type.FLOAT)) {
-            return "float";
-        } else if (dorisType.equals(Type.DOUBLE)) {
-            return "double";
-        } else if (dorisType.equals(Type.STRING)) {
-            return "string";
-        } else if (dorisType.equals(Type.DEFAULT_DECIMALV3)) {
-            StringBuilder decimalType = new StringBuilder();
-            decimalType.append("decimal");
-            ScalarType scalarType = (ScalarType) dorisType;
-            int precision = scalarType.getScalarPrecision();
-            if (precision == 0) {
-                precision = ScalarType.DEFAULT_PRECISION;
+        if (dorisType.isScalarType()) {
+            PrimitiveType primitiveType = dorisType.getPrimitiveType();
+            switch (primitiveType) {
+                case BOOLEAN:
+                    return "boolean";
+                case TINYINT:
+                    return "tinyint";
+                case SMALLINT:
+                    return "smallint";
+                case INT:
+                    return "int";
+                case BIGINT:
+                    return "bigint";
+                case DATEV2:
+                case DATE:
+                    return "date";
+                case DATETIMEV2:
+                case DATETIME:
+                    return "timestamp";
+                case FLOAT:
+                    return "float";
+                case DOUBLE:
+                    return "double";
+                case CHAR: {
+                    ScalarType scalarType = (ScalarType) dorisType;
+                    return "char(" + scalarType.getLength() + ")";
+                }
+                case VARCHAR:
+                case STRING:
+                    return "string";
+                case DECIMAL32:
+                case DECIMAL64:
+                case DECIMAL128:
+                case DECIMAL256:
+                case DECIMALV2: {
+                    StringBuilder decimalType = new StringBuilder();
+                    decimalType.append("decimal");
+                    ScalarType scalarType = (ScalarType) dorisType;
+                    int precision = scalarType.getScalarPrecision();
+                    if (precision == 0) {
+                        precision = ScalarType.DEFAULT_PRECISION;
+                    }
+                    // decimal(precision, scale)
+                    int scale = scalarType.getScalarScale();
+                    decimalType.append("(");
+                    decimalType.append(precision);
+                    decimalType.append(",");
+                    decimalType.append(scale);
+                    decimalType.append(")");
+                    return decimalType.toString();
+                }
+                default:
+                    throw new HMSClientException("Unsupported primitive type conversion of " + dorisType.toSql());
+            }
+        } else if (dorisType.isArrayType()) {
+            ArrayType dorisArray = (ArrayType) dorisType;
+            Type itemType = dorisArray.getItemType();
+            return "array<" + dorisTypeToHiveType(itemType) + ">";
+        } else if (dorisType.isMapType()) {
+            MapType dorisMap = (MapType) dorisType;
+            Type keyType = dorisMap.getKeyType();
+            Type valueType = dorisMap.getValueType();
+            return "map<"
+                    + dorisTypeToHiveType(keyType)
+                    + ","
+                    + dorisTypeToHiveType(valueType)
+                    + ">";
+        } else if (dorisType.isStructType()) {
+            StructType dorisStruct = (StructType) dorisType;
+            StringBuilder structType = new StringBuilder();
+            structType.append("struct<");
+            ArrayList<StructField> fields = dorisStruct.getFields();
+            for (int i = 0; i < fields.size(); i++) {
+                StructField field = fields.get(i);
+                structType.append(field.getName());
+                structType.append(":");
+                structType.append(dorisTypeToHiveType(field.getType()));
+                if (i != fields.size() - 1) {
+                    structType.append(",");
+                }
             }
-            // decimal(precision, scale)
-            int scale = scalarType.getScalarScale();
-            decimalType.append("(");
-            decimalType.append(precision);
-            decimalType.append(",");
-            decimalType.append(scale);
-            decimalType.append(")");
-            return decimalType.toString();
+            structType.append(">");
+            return structType.toString();
         }
         throw new HMSClientException("Unsupported type conversion of " + dorisType.toSql());
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java
index eb107464bfc..cc834894076 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java
@@ -17,6 +17,8 @@
 
 package org.apache.doris.datasource.hive;
 
+import org.apache.doris.catalog.Column;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.UserException;
 import org.apache.doris.fs.remote.BrokerFileSystem;
 import org.apache.doris.fs.remote.RemoteFileSystem;
@@ -24,8 +26,13 @@ import org.apache.doris.fs.remote.RemoteFileSystem;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat;
 import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
@@ -36,8 +43,13 @@ import org.apache.hadoop.util.ReflectionUtils;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
@@ -157,4 +169,87 @@ public final class HiveUtil {
         }
         return resultBuilder.build();
     }
+
+    public static Table toHiveTable(HiveTableMetadata hiveTable) {
+        Objects.requireNonNull(hiveTable.getDbName(), "Hive database name should be not null");
+        Objects.requireNonNull(hiveTable.getTableName(), "Hive table name should be not null");
+        Table table = new Table();
+        table.setDbName(hiveTable.getDbName());
+        table.setTableName(hiveTable.getTableName());
+        // table.setOwner("");
+        int createTime = (int) System.currentTimeMillis() * 1000;
+        table.setCreateTime(createTime);
+        table.setLastAccessTime(createTime);
+        // table.setRetention(0);
+        String location = hiveTable.getProperties().get(HiveMetadataOps.LOCATION_URI_KEY);
+        Set<String> partitionSet = new HashSet<>(hiveTable.getPartitionKeys());
+        Pair<List<FieldSchema>, List<FieldSchema>> hiveSchema = toHiveSchema(hiveTable.getColumns(), partitionSet);
+
+        table.setSd(toHiveStorageDesc(hiveSchema.first, hiveTable.getBucketCols(), hiveTable.getNumBuckets(),
+                hiveTable.getFileFormat(), location));
+        table.setPartitionKeys(hiveSchema.second);
+
+        // table.setViewOriginalText(hiveTable.getViewSql());
+        // table.setViewExpandedText(hiveTable.getViewSql());
+        table.setTableType("MANAGED_TABLE");
+        table.setParameters(hiveTable.getProperties());
+        return table;
+    }
+
+    private static StorageDescriptor toHiveStorageDesc(List<FieldSchema> columns,
+            List<String> bucketCols, int numBuckets, String fileFormat, String location) {
+        StorageDescriptor sd = new StorageDescriptor();
+        sd.setCols(columns);
+        setFileFormat(fileFormat, sd);
+        if (StringUtils.isNotEmpty(location)) {
+            sd.setLocation(location);
+        }
+        sd.setBucketCols(bucketCols);
+        sd.setNumBuckets(numBuckets);
+        Map<String, String> parameters = new HashMap<>();
+        parameters.put("tag", "doris external hive table");
+        sd.setParameters(parameters);
+        return sd;
+    }
+
+    private static void setFileFormat(String fileFormat, StorageDescriptor sd) {
+        String inputFormat;
+        String outputFormat;
+        String serDe;
+        if (fileFormat.equalsIgnoreCase("orc")) {
+            inputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
+            outputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";
+            serDe = "org.apache.hadoop.hive.ql.io.orc.OrcSerde";
+        } else if (fileFormat.equalsIgnoreCase("parquet")) {
+            inputFormat = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat";
+            outputFormat = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat";
+            serDe = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe";
+        } else {
+            throw new IllegalArgumentException("Creating table with an unsupported file format: " + fileFormat);
+        }
+        SerDeInfo serDeInfo = new SerDeInfo();
+        serDeInfo.setSerializationLib(serDe);
+        sd.setSerdeInfo(serDeInfo);
+        sd.setInputFormat(inputFormat);
+        sd.setOutputFormat(outputFormat);
+    }
+
+    private static Pair<List<FieldSchema>, List<FieldSchema>> toHiveSchema(List<Column> columns,
+            Set<String> partitionSet) {
+        List<FieldSchema> hiveCols = new ArrayList<>();
+        List<FieldSchema> hiveParts = new ArrayList<>();
+        for (Column column : columns) {
+            FieldSchema hiveFieldSchema = new FieldSchema();
+            // TODO: add doc, just support doris type
+            hiveFieldSchema.setType(HiveMetaStoreClientHelper.dorisTypeToHiveType(column.getType()));
+            hiveFieldSchema.setName(column.getName());
+            hiveFieldSchema.setComment(column.getComment());
+            if (partitionSet.contains(column.getName())) {
+                hiveParts.add(hiveFieldSchema);
+            } else {
+                hiveCols.add(hiveFieldSchema);
+            }
+        }
+        return Pair.of(hiveCols, hiveParts);
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java
index b5b1147447e..1f3d188ac6b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java
@@ -18,9 +18,7 @@
 package org.apache.doris.datasource.hive;
 
 import org.apache.doris.analysis.TableName;
-import org.apache.doris.catalog.Column;
 import org.apache.doris.common.Config;
-import org.apache.doris.common.Pair;
 import org.apache.doris.datasource.DatabaseMetadata;
 import org.apache.doris.datasource.TableMetadata;
 import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException;
@@ -68,18 +66,14 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 import java.util.Optional;
 import java.util.Queue;
-import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
@@ -178,7 +172,7 @@ public class ThriftHMSCachedClient implements HMSCachedClient {
                 // String location,
                 if (tbl instanceof HiveTableMetadata) {
                     ugiDoAs(() -> {
-                        client.client.createTable(toHiveTable((HiveTableMetadata) tbl));
+                        client.client.createTable(HiveUtil.toHiveTable((HiveTableMetadata) tbl));
                         return null;
                     });
                 }
@@ -191,92 +185,6 @@ public class ThriftHMSCachedClient implements HMSCachedClient {
         }
     }
 
-    private static Table toHiveTable(HiveTableMetadata hiveTable) {
-        Objects.requireNonNull(hiveTable.getDbName(), "Hive database name should be not null");
-        Objects.requireNonNull(hiveTable.getTableName(), "Hive table name should be not null");
-        Table table = new Table();
-        table.setDbName(hiveTable.getDbName());
-        table.setTableName(hiveTable.getTableName());
-        // table.setOwner("");
-        int createTime = (int) System.currentTimeMillis() * 1000;
-        table.setCreateTime(createTime);
-        table.setLastAccessTime(createTime);
-        // table.setRetention(0);
-        String location = hiveTable.getProperties().get(HiveMetadataOps.LOCATION_URI_KEY);
-        Set<String> partitionSet = new HashSet<>(hiveTable.getPartitionKeys());
-        Pair<List<FieldSchema>, List<FieldSchema>> hiveSchema = toHiveSchema(hiveTable.getColumns(), partitionSet);
-
-        table.setSd(toHiveStorageDesc(hiveSchema.first, hiveTable.getBucketCols(), hiveTable.getNumBuckets(),
-                hiveTable.getFileFormat(), location));
-        table.setPartitionKeys(hiveSchema.second);
-
-        // table.setViewOriginalText(hiveTable.getViewSql());
-        // table.setViewExpandedText(hiveTable.getViewSql());
-        table.setTableType("MANAGED_TABLE");
-        table.setParameters(hiveTable.getProperties());
-        return table;
-    }
-
-    private static StorageDescriptor toHiveStorageDesc(List<FieldSchema> columns,
-                                                       List<String> bucketCols,
-                                                       int numBuckets,
-                                                       String fileFormat,
-                                                       String location) {
-        StorageDescriptor sd = new StorageDescriptor();
-        sd.setCols(columns);
-        setFileFormat(fileFormat, sd);
-        if (StringUtils.isNotEmpty(location)) {
-            sd.setLocation(location);
-        }
-        sd.setBucketCols(bucketCols);
-        sd.setNumBuckets(numBuckets);
-        Map<String, String> parameters = new HashMap<>();
-        parameters.put("tag", "doris external hive talbe");
-        sd.setParameters(parameters);
-        return sd;
-    }
-
-    private static void setFileFormat(String fileFormat, StorageDescriptor sd) {
-        String inputFormat;
-        String outputFormat;
-        String serDe;
-        if (fileFormat.equalsIgnoreCase("orc")) {
-            inputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
-            outputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";
-            serDe = "org.apache.hadoop.hive.ql.io.orc.OrcSerde";
-        } else if (fileFormat.equalsIgnoreCase("parquet")) {
-            inputFormat = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat";
-            outputFormat = "'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat";
-            serDe = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe";
-        } else {
-            throw new IllegalArgumentException("Creating table with an unsupported file format: " + fileFormat);
-        }
-        SerDeInfo serDeInfo = new SerDeInfo();
-        serDeInfo.setSerializationLib(serDe);
-        sd.setSerdeInfo(serDeInfo);
-        sd.setInputFormat(inputFormat);
-        sd.setOutputFormat(outputFormat);
-    }
-
-    private static Pair<List<FieldSchema>, List<FieldSchema>> toHiveSchema(List<Column> columns,
-                Set<String> partitionSet) {
-        List<FieldSchema> hiveCols = new ArrayList<>();
-        List<FieldSchema> hiveParts = new ArrayList<>();
-        for (Column column : columns) {
-            FieldSchema hiveFieldSchema = new FieldSchema();
-            // TODO: add doc, just support doris type
-            hiveFieldSchema.setType(HiveMetaStoreClientHelper.dorisTypeToHiveType(column.getType()));
-            hiveFieldSchema.setName(column.getName());
-            hiveFieldSchema.setComment(column.getComment());
-            if (partitionSet.contains(column.getName())) {
-                hiveParts.add(hiveFieldSchema);
-            } else {
-                hiveCols.add(hiveFieldSchema);
-            }
-        }
-        return Pair.of(hiveCols, hiveParts);
-    }
-
     @Override
     public void dropDatabase(String dbName) {
         try (ThriftHMSClient client = getClient()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java
index 8a37bb71cc6..a670290a163 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java
@@ -153,6 +153,7 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
                     columnIdx.add(i);
                 }
             }
+            // map the partition column indexes to the expr ids of the child's output
             List<ExprId> exprIds = columnIdx.stream()
                     .map(idx -> child().getOutput().get(idx).getExprId())
                     .collect(Collectors.toList());
diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
index b7e90218e10..8e2d436e219 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
@@ -22,10 +22,12 @@ import org.apache.doris.analysis.CreateDbStmt;
 import org.apache.doris.analysis.CreateTableStmt;
 import org.apache.doris.analysis.HashDistributionDesc;
 import org.apache.doris.analysis.SwitchStmt;
+import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.ExceptionChecker;
 import org.apache.doris.common.FeConstants;
+import org.apache.doris.datasource.TableMetadata;
 import org.apache.doris.nereids.parser.NereidsParser;
 import org.apache.doris.nereids.trees.plans.commands.CreateTableCommand;
 import org.apache.doris.nereids.trees.plans.commands.insert.InsertIntoTableCommand;
@@ -36,6 +38,8 @@ import org.apache.doris.utframe.TestWithFeService;
 import mockit.Mock;
 import mockit.MockUp;
 import mockit.Mocked;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Table;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
@@ -52,6 +56,8 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
     @Mocked
     private ThriftHMSCachedClient mockedHiveClient;
 
+    private List<FieldSchema> checkedHiveCols;
+
     @Override
     protected void runBeforeAll() throws Exception {
         connectContext.getSessionVariable().enableFallbackToOriginalPlanner = false;
@@ -101,6 +107,25 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                     }
                 };
             }
+
+            @Mock
+            public void createTable(TableMetadata tbl, boolean ignoreIfExists) {
+                if (tbl instanceof HiveTableMetadata) {
+                    Table table = HiveUtil.toHiveTable((HiveTableMetadata) tbl);
+                    if (checkedHiveCols == null) {
+                        // if checkedHiveCols is null, skip column check
+                        return;
+                    }
+                    List<FieldSchema> fieldSchemas = table.getSd().getCols();
+                    Assertions.assertEquals(checkedHiveCols.size(), fieldSchemas.size());
+                    for (int i = 0; i < checkedHiveCols.size(); i++) {
+                        FieldSchema checkedCol = checkedHiveCols.get(i);
+                        FieldSchema actualCol = fieldSchemas.get(i);
+                        Assertions.assertEquals(checkedCol.getName(), actualCol.getName().toLowerCase());
+                        Assertions.assertEquals(checkedCol.getType(), actualCol.getType().toLowerCase());
+                    }
+                }
+            }
         };
         CreateDbStmt createDbStmt = new CreateDbStmt(true, mockedDbName, dbProps);
         Env.getCurrentEnv().createDb(createDbStmt);
@@ -369,4 +394,110 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
         LogicalPlan plan2 = nereidsParser.parseSingle(insertSql2);
         Assertions.assertTrue(plan2 instanceof InsertOverwriteTableCommand);
     }
+
+    @Test
+    public void testComplexTypeCreateTable() throws Exception {
+        checkedHiveCols = new ArrayList<>(); // init it to enable check
+        switchHive();
+        useDatabase(mockedDbName);
+        String createArrayTypeTable = "CREATE TABLE complex_type_array(\n"
+                + "  `col1` ARRAY<BOOLEAN> COMMENT 'col1',\n"
+                + "  `col2` ARRAY<INT(11)> COMMENT 'col2',\n"
+                + "  `col3` ARRAY<DECIMAL(6,4)> COMMENT 'col3',\n"
+                + "  `col4` ARRAY<CHAR(11)> COMMENT 'col4',\n"
+                + "  `col5` ARRAY<CHAR> COMMENT 'col5'\n"
+                + ")  ENGINE=hive\n"
+                + "PROPERTIES ('file_format'='orc')";
+        List<FieldSchema> checkArrayCols = new ArrayList<>();
+        checkArrayCols.add(new FieldSchema("col1", "array<boolean>", ""));
+        checkArrayCols.add(new FieldSchema("col2", "array<int>", ""));
+        checkArrayCols.add(new FieldSchema("col3", "array<decimal(6,4)>", ""));
+        checkArrayCols.add(new FieldSchema("col4", "array<char(11)>", ""));
+        checkArrayCols.add(new FieldSchema("col5", "array<char(1)>", ""));
+        resetCheckedColumns(checkArrayCols);
+
+        LogicalPlan plan = createTablesAndReturnPlans(true, createArrayTypeTable).get(0);
+        List<Column> columns = ((CreateTableCommand) plan).getCreateTableInfo().translateToLegacyStmt().getColumns();
+        Assertions.assertEquals(5, columns.size());
+        dropTable("complex_type_array", true);
+
+        String createMapTypeTable = "CREATE TABLE complex_type_map(\n"
+                + "  `col1` MAP<int,string> COMMENT 'col1',\n"
+                + "  `col2` MAP<string,double> COMMENT 'col2',\n"
+                + "  `col3` MAP<string,BOOLEAN> COMMENT 'col3',\n"
+                + "  `col4` MAP<BOOLEAN,BOOLEAN> COMMENT 'col4'\n"
+                + ")  ENGINE=hive\n"
+                + "PROPERTIES ('file_format'='orc')";
+        checkArrayCols = new ArrayList<>();
+        checkArrayCols.add(new FieldSchema("col1", "map<int,string>", ""));
+        checkArrayCols.add(new FieldSchema("col2", "map<string,double>", ""));
+        checkArrayCols.add(new FieldSchema("col3", "map<string,boolean>", ""));
+        checkArrayCols.add(new FieldSchema("col4", "map<boolean,boolean>", ""));
+        resetCheckedColumns(checkArrayCols);
+
+        plan = createTablesAndReturnPlans(true, createMapTypeTable).get(0);
+        columns = ((CreateTableCommand) plan).getCreateTableInfo().translateToLegacyStmt().getColumns();
+        Assertions.assertEquals(4, columns.size());
+        dropTable("complex_type_map", true);
+
+        String createStructTypeTable = "CREATE TABLE complex_type_struct(\n"
+                + "  `col1` STRUCT<rates:ARRAY<double>,name:string> COMMENT 'col1',\n"
+                + "  `col2` STRUCT<id:INT,age:TINYINT> COMMENT 'col2',\n"
+                + "  `col3` STRUCT<pre:DECIMAL(6,4)> COMMENT 'col3',\n"
+                + "  `col4` STRUCT<bul:BOOLEAN,buls:ARRAY<BOOLEAN>> COMMENT 'col4'\n"
+                + ")  ENGINE=hive\n"
+                + "PROPERTIES ('file_format'='orc')";
+        checkArrayCols = new ArrayList<>();
+        checkArrayCols.add(new FieldSchema("col1", "struct<rates:array<double>,name:string>", ""));
+        checkArrayCols.add(new FieldSchema("col2", "struct<id:int,age:tinyint>", ""));
+        checkArrayCols.add(new FieldSchema("col3", "struct<pre:decimal(6,4)>", ""));
+        checkArrayCols.add(new FieldSchema("col4", "struct<bul:boolean,buls:array<boolean>>", ""));
+        resetCheckedColumns(checkArrayCols);
+
+        plan = createTablesAndReturnPlans(true, createStructTypeTable).get(0);
+        columns = ((CreateTableCommand) plan).getCreateTableInfo().translateToLegacyStmt().getColumns();
+        Assertions.assertEquals(4, columns.size());
+        dropTable("complex_type_struct", true);
+
+        String compoundTypeTable1 = "CREATE TABLE complex_type_compound1(\n"
+                + "  `col1` ARRAY<MAP<string,double>> COMMENT 'col1',\n"
+                + "  `col2` ARRAY<STRUCT<name:string,gender:boolean,rate:decimal(3,1)>> COMMENT 'col2'\n"
+                + ")  ENGINE=hive\n"
+                + "PROPERTIES ('file_format'='orc')";
+        checkArrayCols = new ArrayList<>();
+        checkArrayCols.add(new FieldSchema("col1", "array<map<string,double>>", ""));
+        checkArrayCols.add(new FieldSchema("col2",
+                "array<struct<name:string,gender:boolean,rate:decimal(3,1)>>", ""));
+        resetCheckedColumns(checkArrayCols);
+
+        plan = createTablesAndReturnPlans(true, compoundTypeTable1).get(0);
+        columns = ((CreateTableCommand) plan).getCreateTableInfo().translateToLegacyStmt().getColumns();
+        Assertions.assertEquals(2, columns.size());
+        dropTable("complex_type_compound1", true);
+
+        String compoundTypeTable2 = "CREATE TABLE complex_type_compound2(\n"
+                + "  `col1` MAP<string,ARRAY<double>> COMMENT 'col1',\n"
+                + "  `col2` MAP<string,ARRAY<MAP<int, string>>> COMMENT 'col2',\n"
+                + "  `col3` MAP<string,MAP<int,double>> COMMENT 'col3',\n"
+                + "  `col4` MAP<bigint,STRUCT<name:string,gender:boolean,rate:decimal(3,1)>> COMMENT 'col4'\n"
+                + ")  ENGINE=hive\n"
+                + "PROPERTIES ('file_format'='orc')";
+        checkArrayCols = new ArrayList<>();
+        checkArrayCols.add(new FieldSchema("col1", "map<string,array<double>>", ""));
+        checkArrayCols.add(new FieldSchema("col2", "map<string,array<map<int,string>>>", ""));
+        checkArrayCols.add(new FieldSchema("col3", "map<string,map<int,double>>", ""));
+        checkArrayCols.add(new FieldSchema("col4",
+                "map<bigint,struct<name:string,gender:boolean,rate:decimal(3,1)>>", ""));
+        resetCheckedColumns(checkArrayCols);
+
+        plan = createTablesAndReturnPlans(true, compoundTypeTable2).get(0);
+        columns = ((CreateTableCommand) plan).getCreateTableInfo().translateToLegacyStmt().getColumns();
+        Assertions.assertEquals(4, columns.size());
+        dropTable("complex_type_compound2", true);
+    }
+
+    private void resetCheckedColumns(List<FieldSchema> checkArrayCols) {
+        checkedHiveCols.clear();
+        checkedHiveCols.addAll(checkArrayCols);
+    }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
