This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 01b21da82d85a3af451c6772942396577b147030
Author: slothever <18522955+w...@users.noreply.github.com>
AuthorDate: Sat Mar 30 22:47:24 2024 +0800

    [feature](insert)add hive insert plan ut and remove redundant fields (#33051)
    
    add hive insert sink plan UT case
    remove some deprecated code
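
    The shape of the cleanup: instead of computing the hive partition keys at
    bind time and threading a Set<String> through the logical and physical
    sink constructors, the physical sink derives the keys from the target
    table only where they are needed, when computing its required
    distribution. A minimal sketch of that flow (hypothetical stand-in types,
    not the exact Doris classes):

        import java.util.ArrayList;
        import java.util.List;
        import java.util.Set;

        class SinkShuffleSketch {
            interface Table {
                Set<String> getPartitionColumnNames();
                List<String> getFullSchemaNames(); // simplified: column names only
            }

            // Roughly what getRequirePhysicalProperties() does after this
            // change: shuffle by the partition columns when the table is
            // partitioned; an empty result means any distribution
            // (random/gather) is acceptable.
            static List<Integer> requiredShuffleColumns(Table target) {
                Set<String> hivePartitionKeys = target.getPartitionColumnNames();
                List<Integer> columnIdx = new ArrayList<>();
                List<String> fullSchema = target.getFullSchemaNames();
                for (int i = 0; i < fullSchema.size(); i++) {
                    if (hivePartitionKeys.contains(fullSchema.get(i))) {
                        columnIdx.add(i);
                    }
                }
                return columnIdx;
            }
        }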
---
 .../DistributionSpecTableSinkHashPartitioned.java  |   3 -
 .../doris/nereids/rules/analysis/BindSink.java     |   7 -
 ...ogicalHiveTableSinkToPhysicalHiveTableSink.java |   3 +-
 .../commands/insert/InsertIntoTableCommand.java    |   2 +-
 .../trees/plans/logical/LogicalHiveTableSink.java  |  19 +--
 .../plans/physical/PhysicalHiveTableSink.java      |  24 +--
 .../datasource/hive/HiveDDLAndDMLPlanTest.java     | 166 ++++++++++++++++++---
 7 files changed, 161 insertions(+), 63 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/properties/DistributionSpecTableSinkHashPartitioned.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/properties/DistributionSpecTableSinkHashPartitioned.java
index 4333bd956ee..e9fc32c1fd6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/properties/DistributionSpecTableSinkHashPartitioned.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/properties/DistributionSpecTableSinkHashPartitioned.java
@@ -26,9 +26,6 @@ import java.util.List;
  */
 public class DistributionSpecTableSinkHashPartitioned extends DistributionSpec {
 
-    public static final DistributionSpecTableSinkHashPartitioned INSTANCE =
-            new DistributionSpecTableSinkHashPartitioned();
-
     private List<ExprId> outputColExprIds;
 
     public DistributionSpecTableSinkHashPartitioned() {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
index 20f05729822..2400337d50e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
@@ -68,12 +68,10 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
@@ -389,15 +387,10 @@ public class BindSink implements AnalysisRuleFactory {
                 return column;
             }).collect(ImmutableList.toImmutableList());
         }
-        Set<String> hivePartitionKeys = table.getRemoteTable()
-                .getPartitionKeys().stream()
-                .map(FieldSchema::getName)
-                .collect(Collectors.toSet());
         LogicalHiveTableSink<?> boundSink = new LogicalHiveTableSink<>(
                 database,
                 table,
                 bindColumns,
-                hivePartitionKeys,
                 child.getOutput().stream()
                         .map(NamedExpression.class::cast)
                         .collect(ImmutableList.toImmutableList()),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalHiveTableSinkToPhysicalHiveTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalHiveTableSinkToPhysicalHiveTableSink.java
index f2128658616..153216d6ac7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalHiveTableSinkToPhysicalHiveTableSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalHiveTableSinkToPhysicalHiveTableSink.java
@@ -42,8 +42,7 @@ public class LogicalHiveTableSinkToPhysicalHiveTableSink extends OneImplementati
                     sink.getLogicalProperties(),
                     null,
                     null,
-                    sink.child(),
-                    sink.getHivePartitionKeys());
+                    sink.child());
         }).toRule(RuleType.LOGICAL_HIVE_TABLE_SINK_TO_PHYSICAL_HIVE_TABLE_SINK_RULE);
     }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java
index 3cfd40cabb6..61024345c06 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java
@@ -76,7 +76,7 @@ public class InsertIntoTableCommand extends Command implements ForwardWithSync,
      * constructor
      */
     public InsertIntoTableCommand(LogicalPlan logicalQuery, Optional<String> labelName,
-            Optional<InsertCommandContext> insertCtx) {
+                                  Optional<InsertCommandContext> insertCtx) {
         super(PlanType.INSERT_INTO_TABLE_COMMAND);
         this.logicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null");
         this.labelName = Objects.requireNonNull(labelName, "labelName should not be null");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalHiveTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalHiveTableSink.java
index 360d227b0f0..147f14e5e69 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalHiveTableSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalHiveTableSink.java
@@ -37,7 +37,6 @@ import com.google.common.collect.ImmutableList;
 import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.Set;
 
 /**
  * logical hive table sink for insert command
@@ -47,7 +46,6 @@ public class LogicalHiveTableSink<CHILD_TYPE extends Plan> extends LogicalTableS
     // bound data sink
     private final HMSExternalDatabase database;
     private final HMSExternalTable targetTable;
-    private final Set<String> hivePartitionKeys;
     private final DMLCommandType dmlCommandType;
 
     /**
@@ -56,7 +54,6 @@ public class LogicalHiveTableSink<CHILD_TYPE extends Plan> extends LogicalTableS
     public LogicalHiveTableSink(HMSExternalDatabase database,
                                 HMSExternalTable targetTable,
                                 List<Column> cols,
-                                Set<String> hivePartitionKeys,
                                 List<NamedExpression> outputExprs,
                                 DMLCommandType dmlCommandType,
                                 Optional<GroupExpression> groupExpression,
@@ -66,26 +63,25 @@ public class LogicalHiveTableSink<CHILD_TYPE extends Plan> extends LogicalTableS
         this.database = Objects.requireNonNull(database, "database != null in LogicalHiveTableSink");
         this.targetTable = Objects.requireNonNull(targetTable, "targetTable != null in LogicalHiveTableSink");
         this.dmlCommandType = dmlCommandType;
-        this.hivePartitionKeys = hivePartitionKeys;
     }
 
     public Plan withChildAndUpdateOutput(Plan child) {
         List<NamedExpression> output = child.getOutput().stream()
                 .map(NamedExpression.class::cast)
                 .collect(ImmutableList.toImmutableList());
-        return new LogicalHiveTableSink<>(database, targetTable, cols, hivePartitionKeys, output,
+        return new LogicalHiveTableSink<>(database, targetTable, cols, output,
                 dmlCommandType, Optional.empty(), Optional.empty(), child);
     }
 
     @Override
     public Plan withChildren(List<Plan> children) {
         Preconditions.checkArgument(children.size() == 1, "LogicalHiveTableSink only accepts one child");
-        return new LogicalHiveTableSink<>(database, targetTable, cols, hivePartitionKeys, outputExprs,
+        return new LogicalHiveTableSink<>(database, targetTable, cols, outputExprs,
                 dmlCommandType, Optional.empty(), Optional.empty(), children.get(0));
     }
 
     public LogicalHiveTableSink<CHILD_TYPE> withOutputExprs(List<NamedExpression> outputExprs) {
-        return new LogicalHiveTableSink<>(database, targetTable, cols, hivePartitionKeys, outputExprs,
+        return new LogicalHiveTableSink<>(database, targetTable, cols, outputExprs,
                 dmlCommandType, Optional.empty(), Optional.empty(), child());
     }
 
@@ -97,10 +93,6 @@ public class LogicalHiveTableSink<CHILD_TYPE extends Plan> extends LogicalTableS
         return targetTable;
     }
 
-    public Set<String> getHivePartitionKeys() {
-        return hivePartitionKeys;
-    }
-
     public DMLCommandType getDmlCommandType() {
         return dmlCommandType;
     }
@@ -134,7 +126,6 @@ public class LogicalHiveTableSink<CHILD_TYPE extends Plan> extends LogicalTableS
                 "database", database.getFullName(),
                 "targetTable", targetTable.getName(),
                 "cols", cols,
-                "hivePartitionKeys", hivePartitionKeys,
                 "dmlCommandType", dmlCommandType
         );
     }
@@ -146,14 +137,14 @@ public class LogicalHiveTableSink<CHILD_TYPE extends Plan> extends LogicalTableS
 
     @Override
     public Plan withGroupExpression(Optional<GroupExpression> groupExpression) {
-        return new LogicalHiveTableSink<>(database, targetTable, cols, hivePartitionKeys, outputExprs,
+        return new LogicalHiveTableSink<>(database, targetTable, cols, outputExprs,
                 dmlCommandType, groupExpression, Optional.of(getLogicalProperties()), child());
     }
 
     @Override
     public Plan withGroupExprLogicalPropChildren(Optional<GroupExpression> groupExpression,
             Optional<LogicalProperties> logicalProperties, List<Plan> children) {
-        return new LogicalHiveTableSink<>(database, targetTable, cols, hivePartitionKeys, outputExprs,
+        return new LogicalHiveTableSink<>(database, targetTable, cols, outputExprs,
                 dmlCommandType, groupExpression, logicalProperties, children.get(0));
     }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java
index a670290a163..58141e61bf8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalHiveTableSink.java
@@ -35,7 +35,6 @@ import org.apache.doris.nereids.util.Utils;
 import org.apache.doris.statistics.Statistics;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -50,7 +49,6 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
     private final HMSExternalDatabase database;
     private final HMSExternalTable targetTable;
     private final List<Column> cols;
-    private final Set<String> hivePartitionKeys;
 
     /**
      * constructor
@@ -61,10 +59,9 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
                                  List<NamedExpression> outputExprs,
                                  Optional<GroupExpression> groupExpression,
                                  LogicalProperties logicalProperties,
-                                 CHILD_TYPE child,
-                                 Set<String> hivePartitionKeys) {
+                                 CHILD_TYPE child) {
         this(database, targetTable, cols, outputExprs, groupExpression, logicalProperties,
-                PhysicalProperties.GATHER, null, child, hivePartitionKeys);
+                PhysicalProperties.GATHER, null, child);
     }
 
     /**
@@ -78,14 +75,12 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
                                  LogicalProperties logicalProperties,
                                  PhysicalProperties physicalProperties,
                                  Statistics statistics,
-                                 CHILD_TYPE child,
-                                 Set<String> hivePartitionKeys) {
+                                 CHILD_TYPE child) {
         super(PlanType.PHYSICAL_HIVE_TABLE_SINK, outputExprs, groupExpression,
                 logicalProperties, physicalProperties, statistics, child);
         this.database = Objects.requireNonNull(database, "database != null in PhysicalHiveTableSink");
         this.targetTable = Objects.requireNonNull(targetTable, "targetTable != null in PhysicalHiveTableSink");
         this.cols = Utils.copyRequiredList(cols);
-        this.hivePartitionKeys = hivePartitionKeys;
     }
 
     public HMSExternalDatabase getDatabase() {
@@ -103,7 +98,7 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
     @Override
     public Plan withChildren(List<Plan> children) {
         return new PhysicalHiveTableSink<>(database, targetTable, cols, outputExprs, groupExpression,
-                getLogicalProperties(), physicalProperties, statistics, children.get(0), hivePartitionKeys);
+                getLogicalProperties(), physicalProperties, statistics, children.get(0));
     }
 
     @Override
@@ -119,20 +114,20 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
     @Override
     public Plan withGroupExpression(Optional<GroupExpression> groupExpression) {
         return new PhysicalHiveTableSink<>(database, targetTable, cols, outputExprs,
-                groupExpression, getLogicalProperties(), child(), hivePartitionKeys);
+                groupExpression, getLogicalProperties(), child());
     }
 
     @Override
     public Plan withGroupExprLogicalPropChildren(Optional<GroupExpression> groupExpression,
                                                  Optional<LogicalProperties> logicalProperties, List<Plan> children) {
         return new PhysicalHiveTableSink<>(database, targetTable, cols, outputExprs,
-                groupExpression, logicalProperties.get(), children.get(0), hivePartitionKeys);
+                groupExpression, logicalProperties.get(), children.get(0));
     }
 
     @Override
     public PhysicalPlan withPhysicalPropertiesAndStats(PhysicalProperties physicalProperties, Statistics statistics) {
         return new PhysicalHiveTableSink<>(database, targetTable, cols, outputExprs,
-                groupExpression, getLogicalProperties(), physicalProperties, statistics, child(), hivePartitionKeys);
+                groupExpression, getLogicalProperties(), physicalProperties, statistics, child());
     }
 
     /**
@@ -140,10 +135,7 @@ public class PhysicalHiveTableSink<CHILD_TYPE extends Plan> extends PhysicalTabl
      */
     @Override
     public PhysicalProperties getRequirePhysicalProperties() {
-        Set<String> hivePartitionKeys = targetTable.getRemoteTable()
-                .getPartitionKeys().stream()
-                .map(FieldSchema::getName)
-                .collect(Collectors.toSet());
+        Set<String> hivePartitionKeys = targetTable.getPartitionColumnNames();
         if (!hivePartitionKeys.isEmpty()) {
             List<Integer> columnIdx = new ArrayList<>();
             List<Column> fullSchema = targetTable.getFullSchema();
diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
index 9d51bf6005f..8247bd84b4d 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
@@ -24,15 +24,28 @@ import org.apache.doris.analysis.HashDistributionDesc;
 import org.apache.doris.analysis.SwitchStmt;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Env;
+import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.ExceptionChecker;
 import org.apache.doris.common.FeConstants;
 import org.apache.doris.datasource.TableMetadata;
+import org.apache.doris.nereids.NereidsPlanner;
+import org.apache.doris.nereids.StatementContext;
 import org.apache.doris.nereids.parser.NereidsParser;
+import org.apache.doris.nereids.properties.DistributionSpecTableSinkHashPartitioned;
+import org.apache.doris.nereids.properties.DistributionSpecTableSinkRandomPartitioned;
+import org.apache.doris.nereids.properties.PhysicalProperties;
+import org.apache.doris.nereids.trees.plans.Plan;
+import org.apache.doris.nereids.trees.plans.PlanType;
 import org.apache.doris.nereids.trees.plans.commands.CreateTableCommand;
 import org.apache.doris.nereids.trees.plans.commands.insert.InsertIntoTableCommand;
 import org.apache.doris.nereids.trees.plans.commands.insert.InsertOverwriteTableCommand;
 import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
+import org.apache.doris.nereids.trees.plans.logical.UnboundLogicalSink;
+import org.apache.doris.nereids.trees.plans.physical.PhysicalDistribute;
+import org.apache.doris.nereids.trees.plans.physical.PhysicalHiveTableSink;
+import org.apache.doris.nereids.trees.plans.physical.PhysicalPlan;
+import org.apache.doris.nereids.util.MemoTestUtils;
 import org.apache.doris.utframe.TestWithFeService;
 
 import mockit.Mock;
@@ -45,9 +58,11 @@ import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 
 public class HiveDDLAndDMLPlanTest extends TestWithFeService {
     private static final String mockedDbName = "mockedDb";
@@ -61,7 +76,6 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
     @Override
     protected void runBeforeAll() throws Exception {
         connectContext.getSessionVariable().enableFallbackToOriginalPlanner = false;
-        connectContext.getSessionVariable().enableNereidsTimeout = false;
         connectContext.getSessionVariable().enableNereidsDML = true;
         Config.enable_query_hive_views = false;
         // create test internal table
@@ -90,7 +104,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
 
         // create external catalog and switch it
         CreateCatalogStmt hiveCatalog = createStmt("create catalog hive properties('type' = 'hms',"
-                        + " 'hive.metastore.uris' = 'thrift://192.168.0.1:9083');");
+                + " 'hive.metastore.uris' = 'thrift://192.168.0.1:9083');");
         Env.getCurrentEnv().getCatalogMgr().createCatalog(hiveCatalog);
         switchHive();
 
@@ -366,32 +380,144 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
         Assertions.assertEquals(16, stmt2.getDistributionDesc().getBuckets());
     }
 
+    private static void mockTargetTable(List<Column> schema, Set<String> partNames) {
+        new MockUp<HMSExternalTable>(HMSExternalTable.class) {
+            @Mock
+            public boolean isView() {
+                return false;
+            }
+
+            @Mock
+            public List<Column> getFullSchema() {
+                return schema;
+            }
+
+            @Mock
+            public Set<String> getPartitionColumnNames() {
+                return partNames;
+            }
+        };
+    }
+
     @Test
     public void testInsertIntoPlanSql() throws Exception {
         switchHive();
         useDatabase(mockedDbName);
-        String insertSql = "INSERT INTO unpart_ctas_src values(1, 'v1')";
-        LogicalPlan plan = nereidsParser.parseSingle(insertSql);
-        Assertions.assertTrue(plan instanceof InsertIntoTableCommand);
-        // TODO check plan node, exchange node
+        String insertTable = "insert_table";
+        createTargetTable(insertTable);
+
+        // test un-partitioned table
+        List<Column> schema = new ArrayList<Column>() {
+            {
+                add(new Column("col1", PrimitiveType.INT));
+                add(new Column("col2", PrimitiveType.STRING));
+                add(new Column("col3", PrimitiveType.DECIMAL32));
+                add(new Column("col4", PrimitiveType.CHAR));
+            }
+        };
+
+        mockTargetTable(schema, new HashSet<>());
+        String unPartTargetTable = "unpart_" + insertTable;
+        String insertSql = "INSERT INTO " + unPartTargetTable + " values(1, 'v1', 32.1, 'aabb')";
+        PhysicalPlan physicalSink = getPhysicalPlan(insertSql, PhysicalProperties.SINK_RANDOM_PARTITIONED,
+                false);
+        checkUnpartTableSinkPlan(schema, unPartTargetTable, physicalSink);
+
+        String insertOverwriteSql = "INSERT OVERWRITE TABLE " + unPartTargetTable + " values(1, 'v1', 32.1, 'aabb')";
+        PhysicalPlan physicalOverwriteSink = getPhysicalPlan(insertOverwriteSql, PhysicalProperties.SINK_RANDOM_PARTITIONED,
+                true);
+        checkUnpartTableSinkPlan(schema, unPartTargetTable, physicalOverwriteSink);
+
+        // test partitioned table
+        schema = new ArrayList<Column>() {
+            {
+                add(new Column("col1", PrimitiveType.INT));
+                add(new Column("pt1", PrimitiveType.VARCHAR));
+                add(new Column("pt2", PrimitiveType.STRING));
+                add(new Column("pt3", PrimitiveType.DATE));
+            }
+        };
+        Set<String> parts = new HashSet<String>() {
+            {
+                add("pt1");
+                add("pt2");
+                add("pt3");
+            }
+        };
+        mockTargetTable(schema, parts);
+        String partTargetTable = "part_" + insertTable;
+
+        String insertSql2 = "INSERT INTO " + partTargetTable + " values(1, 'v1', 'v2', '2020-03-13')";
+        PhysicalPlan physicalSink2 = getPhysicalPlan(insertSql2,
+                new PhysicalProperties(new DistributionSpecTableSinkHashPartitioned()), false);
+        checkPartTableSinkPlan(schema, partTargetTable, physicalSink2);
+
+        String insertOverwrite2 = "INSERT OVERWRITE TABLE " + partTargetTable + " values(1, 'v1', 'v2', '2020-03-13')";
+        PhysicalPlan physicalOverwriteSink2 = getPhysicalPlan(insertOverwrite2,
+                new PhysicalProperties(new DistributionSpecTableSinkHashPartitioned()), true);
+        checkPartTableSinkPlan(schema, partTargetTable, physicalOverwriteSink2);
+    }
 
-        String insertSql2 = "INSERT INTO part_ctas_src values(1, 'v1', 'v2')";
-        LogicalPlan plan2 = nereidsParser.parseSingle(insertSql2);
-        Assertions.assertTrue(plan2 instanceof InsertIntoTableCommand);
+    private static void checkUnpartTableSinkPlan(List<Column> schema, String unPartTargetTable, PhysicalPlan physicalSink) {
+        Assertions.assertSame(physicalSink.getType(), PlanType.PHYSICAL_DISTRIBUTE);
+        // check exchange
+        PhysicalDistribute<?> distribute = (PhysicalDistribute<?>) physicalSink;
+        Assertions.assertTrue(distribute.getDistributionSpec() instanceof DistributionSpecTableSinkRandomPartitioned);
+        Assertions.assertSame(distribute.child(0).getType(), PlanType.PHYSICAL_HIVE_TABLE_SINK);
+        // check sink
+        PhysicalHiveTableSink<?> physicalHiveSink = (PhysicalHiveTableSink<?>) physicalSink.child(0);
+        Assertions.assertEquals(unPartTargetTable, physicalHiveSink.getTargetTable().getName());
+        Assertions.assertEquals(schema.size(), physicalHiveSink.getOutput().size());
     }
 
-    @Test
-    public void testInsertOverwritePlanSql() throws Exception {
-        switchHive();
-        useDatabase(mockedDbName);
-        String insertSql = "INSERT OVERWRITE TABLE unpart_ctas_src values(2, 'v2')";
-        LogicalPlan plan = nereidsParser.parseSingle(insertSql);
-        Assertions.assertTrue(plan instanceof InsertOverwriteTableCommand);
-        // TODO check plan node, exchange node
+    private static void checkPartTableSinkPlan(List<Column> schema, String unPartTargetTable, PhysicalPlan physicalSink) {
+        Assertions.assertSame(physicalSink.getType(), PlanType.PHYSICAL_DISTRIBUTE);
+        // check exchange
+        PhysicalDistribute<?> distribute2 = (PhysicalDistribute<?>) physicalSink;
+        Assertions.assertTrue(distribute2.getDistributionSpec() instanceof DistributionSpecTableSinkHashPartitioned);
+        Assertions.assertSame(distribute2.child(0).getType(), PlanType.PHYSICAL_HIVE_TABLE_SINK);
+        // check sink
+        PhysicalHiveTableSink<?> physicalHiveSink2 = (PhysicalHiveTableSink<?>) physicalSink.child(0);
+        Assertions.assertEquals(unPartTargetTable, physicalHiveSink2.getTargetTable().getName());
+        Assertions.assertEquals(schema.size(), physicalHiveSink2.getOutput().size());
+    }
 
-        String insertSql2 = "INSERT OVERWRITE TABLE part_ctas_src values(2, 'v3', 'v4')";
-        LogicalPlan plan2 = nereidsParser.parseSingle(insertSql2);
-        Assertions.assertTrue(plan2 instanceof InsertOverwriteTableCommand);
+    private void createTargetTable(String tableName) throws Exception {
+        String createInsertTable = "CREATE TABLE `unpart_" + tableName + "`(\n"
+                + "  `col1` INT COMMENT 'col1',\n"
+                + "  `col2` STRING COMMENT 'col2',\n"
+                + "  `col3` DECIMAL(3,1) COMMENT 'col3',\n"
+                + "  `col4` CHAR(11) COMMENT 'col4'\n"
+                + ")  ENGINE=hive\n"
+                + "PROPERTIES ('file_format'='orc')";
+        createTable(createInsertTable, true);
+
+        String createInsertPTable = "CREATE TABLE `part_" + tableName + "`(\n"
+                + "  `col1` INT COMMENT 'col1',\n"
+                + "  `pt1` VARCHAR(16) COMMENT 'pt1',\n"
+                + "  `pt2` STRING COMMENT 'pt2',\n"
+                + "  `pt3` DATE COMMENT 'pt3'\n"
+                + ")  ENGINE=hive\n"
+                + "PARTITION BY LIST (pt1, pt2, pt3) ()\n"
+                + "PROPERTIES ('file_format'='orc')";
+        createTable(createInsertPTable, true);
+    }
+
+    private PhysicalPlan getPhysicalPlan(String insertSql, PhysicalProperties physicalProperties,
+                                         boolean isOverwrite) {
+        LogicalPlan plan = nereidsParser.parseSingle(insertSql);
+        StatementContext statementContext = MemoTestUtils.createStatementContext(connectContext, insertSql);
+        Plan exPlan;
+        if (isOverwrite) {
+            Assertions.assertTrue(plan instanceof InsertOverwriteTableCommand);
+            exPlan = ((InsertOverwriteTableCommand) plan).getExplainPlan(connectContext);
+        } else {
+            Assertions.assertTrue(plan instanceof InsertIntoTableCommand);
+            exPlan = ((InsertIntoTableCommand) plan).getExplainPlan(connectContext);
+        }
+        Assertions.assertTrue(exPlan instanceof UnboundLogicalSink);
+        NereidsPlanner planner = new NereidsPlanner(statementContext);
+        return planner.plan((UnboundLogicalSink<?>) exPlan, physicalProperties);
     }
 
     @Test

