This is an automated email from the ASF dual-hosted git repository.
dengzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new a0364474ab2 HIVE-27682: AlterTableAlterPartitionOperation cannot change the type if the column has default partition (Zhihua Deng, reviewed by Sai Hemanth Gantasala)
a0364474ab2 is described below
commit a0364474ab2bf9926b32d7df31948fd49871cc35
Author: dengzh <[email protected]>
AuthorDate: Tue Oct 17 09:29:09 2023 +0800
HIVE-27682: AlterTableAlterPartitionOperation cannot change the type if the column has default partition (Zhihua Deng, reviewed by Sai Hemanth Gantasala)
Closes #4684
---
.../alter/AlterTableAlterPartitionOperation.java | 20 +++++++-------
.../clientpositive/alter_partition_coltype.q | 4 +++
.../llap/alter_partition_coltype.q.out | 31 ++++++++++++++++++++++
3 files changed, 45 insertions(+), 10 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionOperation.java
index 2046cbdb432..0fd8785d1bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionOperation.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.ddl.table.partition.alter;
import java.util.ArrayList;
import java.util.List;
-import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
@@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.ddl.DDLUtils;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -92,14 +91,15 @@ public class AlterTableAlterPartitionOperation extends DDLOperation<AlterTableAl
Converter converter = ObjectInspectorConverters.getConverter(
PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
- Set<Partition> partitions = context.getDb().getAllPartitionsOf(tbl);
- for (Partition part : partitions) {
- if (part.getName().equals(context.getConf().getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME))) {
- continue;
- }
-
+ List<String> partNames = context.getDb().getPartitionNames(tbl.getDbName(),
+ tbl.getTableName(), (short) -1);
+ for (String partName : partNames) {
try {
- String value = part.getValues().get(colIndex);
+ List<String> values = Warehouse.getPartValuesFromPartName(partName);
+ String value = values.get(colIndex);
+ if (value.equals(context.getConf().getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME))) {
+ continue;
+ }
Object convertedValue = converter.convert(value);
if (convertedValue == null) {
throw new HiveException(" Converting from " + TypeInfoFactory.stringTypeInfo + " to " + expectedType +
@@ -107,7 +107,7 @@ public class AlterTableAlterPartitionOperation extends DDLOperation<AlterTableAl
}
} catch (Exception e) {
throw new HiveException("Exception while converting " + TypeInfoFactory.stringTypeInfo + " to " +
- expectedType + " for value : " + part.getValues().get(colIndex));
+ expectedType + " for partition : " + partName + ", index: " + colIndex);
}
}
}
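
For context: the hunk above changes the default-partition check from the full partition name (part.getName(), e.g. "partcol1=2/partcol2=__HIVE_DEFAULT_PARTITION__") to the individual column value at colIndex. Below is a minimal, self-contained sketch (not part of the commit) of that per-column guard; the class and method names are hypothetical, while the Hive calls (Warehouse.getPartValuesFromPartName, HiveConf.getVar with ConfVars.DEFAULTPARTITIONNAME) are the ones used in the patch.

    import java.util.List;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.Warehouse;

    // Hypothetical helper illustrating the new guard: the default-partition marker is
    // compared against the single column value being retyped, not against the whole
    // partition name.
    public class DefaultPartitionValueCheck {
      static boolean isDefaultPartitionValue(HiveConf conf, String partName, int colIndex)
          throws Exception {
        // Warehouse.getPartValuesFromPartName splits the stored partition name
        // into its per-column values, as in the patched loop above.
        List<String> values = Warehouse.getPartValuesFromPartName(partName);
        String defaultName = conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
        return values.get(colIndex).equals(defaultName);
      }
    }

When this check returns true, the type conversion for that partition is skipped, which is what lets the new "alter table ... partition column (partcol2 int)" test below succeed while a __HIVE_DEFAULT_PARTITION__ partition exists.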
diff --git a/ql/src/test/queries/clientpositive/alter_partition_coltype.q b/ql/src/test/queries/clientpositive/alter_partition_coltype.q
index e4a7c0015ae..058d73c9288 100644
--- a/ql/src/test/queries/clientpositive/alter_partition_coltype.q
+++ b/ql/src/test/queries/clientpositive/alter_partition_coltype.q
@@ -63,6 +63,7 @@ insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) selec
insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5;
insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5;
+insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '2', '2', NULL;
alter table pt.alterdynamic_part_table partition column (partcol1 int);
@@ -71,5 +72,8 @@ explain extended select intcol from pt.alterdynamic_part_table where partcol1='1
explain extended select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__');
select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__');
+alter table pt.alterdynamic_part_table partition column (partcol2 int);
+select intcol from pt.alterdynamic_part_table where (partcol1=2 and partcol2=1) or (partcol1=2 and isnull(partcol2));
+
drop table pt.alterdynamic_part_table;
drop database pt;
diff --git a/ql/src/test/results/clientpositive/llap/alter_partition_coltype.q.out b/ql/src/test/results/clientpositive/llap/alter_partition_coltype.q.out
index 01a66016694..e6c1340008a 100644
--- a/ql/src/test/results/clientpositive/llap/alter_partition_coltype.q.out
+++ b/ql/src/test/results/clientpositive/llap/alter_partition_coltype.q.out
@@ -453,6 +453,16 @@ POSTHOOK: Input: default@src
POSTHOOK: Output: pt@alterdynamic_part_table
POSTHOOK: Output: pt@alterdynamic_part_table@partcol1=1/partcol2=1
POSTHOOK: Lineage: alterdynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol EXPRESSION []
+PREHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '2', '2', NULL
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: pt@alterdynamic_part_table
+POSTHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '2', '2', NULL
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: pt@alterdynamic_part_table
+POSTHOOK: Output: pt@alterdynamic_part_table@partcol1=2/partcol2=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: alterdynamic_part_table PARTITION(partcol1=2,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
PREHOOK: query: alter table pt.alterdynamic_part_table partition column (partcol1 int)
PREHOOK: type: ALTERTABLE_PARTCOLTYPE
PREHOOK: Input: pt@alterdynamic_part_table
@@ -603,6 +613,27 @@ POSTHOOK: Input: pt@alterdynamic_part_table
POSTHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=1
#### A masked pattern was here ####
1
+PREHOOK: query: alter table pt.alterdynamic_part_table partition column (partcol2 int)
+PREHOOK: type: ALTERTABLE_PARTCOLTYPE
+PREHOOK: Input: pt@alterdynamic_part_table
+POSTHOOK: query: alter table pt.alterdynamic_part_table partition column (partcol2 int)
+POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
+POSTHOOK: Input: pt@alterdynamic_part_table
+POSTHOOK: Output: pt@alterdynamic_part_table
+PREHOOK: query: select intcol from pt.alterdynamic_part_table where (partcol1=2 and partcol2=1) or (partcol1=2 and isnull(partcol2))
+PREHOOK: type: QUERY
+PREHOOK: Input: pt@alterdynamic_part_table
+PREHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=1
+PREHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=__HIVE_DEFAULT_PARTITION__
+#### A masked pattern was here ####
+POSTHOOK: query: select intcol from pt.alterdynamic_part_table where (partcol1=2 and partcol2=1) or (partcol1=2 and isnull(partcol2))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: pt@alterdynamic_part_table
+POSTHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=1
+POSTHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=__HIVE_DEFAULT_PARTITION__
+#### A masked pattern was here ####
+1
+2
PREHOOK: query: drop table pt.alterdynamic_part_table
PREHOOK: type: DROPTABLE
PREHOOK: Input: pt@alterdynamic_part_table