Author: thejas
Date: Sat Jan 25 20:28:15 2014
New Revision: 1561391
URL: http://svn.apache.org/r1561391
Log:
HIVE-6205 : alter <table> partition column throws NPE in authorization (Navis via Thejas Nair)
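
For context, the statement form that hit the NPE is the partition key column type change. The example below is lifted from the alter_partition_coltype test updated in this commit (table and column names are the test's own):

  alter table alter_coltype partition column (dt int);

Before this change the statement resolved to no HiveOperation (note the "PREHOOK: type: null" lines removed from the .q.out below), which the authorization path did not tolerate; it is now mapped to ALTERTABLE_PARTCOLTYPE.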
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out
hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Sat Jan 25 20:28:15 2014
@@ -78,7 +78,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_ALTERTABLE_PARTITION:
if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
return ast;
- } else if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES) {
+ } else if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) {
// unsupported
throw new SemanticException("Operation not supported.");
} else {
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Sat Jan 25 20:28:15 2014
@@ -75,7 +75,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_ALTERTABLE_PARTITION:
if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
return ast;
- } else if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES) {
+ } else if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) {
// unsupported
throw new SemanticException("Operation not supported.");
} else {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sat Jan 25 20:28:15 2014
@@ -527,55 +527,54 @@ public class Driver implements CommandPr
SessionState ss = SessionState.get();
HiveOperation op = ss.getHiveOperation();
Hive db = sem.getDb();
- if(ss.isAuthorizationModeV2()){
+ if (ss.isAuthorizationModeV2()) {
doAuthorizationV2(ss, op, inputs, outputs);
return;
}
- if (op != null) {
- if (op.equals(HiveOperation.CREATEDATABASE)) {
- ss.getAuthorizer().authorize(
- op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
- } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
- || op.equals(HiveOperation.CREATETABLE)) {
- ss.getAuthorizer().authorize(
- db.getDatabase(SessionState.get().getCurrentDatabase()), null,
- HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
- } else {
- if (op.equals(HiveOperation.IMPORT)) {
- ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
- if (!isa.existsTable()) {
- ss.getAuthorizer().authorize(
- db.getDatabase(SessionState.get().getCurrentDatabase()), null,
- HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
- }
+ if (op == null) {
+ throw new HiveException("Operation should not be null");
+ }
+ if (op.equals(HiveOperation.CREATEDATABASE)) {
+ ss.getAuthorizer().authorize(
+ op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
+ } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
+ || op.equals(HiveOperation.CREATETABLE)) {
+ ss.getAuthorizer().authorize(
+ db.getDatabase(SessionState.get().getCurrentDatabase()), null,
+ HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+ } else {
+ if (op.equals(HiveOperation.IMPORT)) {
+ ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
+ if (!isa.existsTable()) {
+ ss.getAuthorizer().authorize(
+ db.getDatabase(SessionState.get().getCurrentDatabase()), null,
+ HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
}
}
- if (outputs != null && outputs.size() > 0) {
- //do authorization for each output
- for (WriteEntity write : outputs) {
- if (write.getType() == Entity.Type.DATABASE) {
- ss.getAuthorizer().authorize(write.getDatabase(),
- null, op.getOutputRequiredPrivileges());
- continue;
- }
-
- if (write.getType() == WriteEntity.Type.PARTITION) {
- Partition part = db.getPartition(write.getTable(), write
- .getPartition().getSpec(), false);
- if (part != null) {
- ss.getAuthorizer().authorize(write.getPartition(), null,
- op.getOutputRequiredPrivileges());
- continue;
- }
- }
+ }
+ if (outputs != null && outputs.size() > 0) {
+ for (WriteEntity write : outputs) {
+ if (write.getType() == Entity.Type.DATABASE) {
+ ss.getAuthorizer().authorize(write.getDatabase(),
+ null, op.getOutputRequiredPrivileges());
+ continue;
+ }
- if (write.getTable() != null) {
- ss.getAuthorizer().authorize(write.getTable(), null,
+ if (write.getType() == WriteEntity.Type.PARTITION) {
+ Partition part = db.getPartition(write.getTable(), write
+ .getPartition().getSpec(), false);
+ if (part != null) {
+ ss.getAuthorizer().authorize(write.getPartition(), null,
op.getOutputRequiredPrivileges());
+ continue;
}
}
+ if (write.getTable() != null) {
+ ss.getAuthorizer().authorize(write.getTable(), null,
+ op.getOutputRequiredPrivileges());
+ }
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sat Jan 25 20:28:15 2014
@@ -233,11 +233,11 @@ public class DDLSemanticAnalyzer extends
ast = (ASTNode) ast.getChild(1);
if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
analyzeAlterTableFileFormat(ast, tableName, partSpec);
- } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE) {
+ } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROTECTMODE) {
analyzeAlterTableProtectMode(ast, tableName, partSpec);
} else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION) {
analyzeAlterTableLocation(ast, tableName, partSpec);
- } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES) {
+ } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) {
analyzeAlterTablePartMergeFiles(tablePart, ast, tableName, partSpec);
} else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) {
analyzeAlterTableSerde(ast, tableName, partSpec);
@@ -365,8 +365,8 @@ public class DDLSemanticAnalyzer extends
case HiveParser.TOK_ALTERTABLE_DROPPARTS:
analyzeAlterTableDropParts(ast, false);
break;
- case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
- analyzeAlterTableAlterParts(ast);
+ case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
+ analyzeAlterTablePartColType(ast);
break;
case HiveParser.TOK_ALTERTABLE_PROPERTIES:
analyzeAlterTableProps(ast, false, false);
@@ -2433,7 +2433,7 @@ public class DDLSemanticAnalyzer extends
dropTblDesc), conf));
}
- private void analyzeAlterTableAlterParts(ASTNode ast)
+ private void analyzeAlterTablePartColType(ASTNode ast)
throws SemanticException {
// get table name
String tblName = getUnescapedName((ASTNode)ast.getChild(0));
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sat Jan 25 20:28:15 2014
@@ -138,8 +138,9 @@ TOK_ALTERTABLE_RENAMEPART;
TOK_ALTERTABLE_REPLACECOLS;
TOK_ALTERTABLE_ADDPARTS;
TOK_ALTERTABLE_DROPPARTS;
-TOK_ALTERTABLE_ALTERPARTS;
-TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE;
+TOK_ALTERTABLE_PARTCOLTYPE;
+TOK_ALTERTABLE_PROTECTMODE;
+TOK_ALTERTABLE_MERGEFILES;
TOK_ALTERTABLE_TOUCH;
TOK_ALTERTABLE_ARCHIVE;
TOK_ALTERTABLE_UNARCHIVE;
@@ -285,7 +286,6 @@ TOK_DATABASEPROPERTIES;
TOK_DATABASELOCATION;
TOK_DBPROPLIST;
TOK_ALTERDATABASE_PROPERTIES;
-TOK_ALTERTABLE_ALTERPARTS_MERGEFILES;
TOK_TABNAME;
TOK_TABSRC;
TOK_RESTRICT;
@@ -900,8 +900,16 @@ alterTableStatementSuffix
| alterTblPartitionStatement
| alterStatementSuffixSkewedby
| alterStatementSuffixExchangePartition
+ | alterStatementPartitionKeyType
;
+alterStatementPartitionKeyType
+@init {msgs.push("alter partition key type"); }
+@after {msgs.pop();}
+ : identifier KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
+ -> ^(TOK_ALTERTABLE_PARTCOLTYPE identifier columnNameType)
+ ;
+
alterViewStatementSuffix
@init { msgs.push("alter view statement"); }
@after { msgs.pop(); }
@@ -1058,8 +1066,6 @@ alterTblPartitionStatement
@after {msgs.pop();}
: tablePartitionPrefix alterTblPartitionStatementSuffix
-> ^(TOK_ALTERTABLE_PARTITION tablePartitionPrefix alterTblPartitionStatementSuffix)
- |Identifier KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
- -> ^(TOK_ALTERTABLE_ALTERPARTS Identifier columnNameType)
;
alterTblPartitionStatementSuffix
@@ -1151,7 +1157,7 @@ alterStatementSuffixProtectMode
@init { msgs.push("alter partition protect mode statement"); }
@after { msgs.pop(); }
: alterProtectMode
- -> ^(TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE alterProtectMode)
+ -> ^(TOK_ALTERTABLE_PROTECTMODE alterProtectMode)
;
alterStatementSuffixRenamePart
@@ -1165,7 +1171,7 @@ alterStatementSuffixMergeFiles
@init { msgs.push(""); }
@after { msgs.pop(); }
: KW_CONCATENATE
- -> ^(TOK_ALTERTABLE_ALTERPARTS_MERGEFILES)
+ -> ^(TOK_ALTERTABLE_MERGEFILES)
;
alterProtectMode
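
As a quick illustration of the relocated rule (statement text borrowed from the tests updated below), alterStatementPartitionKeyType parses the following into a TOK_ALTERTABLE_PARTCOLTYPE node whose children are the table identifier and the columnNameType:

  alter table alter_coltype partition column (ts double);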
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Sat Jan 25 20:28:15 2014
@@ -102,11 +102,12 @@ public final class SemanticAnalyzerFacto
commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED);
commandType.put(HiveParser.TOK_ANALYZE, HiveOperation.ANALYZE_TABLE);
commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
+ commandType.put(HiveParser.TOK_ALTERTABLE_PARTCOLTYPE, HiveOperation.ALTERTABLE_PARTCOLTYPE);
}
static {
tablePartitionCommandType.put(
- HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
+ HiveParser.TOK_ALTERTABLE_PROTECTMODE,
new HiveOperation[] { HiveOperation.ALTERTABLE_PROTECTMODE,
HiveOperation.ALTERPARTITION_PROTECTMODE });
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT,
@@ -115,7 +116,7 @@ public final class SemanticAnalyzerFacto
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION,
new HiveOperation[] { HiveOperation.ALTERTABLE_LOCATION,
HiveOperation.ALTERPARTITION_LOCATION });
- tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES,
+ tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_MERGEFILES,
new HiveOperation[] {HiveOperation.ALTERTABLE_MERGEFILES,
HiveOperation.ALTERPARTITION_MERGEFILES });
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER,
@@ -172,6 +173,7 @@ public final class SemanticAnalyzerFacto
case HiveParser.TOK_DROPTABLE_PROPERTIES:
case HiveParser.TOK_ALTERTABLE_SERIALIZER:
case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+ case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
case HiveParser.TOK_ALTERINDEX_REBUILD:
case HiveParser.TOK_ALTERINDEX_PROPERTIES:
case HiveParser.TOK_ALTERVIEW_PROPERTIES:
@@ -196,7 +198,6 @@ public final class SemanticAnalyzerFacto
case HiveParser.TOK_ALTERTABLE_TOUCH:
case HiveParser.TOK_ALTERTABLE_ARCHIVE:
case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
- case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
case HiveParser.TOK_LOCKTABLE:
case HiveParser.TOK_UNLOCKTABLE:
case HiveParser.TOK_LOCKDB:
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Sat Jan 25 20:28:15 2014
@@ -106,6 +106,7 @@ public enum HiveOperation {
ALTERTABLE_SKEWED("ALTERTABLE_SKEWED", new Privilege[] {Privilege.ALTER_METADATA}, null),
ALTERTBLPART_SKEWED_LOCATION("ALTERTBLPART_SKEWED_LOCATION",
new Privilege[] {Privilege.ALTER_DATA}, null),
+ ALTERTABLE_PARTCOLTYPE("ALTERTABLE_PARTCOLTYPE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }),
ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null),
;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java Sat Jan 25 20:28:15 2014
@@ -49,6 +49,7 @@ public enum HiveOperationType {
ALTERTABLE_UNARCHIVE,
ALTERTABLE_PROPERTIES,
ALTERTABLE_SERIALIZER,
+ ALTERTABLE_PARTCOLTYPE,
ALTERPARTITION_SERIALIZER,
ALTERTABLE_SERDEPROPERTIES,
ALTERPARTITION_SERDEPROPERTIES,
Modified: hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out Sat Jan 25 20:28:15 2014
@@ -33,4 +33,4 @@ ts string
dt string None
ts string None
-FAILED: ParseException line 4:50 mismatched input ',' expecting ) near 'int' in alter table partition statement
+FAILED: ParseException line 4:50 mismatched input ',' expecting ) near 'int' in alter partition key type
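
This negative test exercises the multi-column form, which the new single-column alterStatementPartitionKeyType rule rejects at the comma; the statement is roughly the following (reconstructed from the error position, not quoted verbatim from the .q file):

  alter table alter_coltype partition column (dt int, ts int);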
Modified: hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out?rev=1561391&r1=1561390&r2=1561391&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out Sat Jan 25 20:28:15 2014
@@ -50,11 +50,11 @@ POSTHOOK: Lineage: alter_coltype PARTITI
25
PREHOOK: query: -- alter partition key column data type for dt column.
alter table alter_coltype partition column (dt int)
-PREHOOK: type: null
+PREHOOK: type: ALTERTABLE_PARTCOLTYPE
PREHOOK: Input: default@alter_coltype
POSTHOOK: query: -- alter partition key column data type for dt column.
alter table alter_coltype partition column (dt int)
-POSTHOOK: type: null
+POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
POSTHOOK: Input: default@alter_coltype
POSTHOOK: Output: default@alter_coltype
POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
@@ -240,11 +240,11 @@ POSTHOOK: Lineage: alter_coltype PARTITI
0
PREHOOK: query: -- alter partition key column data type for ts column.
alter table alter_coltype partition column (ts double)
-PREHOOK: type: null
+PREHOOK: type: ALTERTABLE_PARTCOLTYPE
PREHOOK: Input: default@alter_coltype
POSTHOOK: query: -- alter partition key column data type for ts column.
alter table alter_coltype partition column (ts double)
-POSTHOOK: type: null
+POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
POSTHOOK: Input: default@alter_coltype
POSTHOOK: Output: default@alter_coltype
POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
@@ -252,10 +252,10 @@ POSTHOOK: Lineage: alter_coltype PARTITI
POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: alter table alter_coltype partition column (dt string)
-PREHOOK: type: null
+PREHOOK: type: ALTERTABLE_PARTCOLTYPE
PREHOOK: Input: default@alter_coltype
POSTHOOK: query: alter table alter_coltype partition column (dt string)
-POSTHOOK: type: null
+POSTHOOK: type: ALTERTABLE_PARTCOLTYPE
POSTHOOK: Input: default@alter_coltype
POSTHOOK: Output: default@alter_coltype
POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]