Author: namit
Date: Fri Feb 22 16:33:43 2013
New Revision: 1449109

URL: http://svn.apache.org/r1449109
Log:
HIVE-3672 Support altering partition column type in Hive
(Jingwei Lu via namit)

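For context, the new DDL syntax added by this patch, as exercised by the added clientpositive test (the table and column names below are taken from that test):

    -- create a table with string partition keys, then retype one of them
    create table alter_coltype(key string, value string) partitioned by (dt string, ts string);

    -- new statement form: change the declared type of the dt partition key to int
    alter table alter_coltype partition column (dt int);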

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java
    hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_2columns.q
    hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidcolname.q
    hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidtype.q
    hive/trunk/ql/src/test/queries/clientpositive/alter_partition_coltype.q
    hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out
    hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out
    hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out
    hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
Modified:
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java Fri Feb 22 16:33:43 2013
@@ -425,8 +425,9 @@ public class HiveAlterHandler implements
     while (oldPartKeysIter.hasNext()) {
       oldFs = oldPartKeysIter.next();
       newFs = newPartKeysIter.next();
-      if (!oldFs.getName().equals(newFs.getName()) ||
-          !oldFs.getType().equals(newFs.getType())) {
+      // Alter table can change the type of partition key now.
+      // So check the column name only.
+      if (!oldFs.getName().equals(newFs.getName())) {
         return false;
       }
     }

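With the check above relaxed to compare partition key names only, a key type change now passes through alterTable, and pre-existing partitions keep their original key values. The added clientpositive test covers exactly this sequence:

    alter table alter_coltype partition column (dt int);
    -- the partition created earlier as dt='100x' is still queryable
    select count(*) from alter_coltype where dt = '100x';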
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Feb 22 16:33:43 2013
@@ -113,6 +113,7 @@ import org.apache.hadoop.hive.ql.plan.Al
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -418,6 +419,11 @@ public class DDLTask extends Task<DDLWor
         return mergeFiles(db, mergeFilesDesc);
       }
 
+      AlterTableAlterPartDesc alterPartDesc = work.getAlterTableAlterPartDesc();
+      if(alterPartDesc != null) {
+        return alterTableAlterPart(db, alterPartDesc);
+      }
+
       TruncateTableDesc truncateTableDesc = work.getTruncateTblDesc();
       if (truncateTableDesc != null) {
         return truncateTable(db, truncateTableDesc);
@@ -1070,6 +1076,49 @@ public class DDLTask extends Task<DDLWor
   }
 
   /**
+  * Alter partition column type in a table
+  *
+  * @param db
+  *          Database to rename the partition.
+  * @param alterPartitionDesc
+  *          change partition column type.
+  * @return Returns 0 when execution succeeds and above 0 if it fails.
+  * @throws HiveException
+  */
+  private int alterTableAlterPart(Hive db, AlterTableAlterPartDesc alterPartitionDesc)
+    throws HiveException {
+
+    Table tbl = db.getTable(alterPartitionDesc.getDbName(), alterPartitionDesc.getTableName());
+    String tabName = alterPartitionDesc.getTableName();
+
+    // This is checked by DDLSemanticAnalyzer
+    assert(tbl.isPartitioned());
+
+    List<FieldSchema> newPartitionKeys = new ArrayList<FieldSchema>();
+
+    for(FieldSchema col : tbl.getTTable().getPartitionKeys()) {
+      if (col.getName().compareTo(alterPartitionDesc.getPartKeySpec().getName()) == 0) {
+        newPartitionKeys.add(alterPartitionDesc.getPartKeySpec());
+      } else {
+        newPartitionKeys.add(col);
+      }
+    }
+
+    tbl.getTTable().setPartitionKeys(newPartitionKeys);
+
+    try {
+      db.alterTable(tabName, tbl);
+    } catch (InvalidOperationException e) {
+      throw new HiveException("Unable to update table");
+    }
+
+    work.getInputs().add(new ReadEntity(tbl));
+    work.getOutputs().add(new WriteEntity(tbl));
+
+    return 0;
+  }
+
+  /**
    * Rewrite the partition's metadata and force the pre/post execute hooks to
    * be fired.
    *

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Feb 22 16:33:43 2013
@@ -126,6 +126,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 
 /**
  * DDLSemanticAnalyzer.
@@ -330,6 +331,9 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_ALTERTABLE_DROPPARTS:
       analyzeAlterTableDropParts(ast, false);
       break;
+    case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
+      analyzeAlterTableAlterParts(ast);
+      break;
     case HiveParser.TOK_ALTERTABLE_PROPERTIES:
       analyzeAlterTableProps(ast, false, false);
       break;
@@ -2277,7 +2281,60 @@ public class DDLSemanticAnalyzer extends
         dropTblDesc), conf));
   }
 
-  /**
+  private void analyzeAlterTableAlterParts(ASTNode ast)
+      throws SemanticException {
+    // get table name
+    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+
+    Table tab = null;
+
+    // check if table exists.
+    try {
+      tab = db.getTable(db.getCurrentDatabase(), tblName, true);
+      inputs.add(new ReadEntity(tab));
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
+    // validate the DDL is a valid operation on the table.
+    validateAlterTableType(tab, AlterTableTypes.ALTERPARTITION, false);
+
+    // Alter table ... partition column ( column newtype) only takes one column at a time.
+    // It must have a column name followed with type.
+    ASTNode colAst = (ASTNode) ast.getChild(1);
+    assert(colAst.getChildCount() == 2);
+
+    FieldSchema newCol = new FieldSchema();
+
+    // get column name
+    String name = colAst.getChild(0).getText().toLowerCase();
+    newCol.setName(unescapeIdentifier(name));
+
+    // get column type
+    ASTNode typeChild = (ASTNode) (colAst.getChild(1));
+    newCol.setType(getTypeStringFromAST(typeChild));
+
+    // check if column is defined or not
+    boolean fFoundColumn = false;
+    for( FieldSchema col : tab.getTTable().getPartitionKeys()) {
+      if (col.getName().compareTo(newCol.getName()) == 0) {
+        fFoundColumn = true;
+      }
+    }
+
+    // raise error if we could not find the column
+    if (!fFoundColumn) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(newCol.getName()));
+    }
+
+    AlterTableAlterPartDesc alterTblAlterPartDesc =
+            new AlterTableAlterPartDesc(db.getCurrentDatabase(), tblName, newCol);
+
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+            alterTblAlterPartDesc), conf));
+  }
+
+    /**
    * Add one or more partitions to a table. Useful when the data has been 
copied
    * to the right location by some other process.
    *

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Fri Feb 22 16:33:43 2013
@@ -135,6 +135,7 @@ TOK_ALTERTABLE_RENAMEPART;
 TOK_ALTERTABLE_REPLACECOLS;
 TOK_ALTERTABLE_ADDPARTS;
 TOK_ALTERTABLE_DROPPARTS;
+TOK_ALTERTABLE_ALTERPARTS;
 TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE;
 TOK_ALTERTABLE_TOUCH;
 TOK_ALTERTABLE_ARCHIVE;
@@ -1011,8 +1012,10 @@ tablePartitionPrefix
 alterTblPartitionStatement
 @init {msgs.push("alter table partition statement");}
 @after {msgs.pop();}
-  :  tablePartitionPrefix alterTblPartitionStatementSuffix
+  : tablePartitionPrefix alterTblPartitionStatementSuffix
  -> ^(TOK_ALTERTABLE_PARTITION tablePartitionPrefix alterTblPartitionStatementSuffix)
+  |Identifier KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
+  -> ^(TOK_ALTERTABLE_ALTERPARTS Identifier columnNameType)
   ;
 
 alterTblPartitionStatementSuffix

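The new parser alternative above accepts a single columnNameType, so only one partition key can be retyped per statement; a multi-column form fails at parse time, as the added clientnegative test shows:

    -- parses into TOK_ALTERTABLE_ALTERPARTS
    alter table alter_coltype partition column (ts double);

    -- rejected by the parser: only one column is allowed per statement
    alter table alter_coltype partition column (dt int, ts int);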
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Fri Feb 22 16:33:43 2013
@@ -188,6 +188,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_ALTERTABLE_TOUCH:
       case HiveParser.TOK_ALTERTABLE_ARCHIVE:
       case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
+      case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
       case HiveParser.TOK_LOCKTABLE:
       case HiveParser.TOK_UNLOCKTABLE:
       case HiveParser.TOK_CREATEROLE:

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java Fri Feb 22 16:33:43 2013
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+import java.util.List;
+
+public class AlterTableAlterPartDesc extends DDLDesc {
+  private String tableName;
+  private String dbName;
+  private FieldSchema partKeySpec;
+
+  public AlterTableAlterPartDesc() {
+  }
+
+  /**
+   * @param dbName
+   *          database that contains the table / partition
+   * @param tableName
+   *          table containing the partition
+   * @param partKeySpec
+   *          key column specification.
+   */
+  public AlterTableAlterPartDesc(String dbName, String tableName, FieldSchema partKeySpec) {
+    super();
+    this.dbName = dbName;
+    this.tableName = tableName;
+    this.partKeySpec = partKeySpec;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDbName() {
+    return dbName;
+  }
+
+  public void setDbName(String dbName) {
+    this.dbName = dbName;
+  }
+
+  public FieldSchema getPartKeySpec() {
+    return partKeySpec;
+  }
+
+  public void setPartKeySpec(FieldSchema partKeySpec) {
+    this.partKeySpec = partKeySpec;
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Fri Feb 22 16:33:43 2013
@@ -48,7 +48,7 @@ public class AlterTableDesc extends DDLD
     ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION,
     TOUCH, ARCHIVE, UNARCHIVE, ALTERPROTECTMODE, ALTERPARTITIONPROTECTMODE,
-    ALTERLOCATION, DROPPARTITION, RENAMEPARTITION, ADDSKEWEDBY, ALTERSKEWEDLOCATION,
-    ALTERBUCKETNUM
+    ALTERBUCKETNUM, ALTERPARTITION
   }
 
   public static enum ProtectModeType {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1449109&r1=1449108&r2=1449109&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Fri Feb 22 16:33:43 2013
@@ -62,6 +62,7 @@ public class DDLWork implements Serializ
   private ShowIndexesDesc showIndexesDesc;
   private DescDatabaseDesc descDbDesc;
   private AlterDatabaseDesc alterDbDesc;
+  private AlterTableAlterPartDesc alterTableAlterPartDesc;
   private TruncateTableDesc truncateTblDesc;
 
   private RoleDDLDesc roleDDLDesc;
@@ -442,7 +443,13 @@ public class DDLWork implements Serializ
     this.mergeFilesDesc = mergeDesc;
   }
 
-  /**
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      AlterTableAlterPartDesc alterPartDesc) {
+    this(inputs, outputs);
+    this.alterTableAlterPartDesc = alterPartDesc;
+  }
+
+    /**
    * @return Create Database descriptor
    */
   public CreateDatabaseDesc getCreateDatabaseDesc() {
@@ -995,6 +1002,21 @@ public class DDLWork implements Serializ
     this.needLock = needLock;
   }
 
+  /**
+   * @return information about the partitions we want to change.
+   */
+  public AlterTableAlterPartDesc getAlterTableAlterPartDesc() {
+    return alterTableAlterPartDesc;
+  }
+
+  /**
+   * @param alterPartitionDesc
+   *          information about the partitions we want to change.
+   */
+  public void setAlterTableAlterPartDesc(AlterTableAlterPartDesc alterPartitionDesc) {
+    this.alterTableAlterPartDesc = alterPartitionDesc;
+  }
+
   @Explain(displayName = "Truncate Table Operator")
   public TruncateTableDesc getTruncateTblDesc() {
     return truncateTblDesc;

Added: hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_2columns.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_2columns.q?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_2columns.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_2columns.q Fri Feb 22 16:33:43 2013
@@ -0,0 +1,11 @@
+-- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string);
+
+-- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1;
+
+desc alter_coltype;
+
+-- alter partition change multiple keys at same time 
+alter table alter_coltype partition column (dt int, ts int);
+

Added: hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidcolname.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidcolname.q?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidcolname.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidcolname.q Fri Feb 22 16:33:43 2013
@@ -0,0 +1,12 @@
+-- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string);
+
+-- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1;
+
+desc alter_coltype;
+
+-- alter partition key column with invalid column name
+alter table alter_coltype partition column (dd int);
+
+

Added: hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidtype.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidtype.q?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidtype.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/alter_partition_coltype_invalidtype.q Fri Feb 22 16:33:43 2013
@@ -0,0 +1,11 @@
+-- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string);
+
+-- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1;
+
+desc alter_coltype;
+
+-- alter partition key column data type for ts column to a wrong type
+alter table alter_coltype partition column (ts time);
+

Added: hive/trunk/ql/src/test/queries/clientpositive/alter_partition_coltype.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/alter_partition_coltype.q?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/alter_partition_coltype.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/alter_partition_coltype.q Fri Feb 22 16:33:43 2013
@@ -0,0 +1,55 @@
+-- create testing table.
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string);
+
+-- insert and create a partition.
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1;
+
+desc alter_coltype;
+
+-- select with paritition predicate.
+select count(*) from alter_coltype where dt = '100x';
+
+-- alter partition key column data type for dt column.
+alter table alter_coltype partition column (dt int);
+
+-- load a new partition using new data type.
+insert overwrite table alter_coltype partition(dt=10, ts='3.0') select * from src1;
+
+-- make sure the partition predicate still works. 
+select count(*) from alter_coltype where dt = '100x';
+explain extended select count(*) from alter_coltype where dt = '100x';
+
+select count(*) from alter_coltype where dt = 100;
+
+-- alter partition key column data type for ts column.
+alter table alter_coltype partition column (ts double);
+
+-- load a new partition using new data type.
+insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
+
+--  validate partition key column predicate can still work.
+select count(*) from alter_coltype where ts = '6:30pm';
+explain extended select count(*) from alter_coltype where ts = '6:30pm';
+
+--  validate partition key column predicate on two different partition column data type
+--  can still work.
+select count(*) from alter_coltype where ts = 3.0 and dt=10;
+explain extended select count(*) from alter_coltype where ts = 3.0 and dt=10;
+
+-- query where multiple partition values (of different datatypes) are being selected
+select key, value, dt, ts from alter_coltype where dt is not null;
+explain extended select key, value, dt, ts from alter_coltype where dt is not null;
+
+select count(*) from alter_coltype where ts = 3.0;
+
+-- make sure the partition predicate still works. 
+select count(*) from alter_coltype where dt = '100x' or dt = '10';
+explain extended select count(*) from alter_coltype where dt = '100x' or dt = '10';
+
+desc alter_coltype;
+desc alter_coltype partition (dt='100x', ts='6:30pm');
+desc alter_coltype partition (dt='100x', ts=3.0);
+desc alter_coltype partition (dt=10, ts=3.0);
+
+drop table alter_coltype;
+

Added: hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out Fri Feb 22 16:33:43 2013
@@ -0,0 +1,39 @@
+PREHOOK: query: -- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter_coltype
+PREHOOK: query: -- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@alter_coltype@dt=100x/ts=6%3A30pm
+POSTHOOK: query: -- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@alter_coltype@dt=100x/ts=6%3A30pm
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc alter_coltype
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc alter_coltype
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+# col_name             data_type               comment             
+                
+key                    string                  None                
+value                  string                  None                
+dt                     string                  None                
+ts                     string                  None                
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+dt                     string                  None                
+ts                     string                  None                
+FAILED: ParseException line 4:50 mismatched input ',' expecting ) near 'int' in alter table partition statement
+

Added: hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out Fri Feb 22 16:33:43 2013
@@ -0,0 +1,38 @@
+PREHOOK: query: -- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter_coltype
+PREHOOK: query: -- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@alter_coltype@dt=100x/ts=6%3A30pm
+POSTHOOK: query: -- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@alter_coltype@dt=100x/ts=6%3A30pm
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc alter_coltype
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc alter_coltype
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+# col_name             data_type               comment             
+                
+key                    string                  None                
+value                  string                  None                
+dt                     string                  None                
+ts                     string                  None                
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+dt                     string                  None                
+ts                     string                  None                
+FAILED: SemanticException [Error 10002]: Invalid column reference dd

Added: hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out?rev=1449109&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out Fri Feb 22 16:33:43 2013
@@ -0,0 +1,39 @@
+PREHOOK: query: -- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- create testing table
+create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter_coltype
+PREHOOK: query: -- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@alter_coltype@dt=100x/ts=6%3A30pm
+POSTHOOK: query: -- insert and create a partition
+insert overwrite table alter_coltype partition(dt='100x', ts='6:30pm') select * from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@alter_coltype@dt=100x/ts=6%3A30pm
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc alter_coltype
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc alter_coltype
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+# col_name             data_type               comment             
+                
+key                    string                  None                
+value                  string                  None                
+dt                     string                  None                
+ts                     string                  None                
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+dt                     string                  None                
+ts                     string                  None                
+FAILED: ParseException line 4:47 cannot recognize input near 'time' ')' '<EOF>' in column type
+

