Author: brock
Date: Sun Jan 25 22:59:54 2015
New Revision: 1654715

URL: http://svn.apache.org/r1654715
Log:
HIVE-9445 - Revert HIVE-5700 - enforce single date format for partition column storage (Brock reviewed by Xuefu)

Removed:
    hive/trunk/metastore/scripts/upgrade/mysql/015-HIVE-5700.mysql.sql
    hive/trunk/metastore/scripts/upgrade/oracle/015-HIVE-5700.oracle.sql
    hive/trunk/metastore/scripts/upgrade/postgres/015-HIVE-5700.postgres.sql
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
Modified:
    hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql
    hive/trunk/metastore/scripts/upgrade/oracle/upgrade-0.12.0-to-0.13.0.oracle.sql
    hive/trunk/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java

Modified: hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql?rev=1654715&r1=1654714&r2=1654715&view=diff
==============================================================================
--- hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql (original)
+++ hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql Sun Jan 25 22:59:54 2015
@@ -1,6 +1,5 @@
 SELECT 'Upgrading MetaStore schema from 0.12.0 to 0.13.0' AS ' ';
 
-SOURCE 015-HIVE-5700.mysql.sql;
 SOURCE 016-HIVE-6386.mysql.sql;
 SOURCE 017-HIVE-6458.mysql.sql;
 SOURCE 018-HIVE-6757.mysql.sql;

Modified: hive/trunk/metastore/scripts/upgrade/oracle/upgrade-0.12.0-to-0.13.0.oracle.sql
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/oracle/upgrade-0.12.0-to-0.13.0.oracle.sql?rev=1654715&r1=1654714&r2=1654715&view=diff
==============================================================================
--- hive/trunk/metastore/scripts/upgrade/oracle/upgrade-0.12.0-to-0.13.0.oracle.sql (original)
+++ hive/trunk/metastore/scripts/upgrade/oracle/upgrade-0.12.0-to-0.13.0.oracle.sql Sun Jan 25 22:59:54 2015
@@ -1,6 +1,5 @@
 SELECT 'Upgrading MetaStore schema from 0.12.0 to 0.13.0' AS Status from dual;
 
[email protected];
 @016-HIVE-6386.oracle.sql;
 @017-HIVE-6458.oracle.sql;
 @018-HIVE-6757.oracle.sql;

Modified: hive/trunk/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql?rev=1654715&r1=1654714&r2=1654715&view=diff
==============================================================================
--- hive/trunk/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql (original)
+++ hive/trunk/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql Sun Jan 25 22:59:54 2015
@@ -1,6 +1,5 @@
 SELECT 'Upgrading MetaStore schema from 0.12.0 to 0.13.0';
 
-\i 015-HIVE-5700.postgres.sql;
 \i 016-HIVE-6386.postgres.sql;
 \i 017-HIVE-6458.postgres.sql;
 \i 018-HIVE-6757.postgres.sql;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1654715&r1=1654714&r2=1654715&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Sun Jan 25 22:59:54 2015
@@ -75,8 +75,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
-import com.google.common.annotations.VisibleForTesting;
-
 /**
  * BaseSemanticAnalyzer.
  *
@@ -1247,36 +1245,7 @@ public abstract class BaseSemanticAnalyz
             inputOI.getTypeName(), outputOI.getTypeName());
       }
 
-      normalizeColSpec(partSpec, astKeyName, colType, colSpec, convertedValue);
-    }
-  }
-
-  @VisibleForTesting
-  static void normalizeColSpec(Map<String, String> partSpec, String colName,
-      String colType, String originalColSpec, Object colValue) throws SemanticException {
-    if (colValue == null) return; // nothing to do with nulls
-    String normalizedColSpec = originalColSpec;
-    if (colType.equals(serdeConstants.DATE_TYPE_NAME)) {
-      normalizedColSpec = normalizeDateCol(colValue, originalColSpec);
-    }
-    if (!normalizedColSpec.equals(originalColSpec)) {
-      STATIC_LOG.warn("Normalizing partition spec - " + colName + " from "
-          + originalColSpec + " to " + normalizedColSpec);
-      partSpec.put(colName, normalizedColSpec);
-    }
-  }
-
-  private static String normalizeDateCol(
-      Object colValue, String originalColSpec) throws SemanticException {
-    Date value;
-    if (colValue instanceof DateWritable) {
-      value = ((DateWritable) colValue).get();
-    } else if (colValue instanceof Date) {
-      value = (Date) colValue;
-    } else {
-      throw new SemanticException("Unexpected date type " + colValue.getClass());
     }
-    return HiveMetaStore.PARTITION_DATE_FORMAT.get().format(value);
   }
 
  protected WriteEntity toWriteEntity(String location) throws SemanticException {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1654715&r1=1654714&r2=1654715&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sun Jan 25 22:59:54 2015
@@ -1340,11 +1340,13 @@ public class SemanticAnalyzer extends Ba
       case HiveParser.TOK_INSERT:
         ASTNode destination = (ASTNode) ast.getChild(0);
         Tree tab = destination.getChild(0);
+
         // Proceed if AST contains partition & If Not Exists
         if (destination.getChildCount() == 2 &&
             tab.getChildCount() == 2 &&
             destination.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS) {
           String tableName = tab.getChild(0).getChild(0).getText();
+
           Tree partitions = tab.getChild(1);
           int childCount = partitions.getChildCount();
           HashMap<String, String> partition = new HashMap<String, String>();
@@ -1358,30 +1360,25 @@ public class SemanticAnalyzer extends Ba
             partition.put(partitionName, partitionVal);
           }
           // if it is a dynamic partition throw the exception
-          if (childCount != partition.size()) {
+          if (childCount == partition.size()) {
+            try {
+              Table table = db.getTable(tableName);
+              Partition parMetaData = db.getPartition(table, partition, false);
+              // Check partition exists if it exists skip the overwrite
+              if (parMetaData != null) {
+                phase1Result = false;
+                skipRecursion = true;
+                LOG.info("Partition already exists so insert into overwrite " +
+                    "skipped for partition : " + parMetaData.toString());
+                break;
+              }
+            } catch (HiveException e) {
+              LOG.info("Error while getting metadata : ", e);
+            }
+          } else {
            throw new SemanticException(ErrorMsg.INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS
                 .getMsg(partition.toString()));
           }
-          Table table = null;
-          try {
-            table = db.getTable(tableName);
-          } catch (HiveException ex) {
-            throw new SemanticException(ex);
-          }
-          try {
-            Partition parMetaData = db.getPartition(table, partition, false);
-            // Check partition exists if it exists skip the overwrite
-            if (parMetaData != null) {
-              phase1Result = false;
-              skipRecursion = true;
-              LOG.info("Partition already exists so insert into overwrite " +
-                  "skipped for partition : " + parMetaData.toString());
-              break;
-            }
-          } catch (HiveException e) {
-            LOG.info("Error while getting metadata : ", e);
-          }
-          validatePartSpec(table, partition, (ASTNode)tab, conf, false);
         }
         skipRecursion = false;
         break;

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java?rev=1654715&r1=1654714&r2=1654715&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java Sun Jan 25 22:59:54 2015
@@ -49,7 +49,7 @@ import org.junit.Test;
 
 public class TestUpdateDeleteSemanticAnalyzer {
 
-  static final private Log LOG = LogFactory.getLog(TestSemanticAnalyzer.class.getName());
+  static final private Log LOG = LogFactory.getLog(TestUpdateDeleteSemanticAnalyzer.class.getName());
 
   private HiveConf conf;
   private Hive db;


Reply via email to