Repository: hive
Updated Branches:
  refs/heads/hive-14535 57a48ce78 -> fecace165


HIVE-15212 : merge branch into master - some test/out fixes 2 (Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fecace16
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fecace16
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fecace16

Branch: refs/heads/hive-14535
Commit: fecace16525d0355797e0e4136644028637eb119
Parents: 57a48ce
Author: Sergey Shelukhin <[email protected]>
Authored: Mon Mar 13 18:46:25 2017 -0700
Committer: Sergey Shelukhin <[email protected]>
Committed: Mon Mar 13 18:46:25 2017 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java  | 6 ++++--
 ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java  | 3 ++-
 ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java   | 4 ++--
 ql/src/test/queries/clientnegative/mm_truncate_cols.q          | 2 +-
 ql/src/test/results/clientnegative/mm_truncate_cols.q.out      | 4 ++--
 ql/src/test/results/clientpositive/tez/explainuser_3.q.out     | 6 +++---
 6 files changed, 14 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/fecace16/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index fda3e2a..40ea3a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Utilities.MissingBucketsContext;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.io.AcidUtils.Operation;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -262,8 +263,9 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
       // before attempting the rename below, check if our file exists.  If it doesn't,
       // then skip the rename.  If it does try it.  We could just blindly try the rename
       // and avoid the extra stat, but that would mask other errors.
-      boolean needToRename = (conf.getWriteType() != AcidUtils.Operation.UPDATE &&
-          conf.getWriteType() != AcidUtils.Operation.DELETE) || fs.exists(outPaths[idx]);
+      Operation acidOp = conf.getWriteType();
+      boolean needToRename = outPaths[idx] != null && ((acidOp != Operation.UPDATE
+          && acidOp != Operation.DELETE) || fs.exists(outPaths[idx]));
       if (needToRename && outPaths[idx] != null) {
         Utilities.LOG14535.info("committing " + outPaths[idx] + " to " + finalPaths[idx] + " (" + isMmTable + ")");
         if (isMmTable) {

http://git-wip-us.apache.org/repos/asf/hive/blob/fecace16/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
index 684f1c1..762d946 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
@@ -67,7 +67,8 @@ public class LoadTableDesc extends org.apache.hadoop.hive.ql.plan.LoadDesc
       final boolean replace,
       final AcidUtils.Operation writeType, Long mmWriteId) {
     super(sourcePath);
-    Utilities.LOG14535.info("creating part LTD from " + sourcePath + " to " + table.getTableName()/*, new Exception()*/);
+    Utilities.LOG14535.info("creating part LTD from " + sourcePath + " to "
+        + ((table.getProperties() == null) ? "null" : table.getTableName()));
     init(table, partitionSpec, replace, writeType, mmWriteId);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/fecace16/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index ccc0f9c..0561952 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -326,8 +326,8 @@ public class TestHive extends TestCase {
       tbl.setCreateTime(ft.getTTable().getCreateTime());
       tbl.getParameters().put(hive_metastoreConstants.DDL_TIME,
           ft.getParameters().get(hive_metastoreConstants.DDL_TIME));
-      assertTrue("Tables  doesn't match: " + tableName, ft.getTTable()
-          .equals(tbl.getTTable()));
+      assertTrue("Tables  doesn't match: " + tableName + " (" + ft.getTTable()
+          + "; " + tbl.getTTable() + ")", ft.getTTable().equals(tbl.getTTable()));
       assertEquals("SerializationLib is not set correctly", tbl
           .getSerializationLib(), ft.getSerializationLib());
       assertEquals("Serde is not set correctly", tbl.getDeserializer()

http://git-wip-us.apache.org/repos/asf/hive/blob/fecace16/ql/src/test/queries/clientnegative/mm_truncate_cols.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/mm_truncate_cols.q b/ql/src/test/queries/clientnegative/mm_truncate_cols.q
index 1780118..3335ed8 100644
--- a/ql/src/test/queries/clientnegative/mm_truncate_cols.q
+++ b/ql/src/test/queries/clientnegative/mm_truncate_cols.q
@@ -1,3 +1,3 @@
-CREATE TABLE mm_table(key int, value string)  tblproperties('hivecommit'='true');
+CREATE TABLE mm_table(key int, value string) stored as rcfile tblproperties ("transactional"="true", "transactional_properties"="insert_only");
 
 TRUNCATE TABLE mm_table COLUMNS (value);

http://git-wip-us.apache.org/repos/asf/hive/blob/fecace16/ql/src/test/results/clientnegative/mm_truncate_cols.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/mm_truncate_cols.q.out b/ql/src/test/results/clientnegative/mm_truncate_cols.q.out
index 015f251..62dd222 100644
--- a/ql/src/test/results/clientnegative/mm_truncate_cols.q.out
+++ b/ql/src/test/results/clientnegative/mm_truncate_cols.q.out
@@ -1,8 +1,8 @@
-PREHOOK: query: CREATE TABLE mm_table(key int, value string)  tblproperties('hivecommit'='true')
+PREHOOK: query: CREATE TABLE mm_table(key int, value string) stored as rcfile tblproperties ("transactional"="true", "transactional_properties"="insert_only")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@mm_table
-POSTHOOK: query: CREATE TABLE mm_table(key int, value string)  tblproperties('hivecommit'='true')
+POSTHOOK: query: CREATE TABLE mm_table(key int, value string) stored as rcfile tblproperties ("transactional"="true", "transactional_properties"="insert_only")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mm_table

http://git-wip-us.apache.org/repos/asf/hive/blob/fecace16/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index da52b0a..e409a36 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -509,11 +509,11 @@ Stage-3
                   Conditional Operator
                     Stage-1
                       Map 1 vectorized
-                      File Output Operator [FS_10]
+                      File Output Operator [FS_8]
                         table:{"name:":"default.orc_merge5"}
-                        Select Operator [SEL_9] (rows=306 width=268)
+                        Select Operator [SEL_7] (rows=306 width=268)
                           Output:["_col0","_col1","_col2","_col3","_col4"]
-                          Filter Operator [FIL_8] (rows=306 width=268)
+                          Filter Operator [FIL_6] (rows=306 width=268)
                             predicate:(userid <= 13)
                             TableScan [TS_0] (rows=919 width=268)
                               
default@orc_merge5,orc_merge5,Tbl:COMPLETE,Col:NONE,Output:["userid","string1","subtype","decimal1","ts"]

Reply via email to