hive git commit: HIVE-20748: Disable materialized view rewriting when plan pattern is not allowed (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2018-12-20 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/master 867a187bb -> 6e7767bc3


HIVE-20748: Disable materialized view rewriting when plan pattern is not allowed (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6e7767bc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6e7767bc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6e7767bc

Branch: refs/heads/master
Commit: 6e7767bc32319ec79f0977111a9d88bb92c9ded9
Parents: 867a187
Author: Jesus Camacho Rodriguez 
Authored: Mon Oct 15 19:14:32 2018 -0700
Committer: Jesus Camacho Rodriguez 
Committed: Fri Dec 21 07:57:29 2018 +0100

--
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  28 ++
 .../HiveRelOpMaterializationValidator.java  | 285 -
 .../HiveRelOptMaterializationValidator.java | 307 +++
 .../hadoop/hive/ql/parse/CalcitePlanner.java|  89 +++---
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  47 ++-
 .../materialized_view_no_cbo_rewrite.q  |  11 +
 .../materialized_view_no_cbo_rewrite_2.q|  13 +
 .../materialized_view_no_supported_op_rewrite.q |  11 +
 ...aterialized_view_no_supported_op_rewrite_2.q |  13 +
 .../materialized_view_no_cbo_rewrite.q.out  |  22 ++
 .../materialized_view_no_cbo_rewrite_2.q.out|  36 +++
 ...erialized_view_no_supported_op_rewrite.q.out |  22 ++
 ...ialized_view_no_supported_op_rewrite_2.q.out |  37 +++
 ql/src/test/results/clientpositive/join0.q.out  |   2 +-
 .../clientpositive/llap/explainuser_1.q.out |  40 +--
 .../clientpositive/llap/explainuser_2.q.out |   2 +-
 .../results/clientpositive/parallel_join0.q.out |   2 +-
 .../spark/spark_explainuser_1.q.out |  40 +--
 .../clientpositive/tez/explainanalyze_1.q.out   |   2 +-
 .../clientpositive/tez/explainanalyze_3.q.out   |   2 +-
 .../clientpositive/tez/explainuser_3.q.out  |   2 +-
 21 files changed, 635 insertions(+), 378 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6e7767bc/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index a96d54d..dfa7e5e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -54,6 +54,7 @@ import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.ListenableFuture;
+import org.apache.calcite.rel.RelNode;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -166,8 +167,10 @@ import 
org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
 import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable;
 import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
 import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.PreInsertTableDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -1259,6 +1262,31 @@ public class DDLTask extends Task implements 
Serializable {
 // This is a noop, return successfully
 return 0;
   }
+  if (alterMVDesc.isRewriteEnable()) {
+try {
+  final QueryState qs =
+  new QueryState.Builder().withHiveConf(conf).build();
+  final CalcitePlanner planner = new CalcitePlanner(qs);
+  final Context ctx = new Context(conf);
+  ctx.setIsLoadingMaterializedView(true);
+  planner.initCtx(ctx);
+  planner.init(false);
+  final RelNode plan = 
planner.genLogicalPlan(ParseUtils.parse(mv.getViewExpandedText()));
+  if (plan == null) {
+String msg = "Cannot enable automatic rewriting for materialized 
view.";
+if (ctx.getCboInfo() != null) {
+  msg += " " + ctx.getCboInfo();
+}
+throw new HiveException(msg);
+  }
+  if (!planner.isValidAutomaticRewritingMaterialization()) {
+throw new HiveException("Cannot enable rewriting for materialized 
view. " +
+planner.getInvalidAutomaticRewritingMaterializationReason());
+  }
+} catch (Exception e) {
+  throw new HiveException(

hive git commit: HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran)

2018-12-20 Thread vihangk1
Repository: hive
Updated Branches:
  refs/heads/master e103abc3f -> 867a187bb


HIVE-21040 : msck does unnecessary file listing at last level of directory tree (Vihang Karajgaonkar, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/867a187b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/867a187b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/867a187b

Branch: refs/heads/master
Commit: 867a187bbd0b4d9fe8b567d8eb8e5a8fcd6afa9f
Parents: e103abc
Author: Vihang Karajgaonkar 
Authored: Mon Dec 17 17:13:56 2018 -0800
Committer: Vihang Karajgaonkar 
Committed: Thu Dec 20 18:47:43 2018 -0800

--
 .../ql/metadata/TestHiveMetaStoreChecker.java   |   3 +
 .../hive/metastore/HiveMetaStoreChecker.java|  18 +--
 .../hive/metastore/TestMsckCheckPartitions.java | 138 +++
 3 files changed, 151 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/867a187b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java 
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
index 434d82a..520eb1b 100644
--- 
a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
@@ -714,6 +714,9 @@ public class TestHiveMetaStoreChecker {
   private void createDirectory(String partPath) throws IOException {
 Path part = new Path(partPath);
 fs.mkdirs(part);
+// create files under partitions to simulate real partitions
+fs.createNewFile(new Path(partPath + Path.SEPARATOR + "dummydata1"));
+fs.createNewFile(new Path(partPath + Path.SEPARATOR + "dummydata2"));
 fs.deleteOnExit(part);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/867a187b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
--
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
index 2df45f6..6f4400a 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
@@ -45,6 +45,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -474,10 +475,13 @@ public class HiveMetaStoreChecker {
 throws IOException, MetastoreException {
   final Path currentPath = pd.p;
   final int currentDepth = pd.depth;
+  if (currentDepth == partColNames.size()) {
+return currentPath;
+  }
   FileStatus[] fileStatuses = fs.listStatus(currentPath, 
FileUtils.HIDDEN_FILES_PATH_FILTER);
   // found no files under a sub-directory under table base path; it is 
possible that the table
   // is empty and hence there are no partition sub-directories created 
under base path
-  if (fileStatuses.length == 0 && currentDepth > 0 && currentDepth < 
partColNames.size()) {
+  if (fileStatuses.length == 0 && currentDepth > 0) {
 // since maxDepth is not yet reached, we are missing partition
 // columns in currentPath
 logOrThrowExceptionWithMsg(
@@ -485,12 +489,12 @@ public class HiveMetaStoreChecker {
   } else {
 // found files under currentPath add them to the queue if it is a 
directory
 for (FileStatus fileStatus : fileStatuses) {
-  if (!fileStatus.isDirectory() && currentDepth < partColNames.size()) 
{
+  if (!fileStatus.isDirectory()) {
 // found a file at depth which is less than number of partition 
keys
 logOrThrowExceptionWithMsg(
 "MSCK finds a file rather than a directory when it searches 
for "
 + fileStatus.getPath().toString());
-  } else if (fileStatus.isDirectory() && currentDepth < 
partColNames.size()) {
+  } else {
 // found a sub-directory at a depth less than number of partition 
keys
 // validate if the partition directory name matches with the 
corresponding
 // partition colName at currentDepth
@@

[2/2] hive git commit: HIVE-21032 : Refactor HiveMetaTool (Miklos Gergely via Ashutosh Chauhan)

2018-12-20 Thread hashutosh
HIVE-21032 : Refactor HiveMetaTool (Miklos Gergely via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e103abc3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e103abc3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e103abc3

Branch: refs/heads/master
Commit: e103abc3f6807576663a3b4c49b3dc5f9b1c0a64
Parents: 1020be0
Author: Miklos Gergely 
Authored: Wed Dec 12 05:56:00 2018 -0800
Committer: Ashutosh Chauhan 
Committed: Thu Dec 20 10:37:04 2018 -0800

--
 bin/ext/metatool.sh |   4 +-
 .../hadoop/hive/metastore/TestHiveMetaTool.java | 269 --
 .../tools/metatool/TestHiveMetaTool.java| 156 ++
 .../metastore/tools/metatool/package-info.java  |  20 +
 .../org/apache/hadoop/hive/ql/TestTxnExIm.java  |   1 -
 .../hive/metastore/tools/HiveMetaTool.java  | 490 ---
 .../metastore/tools/metatool/HiveMetaTool.java  |  66 +++
 .../tools/metatool/HiveMetaToolCommandLine.java | 219 +
 .../metastore/tools/metatool/MetaToolTask.java  |  44 ++
 .../metatool/MetaToolTaskExecuteJDOQLQuery.java |  64 +++
 .../tools/metatool/MetaToolTaskListFSRoot.java  |  36 ++
 .../metatool/MetaToolTaskUpdateLocation.java| 159 ++
 .../metastore/tools/metatool/package-info.java  |  23 +
 .../metatool/TestHiveMetaToolCommandLine.java   | 128 +
 .../TestMetaToolTaskExecuteJDOQLQuery.java  | 109 +
 .../metatool/TestMetaToolTaskListFSRoot.java|  58 +++
 .../TestMetaToolTaskUpdateLocation.java |  99 
 .../metastore/tools/metatool/package-info.java  |  20 +
 18 files changed, 1203 insertions(+), 762 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e103abc3/bin/ext/metatool.sh
--
diff --git a/bin/ext/metatool.sh b/bin/ext/metatool.sh
index 20e1c01..c67fd2e 100644
--- a/bin/ext/metatool.sh
+++ b/bin/ext/metatool.sh
@@ -18,12 +18,12 @@ export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
 
 metatool () {
   HIVE_OPTS=''
-  CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool
+  CLASS=org.apache.hadoop.hive.metastore.tools.metatool.HiveMetaTool
   execHiveCmd $CLASS "$@"
 }
 
 metatool_help () {
   HIVE_OPTS=''
-  CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool
+  CLASS=org.apache.hadoop.hive.metastore.tools.metatool.HiveMetaTool
   execHiveCmd $CLASS "--help"
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/e103abc3/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
deleted file mode 100644
index 5bd83ac..000
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.io.ByteArrayOutputStream;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.Type;
-import org.apache.hadoop.hive.metastore.tools.HiveMetaTool;
-import org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat;
-import org.apache.hadoop.hive.ql.io.avro.Av

[1/2] hive git commit: HIVE-21032 : Refactor HiveMetaTool (Miklos Gergely via Ashutosh Chauhan)

2018-12-20 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 1020be050 -> e103abc3f


http://git-wip-us.apache.org/repos/asf/hive/blob/e103abc3/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java
--
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java
new file mode 100644
index 000..63868b5
--- /dev/null
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestMetaToolTaskUpdateLocation.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools.metatool;
+
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.when;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.net.URI;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
+import org.mockito.Mockito;
+
+/** Unit tests for MetaToolTaskUpdateLocation. */
+@Category(MetastoreUnitTest.class)
+public class TestMetaToolTaskUpdateLocation {
+  @Rule
+  public final ExpectedException exception = ExpectedException.none();
+
+  private OutputStream os;
+
+  @Before
+  public void setup() {
+os = new ByteArrayOutputStream();
+System.setOut(new PrintStream(os));
+System.setErr(new PrintStream(os));
+  }
+
+  @Test
+  public void testNoHost() throws Exception {
+exception.expect(IllegalStateException.class);
+exception.expectMessage("HiveMetaTool:A valid host is required in both 
old-loc and new-loc");
+
+MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation();
+t.setCommandLine(new HiveMetaToolCommandLine(new String[] 
{"-updateLocation", "hdfs://", "hdfs://"}));
+t.execute();
+  }
+
+  @Test
+  public void testNoScheme() throws Exception {
+exception.expect(IllegalStateException.class);
+exception.expectMessage("HiveMetaTool:A valid scheme is required in both 
old-loc and new-loc");
+
+MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation();
+t.setCommandLine(new HiveMetaToolCommandLine(new String[] 
{"-updateLocation", "//old.host", "//new.host"}));
+t.execute();
+  }
+
+  @Test
+  public void testUpdateLocationNoUpdate() throws Exception {
+// testing only that the proper functions are called on ObjectStore - 
effect tested in TestHiveMetaTool in itests
+String oldUriString = "hdfs://old.host";
+String newUriString = "hdfs://new.host";
+String tablePropKey = "abc";
+String serdePropKey = "def";
+
+URI oldUri = new Path(oldUriString).toUri();
+URI newUri = new Path(newUriString).toUri();
+
+ObjectStore mockObjectStore = Mockito.mock(ObjectStore.class);
+when(mockObjectStore.updateMDatabaseURI(eq(oldUri), eq(newUri), 
eq(true))).thenReturn(null);
+when(mockObjectStore.updateMStorageDescriptorTblURI(eq(oldUri), 
eq(newUri), eq(true))).thenReturn(null);
+when(mockObjectStore.updateTblPropURI(eq(oldUri), eq(newUri), 
eq(tablePropKey), eq(true))).thenReturn(null);
+when(mockObjectStore.updateMStorageDescriptorTblPropURI(eq(oldUri), 
eq(newUri), eq(tablePropKey), eq(true)))
+  .thenReturn(null);
+when(mockObjectStore.updateSerdeURI(eq(oldUri), eq(newUri), 
eq(serdePropKey), eq(true))).thenReturn(null);
+
+MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation();
+t.setCommandLine(new HiveMetaToolCommandLine(new String[] 
{"-updateLocation", newUriString, oldUriString, "-dryRun",
+"-tablePropKey", tablePropKey, "-serdePropKey", serdePropKey}));
+t.setObjectStore(mockObjectStore);
+t.execute();
+  }
+}

http://g