hive git commit: HIVE-14896 : Stabilize golden files for currently failing tests

2016-10-05 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 8e0b19b3c -> e1fa2787f


HIVE-14896 : Stabilize golden files for currently failing tests


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e1fa2787
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e1fa2787
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e1fa2787

Branch: refs/heads/master
Commit: e1fa2787ffb5b65e54ed5c087ca3ed8060f18d83
Parents: 8e0b19b
Author: Ashutosh Chauhan 
Authored: Wed Oct 5 15:26:15 2016 -0700
Committer: Ashutosh Chauhan 
Committed: Wed Oct 5 22:50:45 2016 -0700

--
 .../test/queries/clientpositive/acid_mapjoin.q  |   3 +-
 ql/src/test/queries/clientpositive/ctas.q   |   2 +-
 .../test/queries/clientpositive/ctas_hadoop20.q |  63 --
 .../results/clientpositive/acid_mapjoin.q.out   |  30 +-
 ql/src/test/results/clientpositive/ctas.q.out   | 118 +--
 .../results/clientpositive/ctas_hadoop20.q.out  | 945 ---
 .../test/results/clientpositive/llap/ctas.q.out |  81 +-
 .../results/clientpositive/spark/ctas.q.out |  81 +-
 .../vector_join_part_col_char.q.out |   6 +-
 9 files changed, 35 insertions(+), 1294 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e1fa2787/ql/src/test/queries/clientpositive/acid_mapjoin.q
--
diff --git a/ql/src/test/queries/clientpositive/acid_mapjoin.q b/ql/src/test/queries/clientpositive/acid_mapjoin.q
index 5eee6e7..e3d14cc 100644
--- a/ql/src/test/queries/clientpositive/acid_mapjoin.q
+++ b/ql/src/test/queries/clientpositive/acid_mapjoin.q
@@ -13,7 +13,8 @@ create table acid2 (key int, value string) clustered by (key) into 2 buckets sto
 
 insert into acid1 values (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e'), (6, 'f'), (7, 'g'), (8, 'h');
 insert into acid2 values (1,'a'),(3,'c'),(5,'e'),(7,'g');
-
+alter table acid2 update statistics set('numRows'='210', 'rawDataSize'='840');
+alter table acid1 update statistics set('numRows'='316', 'rawDataSize'='1265');
 explain
 select count(*) from acid1 join acid2 on acid1.key = acid2.key;
 select count(*) from acid1 join acid2 on acid1.key = acid2.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/e1fa2787/ql/src/test/queries/clientpositive/ctas.q
--
diff --git a/ql/src/test/queries/clientpositive/ctas.q b/ql/src/test/queries/clientpositive/ctas.q
index edd1f6a..57a4729 100644
--- a/ql/src/test/queries/clientpositive/ctas.q
+++ b/ql/src/test/queries/clientpositive/ctas.q
@@ -49,7 +49,7 @@ select * from nzhang_ctas4;
 
 describe formatted nzhang_CTAS4;
 
-explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
+explain create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
 
 set mapreduce.framework.name=yarn;
 set mapreduce.jobtracker.address=localhost:58;

http://git-wip-us.apache.org/repos/asf/hive/blob/e1fa2787/ql/src/test/queries/clientpositive/ctas_hadoop20.q
--
diff --git a/ql/src/test/queries/clientpositive/ctas_hadoop20.q b/ql/src/test/queries/clientpositive/ctas_hadoop20.q
deleted file mode 100644
index e275b7b..000
--- a/ql/src/test/queries/clientpositive/ctas_hadoop20.q
+++ /dev/null
@@ -1,63 +0,0 @@
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-
-create table nzhang_Tmp(a int, b string);
-select * from nzhang_Tmp;
-
-explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10;
-
-create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10;
-
-select * from nzhang_CTAS1;
-
-describe formatted nzhang_CTAS1;
-
-
-explain create table nzhang_ctas2 as select * from src sort by key, value limit 10;
-
-create table nzhang_ctas2 as select * from src sort by key, value limit 10;
-
-select * from nzhang_ctas2;
-
-describe formatted nzhang_CTAS2;
-
-
-explain create table nzhang_ctas3 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb  from src sort by half_key, conb limit 10;
-
-create table nzhang_ctas3 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb  from src sort by half_key, conb limit 10;
-
-select * from nzhang_ctas3;
-
-describe formatted nzhang_CTAS3;
-
-
-explain create table if not exists nzhang_ctas3 as select key, value from 

hive git commit: HIVE-14892: Tests that explicitly submit jobs via child process are slow (Prasanth Jayachandran reviewed by Siddharth Seth)

2016-10-05 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master a0bf9d629 -> 8e0b19b3c


HIVE-14892: Tests that explicitly submit jobs via child process are slow (Prasanth Jayachandran reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8e0b19b3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8e0b19b3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8e0b19b3

Branch: refs/heads/master
Commit: 8e0b19b3c469c1e0d4165cc1e4652093195fc587
Parents: a0bf9d6
Author: Prasanth Jayachandran 
Authored: Wed Oct 5 21:12:08 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Wed Oct 5 21:12:08 2016 -0700

--
 .../apache/hadoop/hive/ql/exec/mr/MapRedTask.java  | 17 -
 .../queries/clientpositive/alter_merge_stats_orc.q |  4 ++--
 .../clientpositive/archive_excludeHadoop20.q   |  5 ++---
 .../queries/clientpositive/auto_sortmerge_join_8.q |  4 ++--
 ql/src/test/queries/clientpositive/nonmr_fetch.q   |  4 ++--
 ql/src/test/queries/clientpositive/orc_analyze.q   |  4 ++--
 ql/src/test/queries/clientpositive/sample10.q  |  4 ++--
 .../clientpositive/sample_islocalmode_hook.q   |  4 ++--
 .../sample_islocalmode_hook_use_metadata.q |  6 +++---
 .../queries/clientpositive/vectorized_parquet.q|  4 ++--
 10 files changed, 35 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8e0b19b3/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index 55bab6c..17db852 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -138,9 +138,24 @@ public class MapRedTask extends ExecDriver implements Serializable {
   runningViaChild = conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
 
   if (!runningViaChild) {
+    // since we are running the mapred task in the same jvm, we should update the job conf
+    // in ExecDriver as well to have proper local properties.
+    if (this.isLocalMode()) {
+      // save the original job tracker
+      ctx.setOriginalTracker(ShimLoader.getHadoopShims().getJobLauncherRpcAddress(job));
+      // change it to local
+      ShimLoader.getHadoopShims().setJobLauncherRpcAddress(job, "local");
+    }
     // we are not running this mapred task via child jvm
     // so directly invoke ExecDriver
-    return super.execute(driverContext);
+    int ret = super.execute(driverContext);
+
+    // restore the previous properties for framework name, RM address etc.
+    if (this.isLocalMode()) {
+      // restore the local job tracker back to original
+      ctx.restoreOriginalTracker();
+    }
+    return ret;
   }
 
   // we need to edit the configuration to setup cmdline. clone it first
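The hunk above wraps the in-JVM execution in a save/override/restore of the job tracker address. A minimal standalone sketch of that flow (the map below is an illustrative stand-in for the job configuration, not the Hive shim API; the try/finally is added here for safety and is not in the patch itself):

import java.util.HashMap;
import java.util.Map;

public class SaveOverrideRestore {
  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<>();
    conf.put("mapreduce.jobtracker.address", "rm-host:8032");

    // save the original tracker, as ctx.setOriginalTracker(...) does above
    String original = conf.get("mapreduce.jobtracker.address");
    // change it to local for in-JVM execution
    conf.put("mapreduce.jobtracker.address", "local");
    try {
      // ... the MapReduce task would run in the same JVM here ...
    } finally {
      // restore, as ctx.restoreOriginalTracker() does above
      conf.put("mapreduce.jobtracker.address", original);
    }
    System.out.println(conf.get("mapreduce.jobtracker.address"));
  }
}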

http://git-wip-us.apache.org/repos/asf/hive/blob/8e0b19b3/ql/src/test/queries/clientpositive/alter_merge_stats_orc.q
--
diff --git a/ql/src/test/queries/clientpositive/alter_merge_stats_orc.q b/ql/src/test/queries/clientpositive/alter_merge_stats_orc.q
index 66a7c10..8d0c7bd 100644
--- a/ql/src/test/queries/clientpositive/alter_merge_stats_orc.q
+++ b/ql/src/test/queries/clientpositive/alter_merge_stats_orc.q
@@ -1,5 +1,5 @@
-set hive.exec.submitviachild=true;
-set hive.exec.submit.local.task.via.child=true;
+set hive.exec.submitviachild=false;
+set hive.exec.submit.local.task.via.child=false;
 create table src_orc_merge_test_stat(key int, value string) stored as orc;
 
 insert overwrite table src_orc_merge_test_stat select * from src;

http://git-wip-us.apache.org/repos/asf/hive/blob/8e0b19b3/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
--
diff --git a/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q b/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
index 37d5cf9..e961641 100644
--- a/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
+++ b/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
@@ -1,8 +1,7 @@
 set hive.mapred.mode=nonstrict;
 set hive.archive.enabled = true;
-;
-set hive.exec.submitviachild=true;
-set hive.exec.submit.local.task.via.child=true;
+set hive.exec.submitviachild=false;
+set hive.exec.submit.local.task.via.child=false;
 
 drop table tstsrc;
 drop table tstsrcpart;


hive git commit: HIVE-14545 : HiveServer2 with http transport mode spends too much time just creating configs (Rajesh Balamohan via Thejas Nair)

2016-10-05 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master c53c9be71 -> a0bf9d629


HIVE-14545 : HiveServer2 with http transport mode spends too much time just creating configs (Rajesh Balamohan via Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a0bf9d62
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a0bf9d62
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a0bf9d62

Branch: refs/heads/master
Commit: a0bf9d6296760af7ce8ed29d5ccf13683a46d809
Parents: c53c9be
Author: Thejas Nair 
Authored: Wed Oct 5 18:34:09 2016 -0700
Committer: Thejas Nair 
Committed: Wed Oct 5 18:34:09 2016 -0700

--
 .../java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a0bf9d62/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
--
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index b7a1e2d..50449e0 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -358,7 +358,7 @@ public class ThriftHttpServlet extends TServlet {
   try {
 AuthMethods authMethod = AuthMethods.getValidAuthMethod(authType);
 PasswdAuthenticationProvider provider =
-        AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
+        AuthenticationProviderFactory.getAuthenticationProvider(authMethod, hiveConf);
 provider.Authenticate(userName, getPassword(request, authType));
 
   } catch (Exception e) {
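The one-line change threads the servlet's already-built HiveConf into the provider factory rather than letting the factory construct configuration per request, which is where the time went. A generic illustration of the pattern (stand-in classes, not the Hive implementation):

public class ConfigReuse {
  // stand-in for an object that is expensive to construct, the way HiveConf
  // parses hive-site.xml and friends on creation
  static class ExpensiveConfig {
    ExpensiveConfig() { /* imagine XML parsing and classpath scanning here */ }
  }

  private static final ExpensiveConfig SHARED = new ExpensiveConfig(); // built once

  static String newProvider(String authMethod, ExpensiveConfig conf) {
    // would select an authentication provider using the caller's config
    return authMethod + "-provider";
  }

  public static void main(String[] args) {
    // hot path: no per-request ExpensiveConfig construction
    System.out.println(newProvider("LDAP", SHARED));
  }
}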



[06/38] hive git commit: HIVE-7224: Set incremental printing to true by default in Beeline (Sahil Takiar, reviewed by Thejas M Nair)

2016-10-05 Thread khorgath
HIVE-7224: Set incremental printing to true by default in Beeline (Sahil Takiar, reviewed by Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7d3da177
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7d3da177
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7d3da177

Branch: refs/heads/repl2
Commit: 7d3da1778fdbfdb9b3eb0a19a10260b0258e1f87
Parents: d3b88f6
Author: Sahil Takiar 
Authored: Tue Sep 27 17:41:59 2016 -0500
Committer: Sergio Pena 
Committed: Tue Sep 27 17:41:59 2016 -0500

--
 beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7d3da177/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java b/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
index 59fbca3..57b9c46 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
@@ -74,7 +74,7 @@ class BeeLineOpts implements Completer {
   private boolean autoCommit = false;
   private boolean verbose = false;
   private boolean force = false;
-  private boolean incremental = false;
+  private boolean incremental = true;
   private int incrementalBufferRows = DEFAULT_INCREMENTAL_BUFFER_ROWS;
   private boolean showWarnings = false;
   private boolean showNestedErrs = false;



[24/38] hive git commit: HIVE-14768: Add a new UDTF Replicate_Rows (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

2016-10-05 Thread khorgath
HIVE-14768: Add a new UDTF Replicate_Rows (Pengcheng Xiong, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e19f0e35
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e19f0e35
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e19f0e35

Branch: refs/heads/repl2
Commit: e19f0e35e09ca283e5de46ae7e2db1e11396335e
Parents: 0a4b3d8
Author: Pengcheng Xiong 
Authored: Mon Oct 3 22:07:24 2016 -0700
Committer: Pengcheng Xiong 
Committed: Mon Oct 3 22:07:24 2016 -0700

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   1 +
 .../udf/generic/GenericUDTFReplicateRows.java   |  88 +++
 .../clientpositive/udtf_replicate_rows.q|  23 
 .../results/clientpositive/show_functions.q.out |   1 +
 .../clientpositive/udtf_replicate_rows.q.out| 107 +++
 5 files changed, 220 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e19f0e35/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 6870dfa..6b29be1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -468,6 +468,7 @@ public final class FunctionRegistry {
 
 // Generic UDTF's
 system.registerGenericUDTF("explode", GenericUDTFExplode.class);
+system.registerGenericUDTF("replicate_rows", 
GenericUDTFReplicateRows.class);
 system.registerGenericUDTF("inline", GenericUDTFInline.class);
 system.registerGenericUDTF("json_tuple", GenericUDTFJSONTuple.class);
 system.registerGenericUDTF("parse_url_tuple", 
GenericUDTFParseUrlTuple.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/e19f0e35/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFReplicateRows.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFReplicateRows.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFReplicateRows.java
new file mode 100644
index 000..164445d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFReplicateRows.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
+import org.apache.hadoop.hive.serde2.lazy.LazyLong;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * Takes a row of data and repeats n times.
+ */
+@Description(name = "replicate_rows", value = "_FUNC_(n, cols...) - turns 1 row into n rows")
+public class GenericUDTFReplicateRows extends GenericUDTF {
+  @Override
+  public 
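The archived message cuts off here, before the class body. For orientation, a hedged sketch of what a replicate-rows style GenericUDTF looks like, reusing the imports listed above (simplified relative to the committed code; the bigint cast in process() is an assumption):

public class ReplicateRowsSketch extends GenericUDTF {
  @Override
  public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
    if (args.length < 2) {
      throw new UDFArgumentException("replicate_rows expects (n, col1, ...)");
    }
    List<String> names = new ArrayList<String>();
    List<ObjectInspector> inspectors = new ArrayList<ObjectInspector>();
    for (int i = 1; i < args.length; i++) {
      names.add("col" + i);          // output columns echo the inputs
      inspectors.add(args[i]);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(names, inspectors);
  }

  @Override
  public void process(Object[] args) throws HiveException {
    long n = ((LongWritable) args[0]).get();  // assumes a bigint first argument
    Object[] row = new Object[args.length - 1];
    System.arraycopy(args, 1, row, 0, row.length);
    for (long i = 0; i < n; i++) {
      forward(row);                  // emit the same row n times
    }
  }

  @Override
  public void close() throws HiveException {
    // nothing buffered, nothing to flush
  }
}

Once registered under replicate_rows (see the FunctionRegistry hunk above), it would be invoked like any UDTF, e.g. select replicate_rows(2, key, value) from src.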

[30/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index 5a35a50..b4a05b2 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -6186,6 +6186,277 @@ uint32_t ThriftHiveMetastore_get_tables_presult::read(::apache::thrift::protocol
 }
 
 
+ThriftHiveMetastore_get_tables_by_type_args::~ThriftHiveMetastore_get_tables_by_type_args() throw() {
+}
+
+
+uint32_t ThriftHiveMetastore_get_tables_by_type_args::read(::apache::thrift::protocol::TProtocol* iprot) {
+
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
+  uint32_t xfer = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TType ftype;
+  int16_t fid;
+
+  xfer += iprot->readStructBegin(fname);
+
+  using ::apache::thrift::protocol::TProtocolException;
+
+
+  while (true)
+  {
+xfer += iprot->readFieldBegin(fname, ftype, fid);
+if (ftype == ::apache::thrift::protocol::T_STOP) {
+  break;
+}
+switch (fid)
+{
+  case 1:
+if (ftype == ::apache::thrift::protocol::T_STRING) {
+  xfer += iprot->readString(this->db_name);
+  this->__isset.db_name = true;
+} else {
+  xfer += iprot->skip(ftype);
+}
+break;
+  case 2:
+if (ftype == ::apache::thrift::protocol::T_STRING) {
+  xfer += iprot->readString(this->pattern);
+  this->__isset.pattern = true;
+} else {
+  xfer += iprot->skip(ftype);
+}
+break;
+  case 3:
+if (ftype == ::apache::thrift::protocol::T_STRING) {
+  xfer += iprot->readString(this->tableType);
+  this->__isset.tableType = true;
+} else {
+  xfer += iprot->skip(ftype);
+}
+break;
+  default:
+xfer += iprot->skip(ftype);
+break;
+}
+xfer += iprot->readFieldEnd();
+  }
+
+  xfer += iprot->readStructEnd();
+
+  return xfer;
+}
+
+uint32_t ThriftHiveMetastore_get_tables_by_type_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
+  uint32_t xfer = 0;
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
+  xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_tables_by_type_args");
+
+  xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
+  xfer += oprot->writeString(this->db_name);
+  xfer += oprot->writeFieldEnd();
+
+  xfer += oprot->writeFieldBegin("pattern", ::apache::thrift::protocol::T_STRING, 2);
+  xfer += oprot->writeString(this->pattern);
+  xfer += oprot->writeFieldEnd();
+
+  xfer += oprot->writeFieldBegin("tableType", ::apache::thrift::protocol::T_STRING, 3);
+  xfer += oprot->writeString(this->tableType);
+  xfer += oprot->writeFieldEnd();
+
+  xfer += oprot->writeFieldStop();
+  xfer += oprot->writeStructEnd();
+  return xfer;
+}
+
+
+ThriftHiveMetastore_get_tables_by_type_pargs::~ThriftHiveMetastore_get_tables_by_type_pargs() throw() {
+}
+
+
+uint32_t ThriftHiveMetastore_get_tables_by_type_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
+  uint32_t xfer = 0;
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
+  xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_tables_by_type_pargs");
+
+  xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
+  xfer += oprot->writeString((*(this->db_name)));
+  xfer += oprot->writeFieldEnd();
+
+  xfer += oprot->writeFieldBegin("pattern", ::apache::thrift::protocol::T_STRING, 2);
+  xfer += oprot->writeString((*(this->pattern)));
+  xfer += oprot->writeFieldEnd();
+
+  xfer += oprot->writeFieldBegin("tableType", ::apache::thrift::protocol::T_STRING, 3);
+  xfer += oprot->writeString((*(this->tableType)));
+  xfer += oprot->writeFieldEnd();
+
+  xfer += oprot->writeFieldStop();
+  xfer += oprot->writeStructEnd();
+  return xfer;
+}
+
+
+ThriftHiveMetastore_get_tables_by_type_result::~ThriftHiveMetastore_get_tables_by_type_result() throw() {
+}
+
+
+uint32_t ThriftHiveMetastore_get_tables_by_type_result::read(::apache::thrift::protocol::TProtocol* iprot) {
+
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
+  uint32_t xfer = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TType ftype;
+  int16_t fid;
+
+  xfer += iprot->readStructBegin(fname);
+
+  using ::apache::thrift::protocol::TProtocolException;
+
+
+  while (true)
+  {
+xfer += iprot->readFieldBegin(fname, ftype, fid);
+if (ftype == ::apache::thrift::protocol::T_STOP) {
+  break;
+}
+switch (fid)
+{
+  case 0:
+if (ftype == ::apache::thrift::protocol::T_LIST) {
+  {
+this->success.clear();
+uint32_t _size887;

[11/38] hive git commit: HIVE-14849: Support google-compute-engine provider on Hive ptest framework (Sergio Pena, reviewed by Prasanth Jayachandran)

2016-10-05 Thread khorgath
HIVE-14849: Support google-compute-engine provider on Hive ptest framework (Sergio Pena, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/291f3d50
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/291f3d50
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/291f3d50

Branch: refs/heads/repl2
Commit: 291f3d503d5a8627f86ef5f7fdd7880d8da4760c
Parents: cf72a73
Author: Sergio Pena 
Authored: Wed Sep 28 21:33:00 2016 -0500
Committer: Sergio Pena 
Committed: Wed Sep 28 21:33:00 2016 -0500

--
 .../ptest2/conf/cloudhost.properties.example|  37 +++
 testutils/ptest2/pom.xml|   5 +
 .../execution/context/CloudComputeService.java  | 224 +++
 .../context/CloudExecutionContextProvider.java  | 105 +++--
 4 files changed, 311 insertions(+), 60 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/291f3d50/testutils/ptest2/conf/cloudhost.properties.example
--
diff --git a/testutils/ptest2/conf/cloudhost.properties.example b/testutils/ptest2/conf/cloudhost.properties.example
new file mode 100644
index 000..c336052
--- /dev/null
+++ b/testutils/ptest2/conf/cloudhost.properties.example
@@ -0,0 +1,37 @@
+#
+# This is just an example of different cloudhost providers
+#
+
+# This context provides configurations for AWS EC2 and GCE (google compute engine)
+executionContextProvider = org.apache.hive.ptest.execution.context.CloudExecutionContextProvider$Builder
+
+# Option: GCE
+cloudProvider = google-compute-engine
+gceJsonFile = # GCE JSON KEY FILE
+instanceType = https://www.googleapis.com/compute/v1/projects//zones/us-central1-a/machineTypes/n1-standard-8
+imageId = https://www.googleapis.com/compute/v1/projects//global/images/hive-ptest-debian-8-20160927
+# keyPair = # UNUSED
+securityGroup = hive-ptest
+
+# Option: AWS
+cloudProvider = aws-ec2
+apiKey =# AWS ACCESS KEY
+accessKey = # AWS SECRET ACCESS KEY
+instanceType = c3.2xlarge
+imageId = us-west-1/ami-1fa1445b
+keyPair = hive-ptest
+securityGroup = hive-ptest
+
+# Generic options
+workingDirectory = /data/hive-ptest
+profileDirectory = /usr/local/hiveptest/etc/public/
+privateKey = /home/hiveptest/.ssh/hive-ptest-user-key
+dataDir = /data/hive-ptest/data/
+numHosts = 12
+groupName = hive-ptest-slaves
+localDirs = /home/hiveptest/
+user = hiveptest
+numThreads = 2
+maxLogDirectoriesPerProfile = 30
+userMetadata.owner = # USER
+maxHostsPerCreateRequest = 12
\ No newline at end of file
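These properties only select and parameterize a provider; CloudComputeService does the wiring through jclouds, as the imports in the Java diff below suggest. A rough sketch of building a google-compute-engine compute context from the JSON key file (simplified; assumes the jclouds 1.9-era API visible in this patch):

import java.io.File;
import java.nio.charset.StandardCharsets;

import com.google.common.base.Supplier;
import com.google.common.io.Files;
import org.jclouds.ContextBuilder;
import org.jclouds.compute.ComputeService;
import org.jclouds.compute.ComputeServiceContext;
import org.jclouds.domain.Credentials;
import org.jclouds.googlecloud.GoogleCredentialsFromJson;

public class GceContextSketch {
  public static ComputeService create(String gceJsonFile) throws Exception {
    // gceJsonFile is the JSON credential referenced by the properties above
    String json = Files.toString(new File(gceJsonFile), StandardCharsets.UTF_8);
    Supplier<Credentials> credentials = new GoogleCredentialsFromJson(json);
    ComputeServiceContext context = ContextBuilder
        .newBuilder("google-compute-engine")   // matches cloudProvider above
        .credentialsSupplier(credentials)
        .buildView(ComputeServiceContext.class);
    return context.getComputeService();
  }
}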

http://git-wip-us.apache.org/repos/asf/hive/blob/291f3d50/testutils/ptest2/pom.xml
--
diff --git a/testutils/ptest2/pom.xml b/testutils/ptest2/pom.xml
index cea29b6..97981fb 100644
--- a/testutils/ptest2/pom.xml
+++ b/testutils/ptest2/pom.xml
@@ -107,6 +107,11 @@ limitations under the License.
       <version>${jclouds.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.jclouds.labs</groupId>
+      <artifactId>google-compute-engine</artifactId>
+      <version>${jclouds.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.jclouds.driver</groupId>
       <artifactId>jclouds-sshj</artifactId>
       <version>${jclouds.version}</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/291f3d50/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
--
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
index 64ee68e..e26c5ca 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
@@ -18,11 +18,13 @@
  */
 package org.apache.hive.ptest.execution.context;
 
-import java.util.Collections;
-import java.util.Properties;
-import java.util.Map;
-import java.util.Set;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
 
+import com.google.common.base.Supplier;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.io.Files;
 import org.jclouds.Constants;
 import org.jclouds.ContextBuilder;
 import org.jclouds.aws.ec2.compute.AWSEC2TemplateOptions;
@@ -34,7 +36,12 @@ import org.jclouds.compute.domain.ComputeMetadata;
 import org.jclouds.compute.domain.NodeMetadata;
 import org.jclouds.compute.domain.NodeMetadata.Status;
 import org.jclouds.compute.domain.Template;
+import org.jclouds.compute.options.TemplateOptions;
+import org.jclouds.domain.Credentials;
+import org.jclouds.googlecloud.GoogleCredentialsFromJson;
+import 

[28/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index cb5dec9..d827d6c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -92,6 +92,8 @@ public class ThriftHiveMetastore {
 
 public List get_tables(String db_name, String pattern) throws MetaException, org.apache.thrift.TException;

+public List get_tables_by_type(String db_name, String pattern, String tableType) throws MetaException, org.apache.thrift.TException;
+
 public List get_table_meta(String db_patterns, String tbl_patterns, List tbl_types) throws MetaException, org.apache.thrift.TException;

 public List get_all_tables(String db_name) throws MetaException, org.apache.thrift.TException;
@@ -394,6 +396,8 @@ public class ThriftHiveMetastore {
 
 public void get_tables(String db_name, String pattern, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

+public void get_tables_by_type(String db_name, String pattern, String tableType, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
 public void get_table_meta(String db_patterns, String tbl_patterns, List tbl_types, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

 public void get_all_tables(String db_name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
@@ -1392,6 +1396,34 @@ public class ThriftHiveMetastore {
   throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_tables failed: unknown result");
 }

+public List get_tables_by_type(String db_name, String pattern, String tableType) throws MetaException, org.apache.thrift.TException
+{
+  send_get_tables_by_type(db_name, pattern, tableType);
+  return recv_get_tables_by_type();
+}
+
+public void send_get_tables_by_type(String db_name, String pattern, String tableType) throws org.apache.thrift.TException
+{
+  get_tables_by_type_args args = new get_tables_by_type_args();
+  args.setDb_name(db_name);
+  args.setPattern(pattern);
+  args.setTableType(tableType);
+  sendBase("get_tables_by_type", args);
+}
+
+public List recv_get_tables_by_type() throws MetaException, org.apache.thrift.TException
+{
+  get_tables_by_type_result result = new get_tables_by_type_result();
+  receiveBase(result, "get_tables_by_type");
+  if (result.isSetSuccess()) {
+    return result.success;
+  }
+  if (result.o1 != null) {
+    throw result.o1;
+  }
+  throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_tables_by_type failed: unknown result");
+}
+
 public List get_table_meta(String db_patterns, String tbl_patterns, List tbl_types) throws MetaException, org.apache.thrift.TException
 {
   send_get_table_meta(db_patterns, tbl_patterns, tbl_types);
@@ -5835,6 +5867,44 @@ public class ThriftHiveMetastore {
   }
 }
 
+public void get_tables_by_type(String db_name, String pattern, String tableType, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+  checkReady();
+  get_tables_by_type_call method_call = new get_tables_by_type_call(db_name, pattern, tableType, resultHandler, this, ___protocolFactory, ___transport);
+  this.___currentMethod = method_call;
+  ___manager.call(method_call);
+}
+
+public static class get_tables_by_type_call extends org.apache.thrift.async.TAsyncMethodCall {
+  private String db_name;
+  private String pattern;
+  private String tableType;
+  public get_tables_by_type_call(String db_name, String pattern, String tableType, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+    super(client, protocolFactory, transport, resultHandler, false);
+    this.db_name = db_name;
+    this.pattern = pattern;
+    this.tableType = tableType;
+  }
+
+  public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+
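The generated listing breaks off above, but the client surface is complete enough to use. A hedged sketch of calling the new method through the synchronous Java client (host and port are illustrative; "VIRTUAL_VIEW" matches TableType.VIRTUAL_VIEW.toString()):

import java.util.List;

import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class GetTablesByTypeSketch {
  public static void main(String[] args) throws Exception {
    // assumes a metastore listening on the default port 9083
    TTransport transport = new TSocket("localhost", 9083);
    transport.open();
    ThriftHiveMetastore.Client client =
        new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
    List<String> views = client.get_tables_by_type("default", ".*", "VIRTUAL_VIEW");
    System.out.println(views);
    transport.close();
  }
}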

[25/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index de6adb5..691c3a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1313,7 +1313,7 @@ public class Hive {
* @throws HiveException
*/
   public List getAllTables() throws HiveException {
-return getAllTables(SessionState.get().getCurrentDatabase());
+return getTablesByType(SessionState.get().getCurrentDatabase(), null, null);
   }
 
   /**
@@ -1323,7 +1323,7 @@ public class Hive {
* @throws HiveException
*/
   public List getAllTables(String dbName) throws HiveException {
-return getTablesByPattern(dbName, ".*");
+return getTablesByType(dbName, ".*", null);
   }
 
   /**
@@ -1336,8 +1336,8 @@ public class Hive {
* @throws HiveException
*/
   public List getTablesByPattern(String tablePattern) throws HiveException {
-return getTablesByPattern(SessionState.get().getCurrentDatabase(),
-tablePattern);
+return getTablesByType(SessionState.get().getCurrentDatabase(),
+tablePattern, null);
   }
 
   /**
@@ -1349,11 +1349,7 @@ public class Hive {
* @throws HiveException
*/
   public List getTablesByPattern(String dbName, String tablePattern) throws HiveException {
-try {
-  return getMSC().getTables(dbName, tablePattern);
-} catch (Exception e) {
-  throw new HiveException(e);
-}
+return getTablesByType(dbName, tablePattern, null);
   }
 
   /**
@@ -1369,8 +1365,38 @@ public class Hive {
*/
   public List getTablesForDb(String database, String tablePattern)
   throws HiveException {
+return getTablesByType(database, tablePattern, null);
+  }
+
+  /**
+   * Returns all existing tables of a type (VIRTUAL_VIEW|EXTERNAL_TABLE|MANAGED_TABLE) from the specified
+   * database which match the given pattern. The matching occurs as per Java regular expressions.
+   * @param dbName Database name to find the tables in. if null, uses the current database in this session.
+   * @param pattern A pattern to match for the table names. If null, returns all names from this DB.
+   * @param type The type of tables to return. VIRTUAL_VIEWS for views. If null, returns all tables and views.
+   * @return list of table names that match the pattern.
+   * @throws HiveException
+   */
+  public List getTablesByType(String dbName, String pattern, TableType type)
+  throws HiveException {
+List retList = new ArrayList();
+if (dbName == null)
+  dbName = SessionState.get().getCurrentDatabase();
+
 try {
-  return getMSC().getTables(database, tablePattern);
+  if (type != null) {
+if (pattern != null) {
+  return getMSC().getTables(dbName, pattern, type);
+} else {
+  return getMSC().getTables(dbName, ".*", type);
+}
+  } else {
+if (pattern != null) {
+  return getMSC().getTables(dbName, pattern);
+} else {
+  return getMSC().getTables(dbName, ".*");
+}
+  }
 } catch (Exception e) {
   throw new HiveException(e);
 }
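With that, every listing variant funnels through getTablesByType. A hedged usage sketch (session setup omitted; Hive.get() assumes a configured SessionState, as in Hive's own callers):

import java.util.List;

import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class ListViewsSketch {
  public static void main(String[] args) throws Exception {
    Hive db = Hive.get();  // requires an initialized Hive session
    // null pattern falls back to ".*"; null type returns tables and views alike
    List<String> views = db.getTablesByType("default", null, TableType.VIRTUAL_VIEW);
    System.out.println(views);
  }
}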

http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 8b0db4a..a264c4d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -394,6 +394,10 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
   ctx.setResFile(ctx.getLocalTmpPath());
   analyzeShowConf(ast);
   break;
+case HiveParser.TOK_SHOWVIEWS:
+  ctx.setResFile(ctx.getLocalTmpPath());
+  analyzeShowViews(ast);
+  break;
 case HiveParser.TOK_DESCFUNCTION:
   ctx.setResFile(ctx.getLocalTmpPath());
   analyzeDescFunction(ast);
@@ -2402,6 +2406,45 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
 setFetchTask(createFetchTask(showConfDesc.getSchema()));
   }
 
+  private void analyzeShowViews(ASTNode ast) throws SemanticException {
+ShowTablesDesc showViewsDesc;
+String dbName = SessionState.get().getCurrentDatabase();
+String viewNames = null;
+
+if (ast.getChildCount() > 3) {
+  throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
+}
+
+switch (ast.getChildCount()) {
+case 1: // Uses a pattern
+  viewNames = unescapeSQLString(ast.getChild(0).getText());
+  showViewsDesc = new ShowTablesDesc(ctx.getResFile(), 

[32/38] hive git commit: HIVE-14806: Support UDTF in CBO (AST return path) (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

2016-10-05 Thread khorgath
HIVE-14806: Support UDTF in CBO (AST return path) (Pengcheng Xiong, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/efe9c84e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/efe9c84e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/efe9c84e

Branch: refs/heads/repl2
Commit: efe9c84e7bd30de537b0b2c0a224cb47b16b7618
Parents: 21a0142
Author: Pengcheng Xiong 
Authored: Tue Oct 4 11:00:00 2016 -0700
Committer: Pengcheng Xiong 
Committed: Tue Oct 4 11:00:00 2016 -0700

--
 .../reloperators/HiveTableFunctionScan.java |  77 +++
 .../calcite/translator/ASTConverter.java|  83 ++--
 .../translator/SqlFunctionConverter.java|   9 +
 .../hadoop/hive/ql/parse/CalcitePlanner.java| 211 +--
 .../clientpositive/allcolref_in_udf.q.out   |  80 ---
 .../clientpositive/lateral_view_noalias.q.out   |  12 +-
 .../test/results/clientpositive/ppd_udtf.q.out  |   4 +-
 .../results/clientpositive/udf_inline.q.out |  12 +-
 .../results/clientpositive/udtf_explode.q.out   |  76 ---
 .../clientpositive/udtf_json_tuple.q.out|  12 +-
 .../clientpositive/udtf_parse_url_tuple.q.out   |  12 +-
 11 files changed, 478 insertions(+), 110 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/efe9c84e/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableFunctionScan.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableFunctionScan.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableFunctionScan.java
new file mode 100644
index 000..bf4896d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveTableFunctionScan.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.optimizer.calcite.reloperators;
+
+import java.lang.reflect.Type;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.plan.RelTraitSet;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.TableFunctionScan;
+import org.apache.calcite.rel.metadata.RelColumnMapping;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rex.RexNode;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+
+public class HiveTableFunctionScan extends TableFunctionScan implements HiveRelNode {
+
+  /**
+   * @param cluster
+   *  cluster - Cluster that this relational expression belongs to
+   * @param traitSet
+   * @param inputs
+   *  inputs - 0 or more relational inputs
+   * @param rexCall
+   *  rexCall - Function invocation expression
+   * @param elementType
+   *  elementType - Element type of the collection that will implement
+   *  this table
+   * @param rowType
+   *  rowType - Row type produced by function
+   * @param columnMappings
+   *  columnMappings - Column mappings associated with this function
+   */
+  public HiveTableFunctionScan(RelOptCluster cluster, RelTraitSet traitSet, List inputs,
+      RexNode rexCall, Type elementType, RelDataType rowType, Set columnMappings) {
+    super(cluster, traitSet, inputs, rexCall, elementType, rowType, columnMappings);
+  }
+
+  public static HiveTableFunctionScan create(RelOptCluster cluster, RelTraitSet traitSet,
+      List inputs, RexNode rexCall, Type elementType, RelDataType rowType,
+      Set columnMappings) throws CalciteSemanticException {
+    HiveTableFunctionScan hiveTableFunctionScan = new HiveTableFunctionScan(cluster, traitSet,
+        inputs, rexCall, elementType, rowType, columnMappings);
+    return hiveTableFunctionScan;
+  }
+
+  @Override
+  public TableFunctionScan copy(RelTraitSet traitSet, List inputs, RexNode rexCall,
+  Type 
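The copy() override is truncated above. For context, a hedged fragment showing how a planner would wrap a UDTF invocation with the create() factory from this hunk; every value here is a placeholder supplied by the surrounding Calcite planning context (e.g. CalcitePlanner), so this is not runnable on its own:

// Illustrative fragment only:
HiveTableFunctionScan scan = HiveTableFunctionScan.create(
    cluster,                                      // RelOptCluster of the query
    traitSet,                                     // traits carrying the Hive convention
    java.util.Collections.singletonList(input),   // the single relational input
    udtfCall,                                     // RexCall invoking the UDTF
    null,                                         // elementType: unused here
    outputRowType,                                // RelDataType of the UDTF output row
    null);                                        // columnMappings: none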

[16/38] hive git commit: HIVE-14819: FunctionInfo for permanent functions shows TEMPORARY FunctionType (Jason Dere, reviewed by Sergey Shelukhin)

2016-10-05 Thread khorgath
HIVE-14819: FunctionInfo for permanent functions shows TEMPORARY FunctionType (Jason Dere, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/74a6ff67
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/74a6ff67
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/74a6ff67

Branch: refs/heads/repl2
Commit: 74a6ff678f9312d946a7d55d73bf4a60127de763
Parents: 474425a
Author: Jason Dere 
Authored: Thu Sep 29 13:35:17 2016 -0700
Committer: Jason Dere 
Committed: Thu Sep 29 13:35:17 2016 -0700

--
 .../hadoop/hive/ql/exec/FunctionInfo.java   |  20 ++--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   3 +-
 .../apache/hadoop/hive/ql/exec/Registry.java|  96 ++
 .../hadoop/hive/ql/exec/WindowFunctionInfo.java |   4 +-
 .../translator/SqlFunctionConverter.java|   2 +-
 .../hive/ql/exec/TestFunctionRegistry.java  | 100 +++
 6 files changed, 194 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/74a6ff67/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
index 30ba996..8014dab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
@@ -73,36 +73,36 @@ public class FunctionInfo {
 this.discarded = new AtomicBoolean(false);  // shared to all session functions
   }
 
-  public FunctionInfo(boolean isNative, String displayName,
+  public FunctionInfo(FunctionType functionType, String displayName,
   GenericUDF genericUDF, FunctionResource... resources) {
-this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.genericUDF = genericUDF;
 this.isInternalTableFunction = false;
 this.resources = resources;
   }
 
-  public FunctionInfo(boolean isNative, String displayName,
+  public FunctionInfo(FunctionType functionType, String displayName,
   GenericUDAFResolver genericUDAFResolver, FunctionResource... resources) {
-this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.genericUDAFResolver = genericUDAFResolver;
 this.isInternalTableFunction = false;
 this.resources = resources;
   }
 
-  public FunctionInfo(boolean isNative, String displayName,
+  public FunctionInfo(FunctionType functionType, String displayName,
   GenericUDTF genericUDTF, FunctionResource... resources) {
-this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.genericUDTF = genericUDTF;
 this.isInternalTableFunction = false;
 this.resources = resources;
   }
 
-  public FunctionInfo(boolean isNative, String displayName, Class tFnCls,
+  public FunctionInfo(FunctionType functionType, String displayName, Class tFnCls,
   FunctionResource... resources) {
-this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.tableFunctionResolver = tFnCls;
 PartitionTableFunctionDescription def = AnnotationUtils.getAnnotation(
@@ -263,6 +263,10 @@ public class FunctionInfo {
 }
   }
 
+  public FunctionType getFunctionType() {
+return functionType;
+  }
+
   public static class FunctionResource {
 private final SessionState.ResourceType resourceType;
 private final String resourceURI;
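The net effect of the hunk: callers state the function's real lifetime instead of a boolean that collapsed everything to BUILTIN or TEMPORARY. A hedged fragment (PERSISTENT is assumed from Hive's FunctionType enum; the GenericUDF instance is a placeholder):

// Before: new FunctionInfo(true /* isNative */, "my_func", udf) could only
// record BUILTIN or TEMPORARY. After this patch the registry can say what
// it means:
FunctionInfo permanent = new FunctionInfo(
    FunctionInfo.FunctionType.PERSISTENT,  // real lifetime, not a boolean guess
    "my_func",
    udf);                                  // placeholder GenericUDF instance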

http://git-wip-us.apache.org/repos/asf/hive/blob/74a6ff67/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index de74c3e..b277f5e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -1535,7 +1535,8 @@ public final class FunctionRegistry {
 }
 
 if (clazz != null) {
-  return system.isPermanentFunc(clazz);
+  // Use session registry - see Registry.isPermanentFunc()
+  return SessionState.getRegistryForWrite().isPermanentFunc(clazz);
 }
 return false;
   }


[04/38] hive git commit: HIVE-14029: Update Spark version to 2.0.0 (Ferdinand Xu, via Li Rui, Szehon Ho and Sergio Pena)

2016-10-05 Thread khorgath
HIVE-14029: Update Spark version to 2.0.0 (Ferdinand Xu, via Li Rui, Szehon Ho and Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ac977cc8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ac977cc8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ac977cc8

Branch: refs/heads/repl2
Commit: ac977cc88757b49fbbd5c3bb236adcedcaae396c
Parents: 7d3da17
Author: Ferdinand Xu 
Authored: Wed Sep 28 01:44:32 2016 +0800
Committer: Ferdinand Xu 
Committed: Wed Sep 28 01:44:32 2016 +0800

--
 pom.xml | 12 ++-
 ql/pom.xml  | 26 +-
 .../exec/spark/HiveBaseFunctionResultList.java  | 96 +---
 .../hive/ql/exec/spark/HiveMapFunction.java |  2 +-
 .../hive/ql/exec/spark/HiveReduceFunction.java  |  2 +-
 .../hive/ql/exec/spark/SortByShuffler.java  | 84 -
 .../spark/status/impl/JobMetricsListener.java   |  4 +-
 .../ql/exec/spark/TestHiveKVResultCache.java|  5 +-
 spark-client/pom.xml| 15 ++-
 .../hive/spark/client/MetricsCollection.java|  8 +-
 .../apache/hive/spark/client/RemoteDriver.java  |  4 +-
 .../hive/spark/client/metrics/InputMetrics.java |  9 +-
 .../hive/spark/client/metrics/Metrics.java  |  6 +-
 .../client/metrics/ShuffleReadMetrics.java  | 18 ++--
 .../client/metrics/ShuffleWriteMetrics.java |  4 +-
 .../spark/client/TestMetricsCollection.java |  8 +-
 16 files changed, 153 insertions(+), 150 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ac977cc8/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 2fb78cd..756cc34 100644
--- a/pom.xml
+++ b/pom.xml
@@ -145,7 +145,7 @@
 2.4.0
 1.9.13
 
-2.4.2
+2.6.5
 5.5.23
 2.3.4
 2.3.1
@@ -155,6 +155,8 @@
 3.0.1
 7.6.0.v20120127
 1.14
+
+2.22.2
 2.12
 1.1
 2.8.1
@@ -168,7 +170,7 @@
 2.3
 1.9.5
 2.0.0-M5
-4.0.23.Final
+4.0.29.Final
 1.8.1
 0.16.0
 2.5.0
@@ -178,9 +180,9 @@
 0.8.4
 0.90.2-incubating
 2.2.0
-1.6.0
-2.10
-2.10.4
+2.0.0
+2.11
+2.11.8
 1.1
 0.2
 1.4

http://git-wip-us.apache.org/repos/asf/hive/blob/ac977cc8/ql/pom.xml
--
diff --git a/ql/pom.xml b/ql/pom.xml
index 02ddb80..2a93bb7 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -361,7 +361,7 @@
   ${calcite.version}
   
 
 
   org.hsqldb
@@ -380,14 +380,14 @@
   jackson-core
 
   
-   
+
 
   org.apache.calcite
   calcite-avatica
   ${calcite.version}
   
 
 
   org.hsqldb
@@ -685,6 +685,14 @@
  commmons-logging
  commons-logging

+   
+ org.glassfish.jersey.containers
+ *
+   
+   
+ org.glassfish.jersey.core
+ *
+   
  

 
@@ -692,6 +700,18 @@
   jersey-servlet
   test
 
+
+  org.glassfish.jersey.core
+  jersey-server
+  ${glassfish.jersey.version}
+  test
+
+
+  org.glassfish.jersey.containers
+  jersey-container-servlet-core
+  ${glassfish.jersey.version}
+  test
+
   
 
   

http://git-wip-us.apache.org/repos/asf/hive/blob/ac977cc8/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
index 5b65036..0fc79f4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
@@ -38,15 +38,14 @@ import com.google.common.base.Preconditions;
  * through Iterator interface.
  */
 @SuppressWarnings("rawtypes")
-public abstract class HiveBaseFunctionResultList implements
-Iterable, OutputCollector, Serializable {
+public abstract class HiveBaseFunctionResultList
+  implements Iterator, OutputCollector, Serializable {
   private static final long serialVersionUID = -1L;
   private final Iterator inputIterator;
   private boolean isClosed = false;
 
   // Contains results from last processed input record.
   private final HiveKVResultCache lastRecordOutput;
-  private boolean iteratorAlreadyCreated = false;
 
   public HiveBaseFunctionResultList(Iterator inputIterator) {
 this.inputIterator = inputIterator;
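The class-declaration change above (Iterable to Iterator) tracks a Spark 2.0 API shift: partition-level Java functions now hand Spark a single Iterator of results instead of an Iterable that could be re-iterated. A minimal plain-Java sketch of the shape this forces on result collections:

import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;

final class CountingResults implements Iterator<String> {
  private final Iterator<String> input;
  private int emitted = 0;

  CountingResults(Iterator<String> input) { this.input = input; }

  @Override public boolean hasNext() { return input.hasNext(); }

  @Override public String next() {
    if (!hasNext()) throw new NoSuchElementException();
    // results are produced lazily while consuming the input iterator
    return (emitted++) + ":" + input.next();
  }

  public static void main(String[] args) {
    // Spark 2.x consumes the iterator exactly once; no second pass is
    // possible, which is the constraint the Hive patch accommodates.
    new CountingResults(Arrays.asList("a", "b").iterator())
        .forEachRemaining(System.out::println);
  }
}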

[29/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
index 6498eb1..525408b 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
@@ -47,6 +47,7 @@ class ThriftHiveMetastoreIf : virtual public ::facebook::fb303::FacebookService
   virtual void drop_table(const std::string& dbname, const std::string& name, const bool deleteData) = 0;
   virtual void drop_table_with_environment_context(const std::string& dbname, const std::string& name, const bool deleteData, const EnvironmentContext& environment_context) = 0;
   virtual void get_tables(std::vector & _return, const std::string& db_name, const std::string& pattern) = 0;
+  virtual void get_tables_by_type(std::vector & _return, const std::string& db_name, const std::string& pattern, const std::string& tableType) = 0;
   virtual void get_table_meta(std::vector & _return, const std::string& db_patterns, const std::string& tbl_patterns, const std::vector & tbl_types) = 0;
   virtual void get_all_tables(std::vector & _return, const std::string& db_name) = 0;
   virtual void get_table(Table& _return, const std::string& dbname, const std::string& tbl_name) = 0;
@@ -277,6 +278,9 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
   void get_tables(std::vector & /* _return */, const std::string& /* db_name */, const std::string& /* pattern */) {
     return;
   }
+  void get_tables_by_type(std::vector & /* _return */, const std::string& /* db_name */, const std::string& /* pattern */, const std::string& /* tableType */) {
+    return;
+  }
   void get_table_meta(std::vector & /* _return */, const std::string& /* db_patterns */, const std::string& /* tbl_patterns */, const std::vector & /* tbl_types */) {
     return;
   }
@@ -3758,6 +3762,132 @@ class ThriftHiveMetastore_get_tables_presult {
 
 };
 
+typedef struct _ThriftHiveMetastore_get_tables_by_type_args__isset {
+  _ThriftHiveMetastore_get_tables_by_type_args__isset() : db_name(false), pattern(false), tableType(false) {}
+  bool db_name :1;
+  bool pattern :1;
+  bool tableType :1;
+} _ThriftHiveMetastore_get_tables_by_type_args__isset;
+
+class ThriftHiveMetastore_get_tables_by_type_args {
+ public:
+
+  ThriftHiveMetastore_get_tables_by_type_args(const ThriftHiveMetastore_get_tables_by_type_args&);
+  ThriftHiveMetastore_get_tables_by_type_args& operator=(const ThriftHiveMetastore_get_tables_by_type_args&);
+  ThriftHiveMetastore_get_tables_by_type_args() : db_name(), pattern(), tableType() {
+  }
+
+  virtual ~ThriftHiveMetastore_get_tables_by_type_args() throw();
+  std::string db_name;
+  std::string pattern;
+  std::string tableType;
+
+  _ThriftHiveMetastore_get_tables_by_type_args__isset __isset;
+
+  void __set_db_name(const std::string& val);
+
+  void __set_pattern(const std::string& val);
+
+  void __set_tableType(const std::string& val);
+
+  bool operator == (const ThriftHiveMetastore_get_tables_by_type_args & rhs) const
+  {
+if (!(db_name == rhs.db_name))
+  return false;
+if (!(pattern == rhs.pattern))
+  return false;
+if (!(tableType == rhs.tableType))
+  return false;
+return true;
+  }
+  bool operator != (const ThriftHiveMetastore_get_tables_by_type_args &rhs) const {
+return !(*this == rhs);
+  }
+
+  bool operator < (const ThriftHiveMetastore_get_tables_by_type_args & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+
+class ThriftHiveMetastore_get_tables_by_type_pargs {
+ public:
+
+
+  virtual ~ThriftHiveMetastore_get_tables_by_type_pargs() throw();
+  const std::string* db_name;
+  const std::string* pattern;
+  const std::string* tableType;
+
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+typedef struct _ThriftHiveMetastore_get_tables_by_type_result__isset {
+  _ThriftHiveMetastore_get_tables_by_type_result__isset() : success(false), o1(false) {}
+  bool success :1;
+  bool o1 :1;
+} _ThriftHiveMetastore_get_tables_by_type_result__isset;
+
+class ThriftHiveMetastore_get_tables_by_type_result {
+ public:
+
+  ThriftHiveMetastore_get_tables_by_type_result(const ThriftHiveMetastore_get_tables_by_type_result&);
+  ThriftHiveMetastore_get_tables_by_type_result& operator=(const ThriftHiveMetastore_get_tables_by_type_result&);
+  ThriftHiveMetastore_get_tables_by_type_result() {
+  }
+
+  virtual ~ThriftHiveMetastore_get_tables_by_type_result() throw();
+  std::vector  success;
+  MetaException o1;
+
+  _ThriftHiveMetastore_get_tables_by_type_result__isset __isset;
+
+  void __set_success(const std::vector & val);
+
+  void __set_o1(const 

[19/38] hive git commit: HIVE-14775: Cleanup IOException usage in Metrics APIs (Barna Zsombor Klara reviewed by Peter Vary, Gabor Szadovszky, Szehon Ho, Mohit Sabharwal)

2016-10-05 Thread khorgath
HIVE-14775: Cleanup IOException usage in Metrics APIs (Barna Zsombor Klara reviewed by Peter Vary, Gabor Szadovszky, Szehon Ho, Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f903c4af
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f903c4af
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f903c4af

Branch: refs/heads/repl2
Commit: f903c4afad360ea66ec266abe8a3f414935c82ff
Parents: 45c1a09
Author: Mohit Sabharwal 
Authored: Fri Sep 30 15:13:14 2016 -0400
Committer: Mohit Sabharwal 
Committed: Fri Sep 30 15:13:14 2016 -0400

--
 .../hive/common/metrics/LegacyMetrics.java  |  96 ++---
 .../hive/common/metrics/MetricsMBean.java   |  13 +--
 .../hive/common/metrics/MetricsMBeanImpl.java   |  16 +--
 .../hive/common/metrics/common/Metrics.java |  31 ++
 .../metrics/metrics2/CodahaleMetrics.java   |  70 ++---
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |  33 ++
 .../hive/common/metrics/TestLegacyMetrics.java  | 103 ++-
 .../hive/metastore/HMSMetricsListener.java  |  52 ++
 .../hadoop/hive/metastore/HiveMetaStore.java|  13 +--
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  13 +--
 .../hadoop/hive/ql/exec/mr/MapRedTask.java  |   6 +-
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |   6 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java|   6 +-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |   6 +-
 .../hive/service/cli/operation/Operation.java   |  22 ++--
 15 files changed, 176 insertions(+), 310 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f903c4af/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
index 9be9b50..ba2267b 100644
--- a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
+++ b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
@@ -21,11 +21,13 @@ import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.common.metrics.common.MetricsVariable;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.util.HashMap;
 
+import javax.management.JMException;
 import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
@@ -47,6 +49,8 @@ import javax.management.ObjectName;
  */
 public class LegacyMetrics implements Metrics {
 
+  private static final Logger LOG = LoggerFactory.getLogger(LegacyMetrics.class);
+
   private LegacyMetrics() {
 // block
   }
@@ -59,12 +63,12 @@ public class LegacyMetrics implements Metrics {
*/
   public static class LegacyMetricsScope implements MetricsScope {
 
-final LegacyMetrics metrics;
+private final LegacyMetrics metrics;
 
-final String name;
-final String numCounter;
-final String timeCounter;
-final String avgTimeCounter;
+private final String name;
+private final String numCounter;
+private final String timeCounter;
+private final String avgTimeCounter;
 
 private boolean isOpen = false;
 private Long startTime = null;
@@ -72,9 +76,8 @@ public class LegacyMetrics implements Metrics {
 /**
  * Instantiates a named scope - intended to only be called by Metrics, so 
locally scoped.
  * @param name - name of the variable
- * @throws IOException
  */
-    private LegacyMetricsScope(String name, LegacyMetrics metrics) throws IOException {
+    private LegacyMetricsScope(String name, LegacyMetrics metrics) {
   this.metrics = metrics;
   this.name = name;
   this.numCounter = name + ".n";
@@ -83,33 +86,41 @@ public class LegacyMetrics implements Metrics {
   open();
 }
 
-    public Long getNumCounter() throws IOException {
-      return (Long) metrics.get(numCounter);
+    public Long getNumCounter() {
+      try {
+        return (Long) metrics.get(numCounter);
+      } catch (JMException e) {
+        LOG.warn("Could not find counter value for " + numCounter + ", returning null instead. ", e);
+        return null;
+      }
     }
 
-    public Long getTimeCounter() throws IOException {
-      return (Long) metrics.get(timeCounter);
+    public Long getTimeCounter() {
+      try {
+        return (Long) metrics.get(timeCounter);
+      } catch (JMException e) {
+        LOG.warn("Could not find counter value for " + timeCounter + ", returning null instead. ", e);
+        return null;
+      }
+    }

[09/38] hive git commit: HIVE-12222: Define port range in property for RPCServer (Aihua Xu, reviewed by Xuefu Zhang)

2016-10-05 Thread khorgath
HIVE-12222: Define port range in property for RPCServer (Aihua Xu, reviewed by Xuefu Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e2bd513a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e2bd513a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e2bd513a

Branch: refs/heads/repl2
Commit: e2bd513a3970b141576f7ead25fc6cfcc5fcda17
Parents: 667e9dd
Author: Aihua Xu 
Authored: Thu Sep 22 14:20:51 2016 -0400
Committer: Aihua Xu 
Committed: Wed Sep 28 12:07:40 2016 -0400

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  3 ++
 .../hive/spark/client/rpc/RpcConfiguration.java | 38 +
 .../apache/hive/spark/client/rpc/RpcServer.java | 44 +---
 .../apache/hive/spark/client/rpc/TestRpc.java   | 37 +++-
 4 files changed, 115 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e2bd513a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 43a16d7..4c3ef3e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3080,6 +3080,9 @@ public class HiveConf extends Configuration {
   "Default is empty, which means the address will be determined in the 
same way as for hive.server2.thrift.bind.host." +
   "This is only necessary if the host has mutiple network addresses and if 
a different network address other than " +
   "hive.server2.thrift.bind.host is to be used."),
+SPARK_RPC_SERVER_PORT("hive.spark.client.rpc.server.port", "", "A list of 
port ranges which can be used by RPC server " +
+"with the format of 49152-49222,49228 and a random one is selected 
from the list. Default is empty, which randomly " +
+"selects one port from all available ones."),
 SPARK_DYNAMIC_PARTITION_PRUNING(
 "hive.spark.dynamic.partition.pruning", false,
 "When dynamic pruning is enabled, joins on partition keys will be 
processed by writing\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/e2bd513a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
--
diff --git 
a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
 
b/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
index 210f8a4..8c59015 100644
--- 
a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
+++ 
b/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
@@ -18,7 +18,9 @@
 package org.apache.hive.spark.client.rpc;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
@@ -107,6 +109,42 @@ public final class RpcConfiguration {
 return ServerUtils.getHostAddress(hiveHost).getHostName();
   }
 
+  /**
+   * Parses the port string like 49152-49222,49228 into the port list. A default 0
+   * is added for the empty port string.
+   * @return a list of configured ports.
+   * @exception IOException is thrown if the property is not configured properly
+   */
+  List<Integer> getServerPorts() throws IOException {
+    String errMsg = "Incorrect RPC server port configuration for HiveServer2";
+    String portString = config.get(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname);
+    ArrayList<Integer> ports = new ArrayList<Integer>();
+    try {
+      if(!StringUtils.isEmpty(portString)) {
+        for (String portRange : portString.split(",")) {
+          String[] range = portRange.split("-");
+          if (range.length == 0 || range.length > 2
+              || (range.length == 2 && Integer.valueOf(range[0]) > Integer.valueOf(range[1]))) {
+            throw new IOException(errMsg);
+          }
+          if (range.length == 1) {
+            ports.add(Integer.valueOf(range[0]));
+          } else {
+            for (int i = Integer.valueOf(range[0]); i <= Integer.valueOf(range[1]); i++) {
+              ports.add(i);
+            }
+          }
+        }
+      } else {
+        ports.add(0);
+      }
+
+      return ports;
+    } catch(NumberFormatException e) {
+      throw new IOException(errMsg);
+    }
+  }
+
   String getRpcChannelLogLevel() {
 return config.get(HiveConf.ConfVars.SPARK_RPC_CHANNEL_LOG_LEVEL.varname);
   }
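As a worked example of the accepted syntax, a standalone Java sketch of the same parsing rules (assumes only what the method above shows; names are illustrative):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    public class PortRangeSketch {
      // "49152-49222,49228" expands to every port in each range; empty means {0},
      // which asks the OS for any free ephemeral port.
      static List<Integer> parse(String portString) throws IOException {
        List<Integer> ports = new ArrayList<>();
        if (portString == null || portString.isEmpty()) {
          ports.add(0);
          return ports;
        }
        try {
          for (String portRange : portString.split(",")) {
            String[] range = portRange.split("-");
            if (range.length == 0 || range.length > 2
                || (range.length == 2 && Integer.parseInt(range[0]) > Integer.parseInt(range[1]))) {
              throw new IOException("Incorrect RPC server port configuration: " + portRange);
            }
            int lo = Integer.parseInt(range[0]);
            int hi = range.length == 2 ? Integer.parseInt(range[1]) : lo;
            for (int p = lo; p <= hi; p++) {
              ports.add(p);
            }
          }
        } catch (NumberFormatException e) {
          throw new IOException("Incorrect RPC server port configuration", e);
        }
        return ports;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(parse("49152-49155,49160")); // [49152, 49153, 49154, 49155, 49160]
      }
    }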

http://git-wip-us.apache.org/repos/asf/hive/blob/e2bd513a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java

[17/38] hive git commit: HIVE-14784: Operation logs are disabled automatically if the parent directory does not exist. (Naveen Gangam via Yongzhi Chen)

2016-10-05 Thread khorgath
HIVE-14784: Operation logs are disabled automatically if the parent directory 
does not exist. (Naveen Gangam via Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0562efce
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0562efce
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0562efce

Branch: refs/heads/repl2
Commit: 0562efce642e70f1ac69eae6cca8c0a63230bafd
Parents: 74a6ff6
Author: Yongzhi Chen 
Authored: Fri Sep 30 10:39:11 2016 -0400
Committer: Yongzhi Chen 
Committed: Fri Sep 30 10:41:37 2016 -0400

--
 .../org/apache/hive/service/cli/operation/Operation.java | 11 +++
 1 file changed, 11 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0562efce/service/src/java/org/apache/hive/service/cli/operation/Operation.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/Operation.java 
b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 90fe76d..6a656f9 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -234,6 +234,17 @@ public abstract class Operation {
   operationLogFile.getAbsolutePath());
   operationLogFile.delete();
 }
+    if (!operationLogFile.getParentFile().exists()) {
+      LOG.warn("Operations log directory for this session does not exist, it could have been deleted " +
+          "externally. Recreating the directory for future queries in this session but the older operation " +
+          "logs for this session are no longer available");
+      if (!operationLogFile.getParentFile().mkdir()) {
+        LOG.warn("Log directory for this session could not be created, disabling " +
+            "operation logs: " + operationLogFile.getParentFile().getAbsolutePath());
+        isOperationLogEnabled = false;
+        return;
+      }
+    }
 if (!operationLogFile.createNewFile()) {
   // the log file already exists and cannot be deleted.
   // If it can be read/written, keep its contents and use it.
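The essence of the fix is to recreate the parent directory before File.createNewFile(), which otherwise throws when the directory was removed externally. A minimal standalone sketch of that pattern, with an illustrative path:

    import java.io.File;
    import java.io.IOException;

    public class RecreateLogDirSketch {
      public static void main(String[] args) throws IOException {
        File logFile = new File(System.getProperty("java.io.tmpdir"),
            "hive-session-sketch/operation.log");
        // If the session's log dir vanished, try to bring it back before creating the file.
        if (!logFile.getParentFile().exists() && !logFile.getParentFile().mkdir()) {
          System.err.println("could not recreate log dir; disabling operation logs");
          return;
        }
        // false on a rerun simply means the file already existed.
        System.out.println("created: " + logFile.createNewFile() + " at " + logFile);
      }
    }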



[05/38] hive git commit: HIVE-14835: Improve ptest2 build time (Prasanth Jayachandran reviewed by Sergio Pena)

2016-10-05 Thread khorgath
HIVE-14835: Improve ptest2 build time (Prasanth Jayachandran reviewed by Sergio 
Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d3b88f66
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d3b88f66
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d3b88f66

Branch: refs/heads/repl2
Commit: d3b88f664415ff114de74aa2a0da2f1e1acbf60d
Parents: 0c55d46
Author: Prasanth Jayachandran 
Authored: Tue Sep 27 10:59:33 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Tue Sep 27 10:59:33 2016 -0700

--
 dev-support/jenkins-execute-build.sh   | 4 +---
 testutils/ptest2/src/main/resources/source-prep.vm | 4 ++--
 2 files changed, 3 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d3b88f66/dev-support/jenkins-execute-build.sh
--
diff --git a/dev-support/jenkins-execute-build.sh 
b/dev-support/jenkins-execute-build.sh
index 2142942..972abae 100644
--- a/dev-support/jenkins-execute-build.sh
+++ b/dev-support/jenkins-execute-build.sh
@@ -70,9 +70,7 @@ test -n "$TEST_HANDLE" || fail "TEST_HANDLE must be specified 
and cannot be empt
 test -n "$PTEST_API_ENDPOINT" || fail "PTEST_API_ENDPOINT must be specified 
and cannot be empty."
 test -n "$PTEST_LOG_ENDPOINT" || fail "PTEST_LOG_ENDPOINT must be specified 
and cannot be empty."
 
-# WORKSPACE is an environment variable created by Jenkins, and it is the directory where the build is executed.
-# If not set, then default to $HOME
-MVN_REPO_LOCAL=${WORKSPACE:-$HOME}/.m2/repository
+MVN_REPO_LOCAL=${HOME}/.m2/repository
 
 # Directory where to build the ptest framework
 PTEST_BUILD_DIR="$PWD/hive/build"

http://git-wip-us.apache.org/repos/asf/hive/blob/d3b88f66/testutils/ptest2/src/main/resources/source-prep.vm
--
diff --git a/testutils/ptest2/src/main/resources/source-prep.vm 
b/testutils/ptest2/src/main/resources/source-prep.vm
index 67e6a95..0fc22be 100644
--- a/testutils/ptest2/src/main/resources/source-prep.vm
+++ b/testutils/ptest2/src/main/resources/source-prep.vm
@@ -102,11 +102,11 @@ cd $workingDir/
 fi
   done
 #end
-mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
+mvn -B clean install -DskipTests -T 4 -q -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
 if [[ -d "itests" ]]
 then
   cd itests
-  mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
+  mvn -B clean install -DskipTests -T 4 -q -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
 fi
   elif [[ "${buildTool}" == "ant" ]]
   then



[07/38] hive git commit: Revert "HIVE-14835: Improve ptest2 build time (Prasanth Jayachandran reviewed by Sergio Pena)"

2016-10-05 Thread khorgath
Revert "HIVE-14835: Improve ptest2 build time (Prasanth Jayachandran reviewed 
by Sergio Pena)"

This reverts commit d3b88f664415ff114de74aa2a0da2f1e1acbf60d.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/871b55fa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/871b55fa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/871b55fa

Branch: refs/heads/repl2
Commit: 871b55fa17e3d63a91a34f48df0bc2156f3473ce
Parents: ac977cc
Author: Prasanth Jayachandran 
Authored: Tue Sep 27 21:42:09 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Tue Sep 27 21:42:09 2016 -0700

--
 dev-support/jenkins-execute-build.sh   | 4 +++-
 testutils/ptest2/src/main/resources/source-prep.vm | 4 ++--
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/871b55fa/dev-support/jenkins-execute-build.sh
--
diff --git a/dev-support/jenkins-execute-build.sh 
b/dev-support/jenkins-execute-build.sh
index 972abae..2142942 100644
--- a/dev-support/jenkins-execute-build.sh
+++ b/dev-support/jenkins-execute-build.sh
@@ -70,7 +70,9 @@ test -n "$TEST_HANDLE" || fail "TEST_HANDLE must be specified 
and cannot be empt
 test -n "$PTEST_API_ENDPOINT" || fail "PTEST_API_ENDPOINT must be specified 
and cannot be empty."
 test -n "$PTEST_LOG_ENDPOINT" || fail "PTEST_LOG_ENDPOINT must be specified 
and cannot be empty."
 
-MVN_REPO_LOCAL=${HOME}/.m2/repository
+# WORKSPACE is an environment variable created by Jenkins, and it is the directory where the build is executed.
+# If not set, then default to $HOME
+MVN_REPO_LOCAL=${WORKSPACE:-$HOME}/.m2/repository
 
 # Directory where to build the ptest framework
 PTEST_BUILD_DIR="$PWD/hive/build"
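The restored shell default ${WORKSPACE:-$HOME} falls back to $HOME when WORKSPACE is unset or empty, giving each Jenkins workspace its own Maven repository. The same fallback logic, sketched in Java for illustration:

    public class WorkspaceDefaultSketch {
      public static void main(String[] args) {
        // Mirrors ${WORKSPACE:-$HOME}: use WORKSPACE when set and non-empty, else HOME.
        String workspace = System.getenv("WORKSPACE");
        String base = (workspace == null || workspace.isEmpty())
            ? System.getProperty("user.home") : workspace;
        System.out.println(base + "/.m2/repository");
      }
    }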

http://git-wip-us.apache.org/repos/asf/hive/blob/871b55fa/testutils/ptest2/src/main/resources/source-prep.vm
--
diff --git a/testutils/ptest2/src/main/resources/source-prep.vm 
b/testutils/ptest2/src/main/resources/source-prep.vm
index 0fc22be..67e6a95 100644
--- a/testutils/ptest2/src/main/resources/source-prep.vm
+++ b/testutils/ptest2/src/main/resources/source-prep.vm
@@ -102,11 +102,11 @@ cd $workingDir/
 fi
   done
 #end
-mvn -B clean install -DskipTests -T 4 -q -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
+mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
 if [[ -d "itests" ]]
 then
   cd itests
-  mvn -B clean install -DskipTests -T 4 -q -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
+  mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
 fi
   elif [[ "${buildTool}" == "ant" ]]
   then



[08/38] hive git commit: HIVE-14843: HIVE-14751 introduced ambiguity in grammar (Jesus Camacho Rodriguez, reviewed by Pengcheng Xiong)

2016-10-05 Thread khorgath
HIVE-14843: HIVE-14751 introduced ambiguity in grammar (Jesus Camacho 
Rodriguez, reviewed by Pengcheng Xiong)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/667e9dd5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/667e9dd5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/667e9dd5

Branch: refs/heads/repl2
Commit: 667e9dd50ef4cfc9f743f8716da34339ec012f91
Parents: 871b55f
Author: Jesus Camacho Rodriguez 
Authored: Tue Sep 27 17:00:45 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Wed Sep 28 08:13:50 2016 +0100

--
 ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/667e9dd5/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index e6b70a0..04f87b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -347,8 +347,8 @@ intervalLiteral
 
 intervalQualifiers
 :
-    KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL
-    | KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL
+    (KW_YEAR KW_TO) => KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL
+    | (KW_DAY KW_TO) => KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL
 | KW_YEAR -> TOK_INTERVAL_YEAR_LITERAL
 | KW_MONTH -> TOK_INTERVAL_MONTH_LITERAL
 | KW_DAY -> TOK_INTERVAL_DAY_LITERAL



[10/38] hive git commit: HIVE-14824. Separate fstype from cluster type in QTestUtil. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-10-05 Thread khorgath
HIVE-14824. Separate fstype from cluster type in QTestUtil. (Siddharth Seth, 
reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cf72a737
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cf72a737
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cf72a737

Branch: refs/heads/repl2
Commit: cf72a73708b00b2e585d101258d95eb9cbd3791f
Parents: e2bd513
Author: Siddharth Seth 
Authored: Wed Sep 28 13:40:04 2016 -0700
Committer: Siddharth Seth 
Committed: Wed Sep 28 13:40:04 2016 -0700

--
 .../hive/cli/control/AbstractCliConfig.java | 10 +++
 .../hadoop/hive/cli/control/CliConfigs.java | 12 ++-
 .../hadoop/hive/cli/control/CoreCliDriver.java  |  3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 84 +++-
 4 files changed, 69 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cf72a737/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
--
diff --git 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index efbd465..03d4075 100644
--- 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -35,6 +35,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.QTestUtil;
+import org.apache.hadoop.hive.ql.QTestUtil.FsType;
 import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Sets;
@@ -63,6 +64,7 @@ public abstract class AbstractCliConfig {
   private String initScript;
   private String hiveConfDir;
   private MiniClusterType clusterType;
+  private FsType fsType;
 
   // FIXME: null value is treated differently on the other end..when those 
filter will be
   // moved...this may change
@@ -380,6 +382,14 @@ public abstract class AbstractCliConfig {
 }
   }
 
+  protected FsType getFsType() {
+return this.fsType;
+  }
+
+  protected void setFsType(FsType fsType) {
+this.fsType = fsType;
+  }
+
   private String getSysPropValue(String propName) {
 String propValue = System.getProperty(propName);
 if (propValue == null || propValue.trim().length() == 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/cf72a737/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
--
diff --git 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 0068b95..ca72282 100644
--- 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -21,6 +21,7 @@ import java.io.File;
 import java.net.MalformedURLException;
 import java.net.URL;
 
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 import org.apache.hadoop.hive.ql.parse.CoreParseNegative;
 
@@ -171,8 +172,15 @@ public class CliConfigs {
 setInitScript("q_test_init_for_encryption.sql");
 setCleanupScript("q_test_cleanup_for_encryption.sql");
 
-setHiveConfDir("data/conf");
-setClusterType(MiniClusterType.encrypted);
+
+setClusterType(MiniClusterType.mr);
+setFsType(QTestUtil.FsType.encrypted_hdfs);
+if (getClusterType() == MiniClusterType.tez) {
+  setHiveConfDir("data/conf/tez");
+} else {
+  setHiveConfDir("data/conf");
+}
+
   } catch (Exception e) {
 throw new RuntimeException("can't construct cliconfig", e);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/cf72a737/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
--
diff --git 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index db58f1d..d83ff45 100644
--- 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -61,7 +61,8 @@ public class CoreCliDriver extends CliAdapter {
 @Override
 public QTestUtil invokeInternal() throws Exception {
   

[14/38] hive git commit: HIVE-14852. Change qtest logging to not redirect all logs to console. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-10-05 Thread khorgath
HIVE-14852. Change qtest logging to not redirect all logs to console. 
(Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a6c60807
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a6c60807
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a6c60807

Branch: refs/heads/repl2
Commit: a6c60807eb1daccb940d9caaeb2f7cafa7643afe
Parents: 20304c0
Author: Siddharth Seth 
Authored: Thu Sep 29 10:42:21 2016 -0700
Committer: Siddharth Seth 
Committed: Thu Sep 29 10:43:01 2016 -0700

--
 itests/qtest-spark/pom.xml | 2 ++
 itests/qtest/pom.xml   | 2 ++
 pom.xml| 3 +++
 3 files changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a6c60807/itests/qtest-spark/pom.xml
--
diff --git a/itests/qtest-spark/pom.xml b/itests/qtest-spark/pom.xml
index 1e6c3a2..240852e 100644
--- a/itests/qtest-spark/pom.xml
+++ b/itests/qtest-spark/pom.xml
@@ -30,6 +30,8 @@
 
   
     <hive.path.to.root>../..</hive.path.to.root>
+
+    <test.console.log.level>OFF</test.console.log.level>
 
 
     <jetty.version>8.1.14.v20131031</jetty.version>

http://git-wip-us.apache.org/repos/asf/hive/blob/a6c60807/itests/qtest/pom.xml
--
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index e762d0e..72028f3 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -30,6 +30,8 @@
 
   
 ../..
+
+OFF
 
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/a6c60807/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 756cc34..5d13344 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,6 +78,8 @@
     <test.log4j.scheme>file://</test.log4j.scheme>
     <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
     <test.tmp.dir.uri>file://${test.tmp.dir}</test.tmp.dir.uri>
+
+    <test.console.log.level>INFO</test.console.log.level>
 
     <test.warehouse.dir>${project.build.directory}/warehouse</test.warehouse.dir>
     <test.warehouse.scheme>pfile://</test.warehouse.scheme>
 
@@ -1028,6 +1030,7 @@
 
            <maven.repo.local>${maven.repo.local}</maven.repo.local>
            <mapred.job.tracker>local</mapred.job.tracker>
            <log4j.configurationFile>${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j2.properties</log4j.configurationFile>
+           <test.console.log.level>${test.console.log.level}</test.console.log.level>
            <log4j.debug>true</log4j.debug>
            <test.tmp.dir>${test.tmp.dir}</test.tmp.dir>



[26/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
--
diff --git 
a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py 
b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
index 57a748a..10778f2 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -213,6 +213,15 @@ class Iface(fb303.FacebookService.Iface):
 """
 pass
 
+  def get_tables_by_type(self, db_name, pattern, tableType):
+    """
+    Parameters:
+     - db_name
+     - pattern
+     - tableType
+    """
+    pass
+
   def get_table_meta(self, db_patterns, tbl_patterns, tbl_types):
 """
 Parameters:
@@ -2106,6 +2115,43 @@ class Client(fb303.FacebookService.Client, Iface):
   raise result.o1
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables failed: unknown result")
 
+  def get_tables_by_type(self, db_name, pattern, tableType):
+    """
+    Parameters:
+     - db_name
+     - pattern
+     - tableType
+    """
+    self.send_get_tables_by_type(db_name, pattern, tableType)
+    return self.recv_get_tables_by_type()
+
+  def send_get_tables_by_type(self, db_name, pattern, tableType):
+    self._oprot.writeMessageBegin('get_tables_by_type', TMessageType.CALL, self._seqid)
+    args = get_tables_by_type_args()
+    args.db_name = db_name
+    args.pattern = pattern
+    args.tableType = tableType
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_get_tables_by_type(self):
+    iprot = self._iprot
+    (fname, mtype, rseqid) = iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(iprot)
+      iprot.readMessageEnd()
+      raise x
+    result = get_tables_by_type_result()
+    result.read(iprot)
+    iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.o1 is not None:
+      raise result.o1
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables_by_type failed: unknown result")
+
   def get_table_meta(self, db_patterns, tbl_patterns, tbl_types):
 """
 Parameters:
@@ -6625,6 +6671,7 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     self._processMap["drop_table"] = Processor.process_drop_table
     self._processMap["drop_table_with_environment_context"] = Processor.process_drop_table_with_environment_context
     self._processMap["get_tables"] = Processor.process_get_tables
+    self._processMap["get_tables_by_type"] = Processor.process_get_tables_by_type
     self._processMap["get_table_meta"] = Processor.process_get_table_meta
     self._processMap["get_all_tables"] = Processor.process_get_all_tables
     self._processMap["get_table"] = Processor.process_get_table
@@ -7411,6 +7458,28 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     oprot.writeMessageEnd()
     oprot.trans.flush()
 
+  def process_get_tables_by_type(self, seqid, iprot, oprot):
+    args = get_tables_by_type_args()
+    args.read(iprot)
+    iprot.readMessageEnd()
+    result = get_tables_by_type_result()
+    try:
+      result.success = self._handler.get_tables_by_type(args.db_name, args.pattern, args.tableType)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
+      result.o1 = o1
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_tables_by_type", msg_type, seqid)
+    result.write(oprot)
+    oprot.writeMessageEnd()
+    oprot.trans.flush()
+
   def process_get_table_meta(self, seqid, iprot, oprot):
 args = get_table_meta_args()
 args.read(iprot)
@@ -14626,6 +14695,183 @@ class get_tables_result:
   def __ne__(self, other):
     return not (self == other)
 
+class get_tables_by_type_args:
+  """
+  Attributes:
+   - db_name
+   - pattern
+   - tableType
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'db_name', None, None, ), # 1
+    (2, TType.STRING, 'pattern', None, None, ), # 2
+    (3, TType.STRING, 'tableType', None, None, ), # 3
+  )
+
+  def __init__(self, db_name=None, pattern=None, tableType=None,):
+    self.db_name = db_name
+    self.pattern = pattern
+    self.tableType = tableType
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return

[31/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
HIVE-14558: Add support for listing views similar to "show tables" (Naveen 
Gangam, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/21a0142f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/21a0142f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/21a0142f

Branch: refs/heads/repl2
Commit: 21a0142f333fba231f2648db53a48dc41384ad72
Parents: e19f0e3
Author: Aihua Xu 
Authored: Tue Oct 4 09:53:12 2016 -0400
Committer: Aihua Xu 
Committed: Tue Oct 4 09:53:12 2016 -0400

--
 metastore/if/hive_metastore.thrift  |1 +
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 2163 +++-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h|  148 +
 .../ThriftHiveMetastore_server.skeleton.cpp |5 +
 .../hive/metastore/api/ThriftHiveMetastore.java | 3184 --
 .../gen-php/metastore/ThriftHiveMetastore.php   | 1341 +---
 .../hive_metastore/ThriftHiveMetastore-remote   |7 +
 .../hive_metastore/ThriftHiveMetastore.py   |  938 --
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   65 +
 .../hadoop/hive/metastore/HiveMetaStore.java|   23 +
 .../hive/metastore/HiveMetaStoreClient.java |   12 +
 .../hadoop/hive/metastore/IMetaStoreClient.java |   15 +
 .../hadoop/hive/metastore/ObjectStore.java  |8 +
 .../apache/hadoop/hive/metastore/RawStore.java  |4 +
 .../hadoop/hive/metastore/hbase/HBaseStore.java |6 +
 .../DummyRawStoreControlledCommit.java  |6 +
 .../DummyRawStoreForJdoConnection.java  |6 +
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   45 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java|   46 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java  |   43 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |1 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g|2 +
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |2 +
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java |2 +-
 .../hadoop/hive/ql/plan/HiveOperation.java  |1 +
 .../hadoop/hive/ql/plan/ShowTablesDesc.java |   29 +
 .../authorization/plugin/HiveOperationType.java |1 +
 .../plugin/sqlstd/Operation2Privilege.java  |2 +
 ql/src/test/queries/clientpositive/show_views.q |   56 +
 .../results/clientpositive/show_views.q.out |  320 ++
 30 files changed, 5774 insertions(+), 2708 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/metastore/if/hive_metastore.thrift
--
diff --git a/metastore/if/hive_metastore.thrift 
b/metastore/if/hive_metastore.thrift
index 872c0f3..c5ba309 100755
--- a/metastore/if/hive_metastore.thrift
+++ b/metastore/if/hive_metastore.thrift
@@ -1030,6 +1030,7 @@ service ThriftHiveMetastore extends fb303.FacebookService
       4:EnvironmentContext environment_context)
                        throws(1:NoSuchObjectException o1, 2:MetaException o3)
   list<string> get_tables(1: string db_name, 2: string pattern) throws (1: MetaException o1)
+  list<string> get_tables_by_type(1: string db_name, 2: string pattern, 3: string tableType) throws (1: MetaException o1)
   list<TableMeta> get_table_meta(1: string db_patterns, 2: string tbl_patterns, 3: list<string> tbl_types)
                        throws (1: MetaException o1)
   list<string> get_all_tables(1: string db_name) throws (1: MetaException o1)
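A hedged sketch of how a Java client might call the new API through the HiveMetaStoreClient wrapper added in this commit; the exact wrapper signature is an assumption here, so verify it against IMetaStoreClient in this change:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.TableType;

    public class ListViewsSketch {
      public static void main(String[] args) throws Exception {
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        // List views in 'default' matching any name; this is what SHOW VIEWS uses.
        for (String view : client.getTables("default", "*", TableType.VIRTUAL_VIEW)) {
          System.out.println(view);
        }
        client.close();
      }
    }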



[36/38] hive git commit: HIVE-14873: Add UDF for extraction of 'day of week' (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/59539885/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
--
diff --git a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out 
b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
index 299d3bc..68a2a4d 100644
--- a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
@@ -209,6 +209,7 @@ PREHOOK: query: EXPLAIN SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -223,6 +224,7 @@ POSTHOOK: query: EXPLAIN SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -243,8 +245,8 @@ STAGE PLANS:
                 alias: date_udf_flight_orc
                 Statistics: Num rows: 137 Data size: 13152 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), dayofmonth(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int)
-                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
+                  expressions: to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), dayofmonth(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int)
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
                   Statistics: Num rows: 137 Data size: 13152 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
@@ -267,6 +269,7 @@ PREHOOK: query: SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -283,6 +286,7 @@ POSTHOOK: query: SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -293,149 +297,150 @@ FROM date_udf_flight_orc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf_flight_orc
  A masked pattern was here 
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287583200	2010	10	20	20	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945
-1287669600	2010	10	21	21	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946
-1287669600	2010	10	21	21	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946
-1287669600	2010	10	21	21	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946
-1287669600	2010	10	21	21	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946
-1287669600	2010	10	21	21	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946
-1287669600	2010	10	21	21	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946

[38/38] hive git commit: HIVE-14773: NPE aggregating column statistics for date column in partitioned table (Pengcheng Xiong, reviewed by Gopal V)

2016-10-05 Thread khorgath
HIVE-14773: NPE aggregating column statistics for date column in partitioned 
table (Pengcheng Xiong, reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c53c9be7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c53c9be7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c53c9be7

Branch: refs/heads/repl2
Commit: c53c9be7181fc47bb5422473edbba1ad9ae81042
Parents: 5953988
Author: Pengcheng Xiong 
Authored: Wed Oct 5 10:40:53 2016 -0700
Committer: Pengcheng Xiong 
Committed: Wed Oct 5 10:41:00 2016 -0700

--
 .../hive/metastore/IExtrapolatePartStatus.java  |   1 +
 .../hive/metastore/StatObjectConverter.java |  29 ++
 .../extrapolate_part_stats_date.q   |  14 +
 .../extrapolate_part_stats_date.q.out   | 302 +++
 4 files changed, 346 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c53c9be7/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
index 4859cff..d0569fb 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
@@ -39,6 +39,7 @@ public interface IExtrapolatePartStatus {
   put("int", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("smallint", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("tinyint", new Integer[] { 0, 1, 6, 7, 12, 15 });
+  put("date", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("timestamp", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("long", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("double", new Integer[] { 2, 3, 6, 7, 13, 15 });

http://git-wip-us.apache.org/repos/asf/hive/blob/c53c9be7/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
index e119dd8..b259dfa 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
@@ -579,6 +579,35 @@ public class StatObjectConverter {
 longStats.setNumDVs(lowerBound);
   }
   data.setLongStats(longStats);
+} else if (colType.equals("date")) {
+  DateColumnStatsData dateStats = new DateColumnStatsData();
+  dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+  if (lhigh != null) {
+dateStats.setHighValue(new 
Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
+  }
+  if (llow != null) {
+dateStats.setLowValue(new 
Date(MetaStoreDirectSql.extractSqlLong(llow)));
+  }
+  long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
+  long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
+  if (useDensityFunctionForNDVEstimation && lhigh != null && llow != null 
&& avgLong != null
+  && MetaStoreDirectSql.extractSqlDouble(avgLong) != 0.0) {
+// We have estimation, lowerbound and higherbound. We use estimation if
+// it is between lowerbound and higherbound.
+long estimation = MetaStoreDirectSql
+.extractSqlLong((MetaStoreDirectSql.extractSqlLong(lhigh) - 
MetaStoreDirectSql
+.extractSqlLong(llow)) / 
MetaStoreDirectSql.extractSqlDouble(avgLong));
+if (estimation < lowerBound) {
+  dateStats.setNumDVs(lowerBound);
+} else if (estimation > higherBound) {
+  dateStats.setNumDVs(higherBound);
+} else {
+  dateStats.setNumDVs(estimation);
+}
+  } else {
+dateStats.setNumDVs(lowerBound);
+  }
+  data.setDateStats(dateStats);
 } else if (colType.equals("double") || colType.equals("float")) {
   DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
   doubleStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
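The NDV logic above reduces to a density estimate (high - low) / avg clamped to [lowerBound, higherBound]. A small arithmetic sketch with made-up values:

    public class NdvClampSketch {
      public static void main(String[] args) {
        long low = 10000, high = 10100;      // min/max date as days since epoch (illustrative)
        double avg = 2.0;                    // average per-partition range (illustrative)
        long lowerBound = 20, higherBound = 80;
        long estimation = (long) ((high - low) / avg);          // 50
        long ndv = Math.max(lowerBound, Math.min(higherBound, estimation));
        System.out.println(ndv);             // 50: the estimate falls inside the bounds
      }
    }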

http://git-wip-us.apache.org/repos/asf/hive/blob/c53c9be7/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q
--
diff --git a/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q 
b/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q
new file mode 100644
index 000..1f38a65
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q
@@ 

[20/38] hive git commit: HIVE-14865 Fix comments after HIVE-14350 (Eugene Koifman, reviewed by Alan Gates)

2016-10-05 Thread khorgath
HIVE-14865 Fix comments after HIVE-14350 (Eugene Koifman, reviewed by Alan 
Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/297b4433
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/297b4433
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/297b4433

Branch: refs/heads/repl2
Commit: 297b4433cd2fdfb84182668bf7b1c524e92c6593
Parents: f903c4a
Author: Eugene Koifman 
Authored: Fri Sep 30 15:10:23 2016 -0700
Committer: Eugene Koifman 
Committed: Fri Sep 30 15:10:23 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java | 6 --
 1 file changed, 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/297b4433/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index cda5f39..f1eba5d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -866,7 +866,6 @@ public class AcidUtils {
    * {@link txnList}.  Note that 'original' files are logically a base_Long.MIN_VALUE and thus
    * cannot have any data for an open txn.  We could check {@link deltas} has files to cover
    * [1,n] w/o gaps but this would almost never happen...*/
-  //todo: this should only care about 'open' tnxs (HIVE-14211)
   long[] exceptions = txnList.getInvalidTransactions();
   String minOpenTxn = exceptions != null && exceptions.length > 0 ?
       Long.toString(exceptions[0]) : "x";
@@ -910,11 +909,6 @@ public class AcidUtils {
* files within the snapshot.
*/
   private static boolean isValidBase(long baseTxnId, ValidTxnList txnList) {
-    /*This implementation is suboptimal.  It considers open/aborted txns invalid while we are only
-    * concerned with 'open' ones.  (Compaction removes any data that belongs to aborted txns and
-    * reads skip anything that belongs to aborted txn, thus base_7 is still OK if the only exception
-    * is txn 5 which is aborted).  So this implementation can generate false positives. (HIVE-14211)
-    * */
 if(baseTxnId == Long.MIN_VALUE) {
   //such base is created by 1st compaction in case of non-acid to acid 
table conversion
   //By definition there are no open txns with id < 1.



[33/38] hive git commit: HIVE-14882: Lean qtest initialization for CliDrivers (Prasanth Jayachandran reviewed by Siddharth Seth)

2016-10-05 Thread khorgath
HIVE-14882: Lean qtest initialization for CliDrivers (Prasanth Jayachandran 
reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/481d7cd5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/481d7cd5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/481d7cd5

Branch: refs/heads/repl2
Commit: 481d7cd5c469217b7c8c2ad50e06242beefec17f
Parents: efe9c84
Author: Prasanth Jayachandran 
Authored: Tue Oct 4 17:47:03 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Tue Oct 4 17:47:03 2016 -0700

--
 data/scripts/q_test_cleanup_compare.sql |  1 +
 data/scripts/q_test_cleanup_contrib.sql |  2 +
 data/scripts/q_test_cleanup_for_encryption.sql  |  1 -
 data/scripts/q_test_cleanup_src.sql |  1 +
 data/scripts/q_test_cleanup_tez.sql |  4 +
 data/scripts/q_test_init_compare.sql| 26 +++
 data/scripts/q_test_init_contrib.sql| 29 
 data/scripts/q_test_init_for_encryption.sql |  5 --
 data/scripts/q_test_init_src.sql|  5 ++
 data/scripts/q_test_init_src_with_stats.sql |  9 +++
 data/scripts/q_test_init_tez.sql| 78 
 .../apache/hadoop/hive/ql/TestMTQueries.java|  3 +-
 .../hadoop/hive/cli/control/CliConfigs.java | 36 -
 .../org/apache/hadoop/hive/ql/QTestUtil.java|  6 +-
 .../clientpositive/tez/explainuser_3.q.out  |  6 +-
 15 files changed, 182 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/481d7cd5/data/scripts/q_test_cleanup_compare.sql
--
diff --git a/data/scripts/q_test_cleanup_compare.sql 
b/data/scripts/q_test_cleanup_compare.sql
new file mode 100644
index 000..0c6ab14
--- /dev/null
+++ b/data/scripts/q_test_cleanup_compare.sql
@@ -0,0 +1 @@
+DROP TABLE IF EXISTS alltypesorc;

http://git-wip-us.apache.org/repos/asf/hive/blob/481d7cd5/data/scripts/q_test_cleanup_contrib.sql
--
diff --git a/data/scripts/q_test_cleanup_contrib.sql 
b/data/scripts/q_test_cleanup_contrib.sql
new file mode 100644
index 000..95d4ba0
--- /dev/null
+++ b/data/scripts/q_test_cleanup_contrib.sql
@@ -0,0 +1,2 @@
+DROP TABLE IF EXISTS src;
+DROP TABLE IF EXISTS src_thrift;

http://git-wip-us.apache.org/repos/asf/hive/blob/481d7cd5/data/scripts/q_test_cleanup_for_encryption.sql
--
diff --git a/data/scripts/q_test_cleanup_for_encryption.sql 
b/data/scripts/q_test_cleanup_for_encryption.sql
deleted file mode 100644
index 070cdbe..000
--- a/data/scripts/q_test_cleanup_for_encryption.sql
+++ /dev/null
@@ -1 +0,0 @@
-DROP TABLE IF EXISTS src PURGE;

http://git-wip-us.apache.org/repos/asf/hive/blob/481d7cd5/data/scripts/q_test_cleanup_src.sql
--
diff --git a/data/scripts/q_test_cleanup_src.sql 
b/data/scripts/q_test_cleanup_src.sql
new file mode 100644
index 000..08264ee
--- /dev/null
+++ b/data/scripts/q_test_cleanup_src.sql
@@ -0,0 +1 @@
+DROP TABLE IF EXISTS src;

http://git-wip-us.apache.org/repos/asf/hive/blob/481d7cd5/data/scripts/q_test_cleanup_tez.sql
--
diff --git a/data/scripts/q_test_cleanup_tez.sql 
b/data/scripts/q_test_cleanup_tez.sql
new file mode 100644
index 000..0e50ef3
--- /dev/null
+++ b/data/scripts/q_test_cleanup_tez.sql
@@ -0,0 +1,4 @@
+DROP TABLE IF EXISTS src;
+DROP TABLE IF EXISTS src1;
+DROP TABLE IF EXISTS srcpart;
+DROP TABLE IF EXISTS alltypesorc;

http://git-wip-us.apache.org/repos/asf/hive/blob/481d7cd5/data/scripts/q_test_init_compare.sql
--
diff --git a/data/scripts/q_test_init_compare.sql 
b/data/scripts/q_test_init_compare.sql
new file mode 100644
index 000..c554250
--- /dev/null
+++ b/data/scripts/q_test_init_compare.sql
@@ -0,0 +1,26 @@
+set hive.stats.dbclass=fs;
+--
+-- Table alltypesorc
+--
+DROP TABLE IF EXISTS alltypesorc;
+CREATE TABLE alltypesorc(
+ctinyint TINYINT,
+csmallint SMALLINT,
+cint INT,
+cbigint BIGINT,
+cfloat FLOAT,
+cdouble DOUBLE,
+cstring1 STRING,
+cstring2 STRING,
+ctimestamp1 TIMESTAMP,
+ctimestamp2 TIMESTAMP,
+cboolean1 BOOLEAN,
+cboolean2 BOOLEAN)
+STORED AS ORC;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
+OVERWRITE INTO  TABLE alltypesorc;
+
+ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
+
+ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS 

[15/38] hive git commit: HIVE-14854. Add a core cluster type to QTestUtil. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-10-05 Thread khorgath
HIVE-14854. Add a core cluster type to QTestUtil. (Siddharth Seth, reviewed by 
Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/474425aa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/474425aa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/474425aa

Branch: refs/heads/repl2
Commit: 474425aa62e3f25b119419439373aa684c6c2121
Parents: a6c6080
Author: Siddharth Seth 
Authored: Thu Sep 29 13:10:44 2016 -0700
Committer: Siddharth Seth 
Committed: Thu Sep 29 13:10:44 2016 -0700

--
 .../hive/cli/control/AbstractCliConfig.java | 13 +++-
 .../hadoop/hive/cli/control/CoreCliDriver.java  |  2 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 75 +++-
 .../hive/llap/daemon/impl/LlapDaemon.java   |  5 +-
 4 files changed, 56 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/474425aa/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
--
diff --git 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index 03d4075..c12f51e 100644
--- 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -409,7 +409,18 @@ public abstract class AbstractCliConfig {
   }
 
   protected void setMetastoreType(MetastoreType mt) {
-    metastoreType=mt;
+    String metaStoreTypeProperty = getSysPropValue("metaStoreType");
+    if (metaStoreTypeProperty != null) {
+      if (metaStoreTypeProperty.equalsIgnoreCase("sql")) {
+        metastoreType = MetastoreType.sql;
+      } else if (metaStoreTypeProperty.equalsIgnoreCase("hbase")) {
+        metastoreType = MetastoreType.hbase;
+      } else {
+        throw new IllegalArgumentException("Unknown metastore type: " + metaStoreTypeProperty);
+      }
+    } else {
+      metastoreType = mt;
+    }
   }
 
   public MetastoreType getMetastoreType() {

http://git-wip-us.apache.org/repos/asf/hive/blob/474425aa/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
--
diff --git 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index d83ff45..a735346 100644
--- 
a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -61,7 +61,7 @@ public class CoreCliDriver extends CliAdapter {
 @Override
 public QTestUtil invokeInternal() throws Exception {
       return new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR,
-          hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true, false,
+          hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true,
           cliConfig.getFsType());
 }
   }.invoke("QtestUtil instance created", LOG, true);

http://git-wip-us.apache.org/repos/asf/hive/blob/474425aa/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 909d7f6..e49ecd9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -50,6 +50,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.Deque;
+import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
@@ -152,7 +153,6 @@ public class QTestUtil {
 
   private String testWarehouse;
   private final String testFiles;
-  private final boolean localMode;
   protected final String outDir;
   protected final String logDir;
   private final TreeMap qMap;
@@ -411,6 +411,11 @@ public class QTestUtil {
 }
   }
 
+  private enum CoreClusterType {
+MR,
+TEZ,
+SPARK
+  }
 
   public enum FsType {
 local,
@@ -420,35 +425,48 @@ public class QTestUtil {
 
   public enum MiniClusterType {
 
-mr(FsType.hdfs),
-tez(FsType.hdfs),
-spark(FsType.local),
-miniSparkOnYarn(FsType.hdfs),
-llap(FsType.hdfs),
-none(FsType.local);
+mr(CoreClusterType.MR, FsType.hdfs),
+

[35/38] hive git commit: HIVE-14099: Hive security authorization can be disabled by users (Aihua Xu, reviewed by Yongzhi Chen)

2016-10-05 Thread khorgath
HIVE-14099: Hive security authorization can be disabled by users (Aihua Xu, 
reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/96bcee86
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/96bcee86
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/96bcee86

Branch: refs/heads/repl2
Commit: 96bcee86c8df1c1a511fe70c1a37840d996abb9e
Parents: 54ff3f5
Author: Aihua Xu 
Authored: Wed Oct 5 10:19:33 2016 -0400
Committer: Aihua Xu 
Committed: Wed Oct 5 10:19:33 2016 -0400

--
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/96bcee86/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 4c3ef3e..5ea9751 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3116,8 +3116,9 @@ public class HiveConf extends Configuration {
 
 
 HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list",
-
"hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role,"
 +
-"hive.server2.xsrf.filter.enabled",
+
"hive.security.authenticator.manager,hive.security.authorization.manager," +
+
"hive.security.metastore.authorization.manager,hive.security.metastore.authenticator.manager,"
 +
+
"hive.users.in.admin.role,hive.server2.xsrf.filter.enabled,hive.security.authorization.enabled",
 "Comma separated list of configuration options which are immutable at 
runtime"),
 HIVE_CONF_HIDDEN_LIST("hive.conf.hidden.list",
 METASTOREPWD.varname + "," + HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname
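Once a property is on the restricted list, runtime set commands are rejected; HiveConf.verifyAndSet is the usual enforcement point. A hedged sketch (method name assumed from HiveConf; confirm against this version):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class RestrictedConfSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        try {
          // With this commit, users can no longer flip authorization off at runtime.
          conf.verifyAndSet("hive.security.authorization.enabled", "false");
        } catch (IllegalArgumentException e) {
          System.out.println("rejected as expected: " + e.getMessage());
        }
      }
    }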



[37/38] hive git commit: HIVE-14873: Add UDF for extraction of 'day of week' (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-10-05 Thread khorgath
HIVE-14873: Add UDF for extraction of 'day of week' (Jesus Camacho Rodriguez, 
reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/59539885
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/59539885
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/59539885

Branch: refs/heads/repl2
Commit: 59539885725a96cca4b3f0759a5b26e0d8198dc8
Parents: 96bcee86
Author: Jesus Camacho Rodriguez 
Authored: Sat Oct 1 09:30:35 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Wed Oct 5 17:51:41 2016 +0100

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  18 +-
 .../expressions/VectorUDFDayOfWeekDate.java |  39 +
 .../expressions/VectorUDFDayOfWeekString.java   |  61 ++
 .../VectorUDFDayOfWeekTimestamp.java|  39 +
 .../hive/ql/optimizer/physical/Vectorizer.java  |   2 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   1 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g|   3 +-
 .../apache/hadoop/hive/ql/udf/UDFDayOfWeek.java | 105 +++
 ql/src/test/queries/clientpositive/extract.q|   3 +
 .../clientpositive/vectorized_date_funcs.q  |   8 +-
 .../test/results/clientpositive/extract.q.out   |  11 +
 .../llap/vectorized_date_funcs.q.out| 846 ++-
 .../results/clientpositive/show_functions.q.out |   1 +
 .../clientpositive/vectorized_date_funcs.q.out  | 846 ++-
 14 files changed, 1139 insertions(+), 844 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/59539885/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 6b29be1..0dbbc1d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -37,14 +37,6 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorDay;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorHour;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorMinute;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorMonth;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorQuarter;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorSecond;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorWeek;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorYear;
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.ql.udf.UDAFPercentile;
 import org.apache.hadoop.hive.ql.udf.UDFAcos;
@@ -57,7 +49,16 @@ import org.apache.hadoop.hive.ql.udf.UDFChr;
 import org.apache.hadoop.hive.ql.udf.UDFConv;
 import org.apache.hadoop.hive.ql.udf.UDFCos;
 import org.apache.hadoop.hive.ql.udf.UDFCrc32;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorDay;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorHour;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorMinute;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorMonth;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorQuarter;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorSecond;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorWeek;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorYear;
 import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfWeek;
 import org.apache.hadoop.hive.ql.udf.UDFDegrees;
 import org.apache.hadoop.hive.ql.udf.UDFE;
 import org.apache.hadoop.hive.ql.udf.UDFExp;
@@ -283,6 +284,7 @@ public final class FunctionRegistry {
 
 system.registerUDF("day", UDFDayOfMonth.class, false);
 system.registerUDF("dayofmonth", UDFDayOfMonth.class, false);
+system.registerUDF("dayofweek", UDFDayOfWeek.class, false);
 system.registerUDF("month", UDFMonth.class, false);
 system.registerGenericUDF("quarter", GenericUDFQuarter.class);
 system.registerUDF("year", UDFYear.class, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/59539885/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
new file mode 100644
index 000..bd9c480
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
@@ -0,0 +1,39 @@
+/**
+ * 

[23/38] hive git commit: HIVE-14830. Move a majority of the MiniLlapCliDriver tests to use an inline AM. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-10-05 Thread khorgath
HIVE-14830. Move a majority of the MiniLlapCliDriver tests to use an inline AM. 
(Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0a4b3d8f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0a4b3d8f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0a4b3d8f

Branch: refs/heads/repl2
Commit: 0a4b3d8ff673f6f6670293a7491873c229cb0f40
Parents: b86342f
Author: Siddharth Seth 
Authored: Mon Oct 3 13:05:34 2016 -0700
Committer: Siddharth Seth 
Committed: Mon Oct 3 13:05:34 2016 -0700

--
 .../hive/cli/TestMiniLlapLocalCliDriver.java|  62 ++
 .../test/resources/testconfiguration.properties | 117 ++-
 .../hadoop/hive/cli/control/CliConfigs.java |  29 +
 .../hive/ql/optimizer/physical/LlapDecider.java |   1 +
 4 files changed, 152 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0a4b3d8f/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCliDriver.java
--
diff --git 
a/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCliDriver.java
 
b/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCliDriver.java
new file mode 100644
index 000..5b1caf6
--- /dev/null
+++ 
b/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCliDriver.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.cli;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.hadoop.hive.cli.control.CliAdapter;
+import org.apache.hadoop.hive.cli.control.CliConfigs;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestRule;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class TestMiniLlapLocalCliDriver {
+
+  static CliAdapter adapter = new 
CliConfigs.MiniLlapLocalCliConfig().getCliAdapter();
+
+  @Parameters(name = "{0}")
+  public static List<Object[]> getParameters() throws Exception {
+return adapter.getParameters();
+  }
+
+  @ClassRule
+  public static TestRule cliClassRule = adapter.buildClassRule();
+
+  @Rule
+  public TestRule cliTestRule = adapter.buildTestRule();
+
+  private String name;
+  private File qfile;
+
+  public TestMiniLlapLocalCliDriver(String name, File qfile) {
+this.name = name;
+this.qfile = qfile;
+  }
+
+  @Test
+  public void testCliDriver() throws Exception {
+adapter.runTest(name, qfile);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/0a4b3d8f/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index e810a58..fbba0cd 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -54,8 +54,40 @@ minitez.query.files=explainuser_3.q,\
   stats_filemetadata.q,\
   tez_union_with_udf.q
 
+
 minillap.shared.query.files=acid_globallimit.q,\
-  alter_merge_2_orc.q,\
+  insert_into1.q,\
+  insert_into2.q,\
+  insert_values_orig_table.,\
+  llapdecider.q,\
+  mapreduce1.q,\
+  mapreduce2.q,\
+  orc_merge1.q,\
+  orc_merge10.q,\
+  orc_merge2.q,\
+  orc_merge3.q,\
+  orc_merge4.q,\
+  orc_merge_diff_fs.q,\
+  script_env_var1.q,\
+  script_env_var2.q,\
+  script_pipe.q,\
+  tez_union_view.q,\
+  transform1.q,\
+  transform2.q,\
+  transform_ppr1.q,\
+  transform_ppr2.q,\
+  unionDistinct_1.q,\
+  union_type_chk.q,\
+  orc_ppd_basic.q,\
+  orc_ppd_schema_evol_3a.q,\
+  cte_2.q,\
+  cte_4.q,\
+  llap_nullscan.q,\
+  dynamic_partition_pruning_2.q,\
+  tez_union_dynamic_partition.q,\
+  union_fast_stats.q
+

[03/38] hive git commit: HIVE-14358: Add metrics for number of queries executed for each execution engine (Barna Zsombor Klara, reviewed by Gabor Szadovszky, Yongzhi Chen)

2016-10-05 Thread khorgath
HIVE-14358: Add metrics for number of queries executed for each execution 
engine (Barna Zsombor Klara, reviewed by Gabor Szadovszky, Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0c55d46f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0c55d46f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0c55d46f

Branch: refs/heads/repl2
Commit: 0c55d46f2afdc7c282304839a10ac39221520316
Parents: 737fd09
Author: Yongzhi Chen 
Authored: Mon Sep 26 13:55:28 2016 -0400
Committer: Yongzhi Chen 
Committed: Tue Sep 27 09:23:16 2016 -0400

--
 .../common/metrics/common/MetricsConstant.java  |  7 +++
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  5 +++
 .../org/apache/hadoop/hive/ql/exec/Task.java|  9 
 .../hadoop/hive/ql/exec/mr/MapRedTask.java  | 11 +
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java | 11 +
 .../hadoop/hive/ql/exec/spark/SparkTask.java| 11 +
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java | 11 +
 .../hadoop/hive/ql/exec/mr/TestMapRedTask.java  | 47 
 .../hive/ql/exec/mr/TestMapredLocalTask.java| 46 +++
 .../hive/ql/exec/spark/TestSparkTask.java   | 46 +++
 .../hadoop/hive/ql/exec/tez/TestTezTask.java| 17 +++
 11 files changed, 221 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0c55d46f/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
 
b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
index 9dc96f9..c9d4087 100644
--- 
a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
+++ 
b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
@@ -61,4 +61,11 @@ public class MetricsConstant {
   // The number of Hive operations that are waiting to enter the compile block
   public static final String WAITING_COMPILE_OPS = "waiting_compile_ops";
 
+  // The number of map reduce tasks executed by the HiveServer2 since the last 
restart
+  public static final String HIVE_MR_TASKS = "hive_mapred_tasks";
+  // The number of spark tasks executed by the HiveServer2 since the last 
restart
+  public static final String HIVE_SPARK_TASKS = "hive_spark_tasks";
+  // The number of tez tasks executed by the HiveServer2 since the last restart
+  public static final String HIVE_TEZ_TASKS = "hive_tez_tasks";
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/0c55d46f/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 42d398d..03c56e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -1671,6 +1671,11 @@ public class Driver implements CommandProcessor {
 // incorrect results.
 assert tsk.getParentTasks() == null || tsk.getParentTasks().isEmpty();
 driverCxt.addToRunnable(tsk);
+
+Metrics metrics = MetricsFactory.getInstance();
+if (metrics != null) {
+  tsk.updateTaskMetrics(metrics);
+}
   }
 
   perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.RUN_TASKS);

http://git-wip-us.apache.org/repos/asf/hive/blob/0c55d46f/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index eeaa543..e1bd291 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -27,6 +27,8 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.metrics.common.Metrics;
+import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -534,6 +536,13 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
 }
   }
 
+  /**
+   * Provide metrics on the type and number of tasks executed by the HiveServer
+   * @param metrics
+   */
+  public void updateTaskMetrics(Metrics metrics) {
+// no metrics gathered by default
+   }
 
   public int getTaskTag() {
 return taskTag;
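
The diffstat shows each engine-specific task (MR, Spark, Tez) overriding this new hook,
but those hunks are not quoted in this message, so the following is only a sketch of the
presumable shape for the Tez case, using the counter names the MetricsConstant hunk above
introduces:

  // Assumed shape of an engine-specific override (e.g. inside TezTask extends Task);
  // not quoted verbatim from the commit.
  @Override
  public void updateTaskMetrics(Metrics metrics) {
    // Metrics.incrementCounter(String) bumps a named counter; the constant comes
    // from the MetricsConstant hunk earlier in this message.
    metrics.incrementCounter(MetricsConstant.HIVE_TEZ_TASKS);
  }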


[01/38] hive git commit: HIVE-3173 Add tests for JDBC getTypeInfo method (Xiu Gao via gates)

2016-10-05 Thread khorgath
Repository: hive
Updated Branches:
  refs/heads/repl2 4ce5fe131 -> c53c9be71


HIVE-3173 Add tests for JDBC getTypeInfo method (Xiu Gao via gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/19fd5613
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/19fd5613
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/19fd5613

Branch: refs/heads/repl2
Commit: 19fd56137caa23fbe8ef1e452a11603fc14f4325
Parents: 4ce5fe1
Author: Alan Gates 
Authored: Mon Sep 26 10:47:48 2016 -0700
Committer: Alan Gates 
Committed: Mon Sep 26 10:47:48 2016 -0700

--
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 32 
 1 file changed, 32 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/19fd5613/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index fc91f9d..ff4d63f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -1145,6 +1145,38 @@ public class TestJdbcDriver2 {
 assertFalse("Unexpected table", rs.next());
   }
 
+  @Test
+  public void testMetaDataGetTypeInfo() throws SQLException {
+HiveBaseResultSet rs = (HiveBaseResultSet) con.getMetaData().getTypeInfo();
+Set<String> typeInfos = new HashSet<String>();
+typeInfos.add("BOOLEAN");
+typeInfos.add("TINYINT");
+typeInfos.add("SMALLINT");
+typeInfos.add("INT");
+typeInfos.add("BIGINT");
+typeInfos.add("FLOAT");
+typeInfos.add("DOUBLE");
+typeInfos.add("STRING");
+typeInfos.add("TIMESTAMP");
+typeInfos.add("BINARY");
+typeInfos.add("DECIMAL");
+typeInfos.add("ARRAY");
+typeInfos.add("MAP");
+typeInfos.add("STRUCT");
+typeInfos.add("UNIONTYPE");
+
+int cnt = 0;
+while (rs.next()) {
+  String typeInfo = rs.getString("TYPE_NAME");
+  assertEquals("Get by index different from get by name", rs.getString(1), 
typeInfo);
+  typeInfos.remove(typeInfo);
+  cnt++;
+}
+rs.close();
+assertEquals("Incorrect typeInfo count.", 0, typeInfos.size());
+assertTrue("Found less typeInfos than we test for.", cnt >= 
typeInfos.size());
+  }
+
   /**
* Test the type returned for pre-created table type table and view type 
table
* @param tableTypeNames expected table types
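
Since getTypeInfo() is plain java.sql.DatabaseMetaData, the behavior this test pins down
is visible from any JDBC client; a minimal sketch (server URL and credentials are
assumptions):

  // Sketch: list the type names Hive advertises through standard JDBC metadata.
  import java.sql.Connection;
  import java.sql.DatabaseMetaData;
  import java.sql.DriverManager;
  import java.sql.ResultSet;

  public class TypeInfoExample {
    public static void main(String[] args) throws Exception {
      Connection con = DriverManager.getConnection(
          "jdbc:hive2://localhost:10000/default", "hive", "");
      DatabaseMetaData md = con.getMetaData();
      ResultSet rs = md.getTypeInfo();
      while (rs.next()) {
        // TYPE_NAME is the column the new test asserts against.
        System.out.println(rs.getString("TYPE_NAME"));
      }
      con.close();
    }
  }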



[18/38] hive git commit: HIVE-14100: Adding a new logged_in_user() UDF which returns the user provided when connecting (Peter Vary, reviewed by Mohit Sabharwal)

2016-10-05 Thread khorgath
HIVE-14100: Adding a new logged_in_user() UDF which returns the user provided 
when connecting (Peter Vary, reviewed by Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/45c1a09b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/45c1a09b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/45c1a09b

Branch: refs/heads/repl2
Commit: 45c1a09b7b76e41f05520de4bb0e26bb6fadc21f
Parents: 0562efc
Author: Mohit Sabharwal 
Authored: Fri Sep 30 13:54:31 2016 -0400
Committer: Mohit Sabharwal 
Committed: Fri Sep 30 13:57:10 2016 -0400

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  1 +
 .../ql/udf/generic/GenericUDFLoggedInUser.java  | 82 
 .../queries/clientpositive/udf_logged_in_user.q |  5 ++
 .../results/clientpositive/show_functions.q.out |  5 ++
 .../clientpositive/udf_logged_in_user.q.out | 22 ++
 5 files changed, 115 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/45c1a09b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index b277f5e..6870dfa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -344,6 +344,7 @@ public final class FunctionRegistry {
 system.registerGenericUDF("current_date", GenericUDFCurrentDate.class);
 system.registerGenericUDF("current_timestamp", 
GenericUDFCurrentTimestamp.class);
 system.registerGenericUDF("current_user", GenericUDFCurrentUser.class);
+system.registerGenericUDF("logged_in_user", GenericUDFLoggedInUser.class);
 
 system.registerGenericUDF("isnull", GenericUDFOPNull.class);
 system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/45c1a09b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
new file mode 100644
index 000..2915b86
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+@UDFType(deterministic = true)
+@Description(name = "logged_in_user", value = "_FUNC_() - Returns logged in 
user name",
+extended = "SessionState GetUserName - the username provided at 
session initialization")
+@NDV(maxNdv = 1)
+public class GenericUDFLoggedInUser extends GenericUDF {
+  protected Text loggedInUser;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
+if (arguments.length != 0) {
+  throw new UDFArgumentLengthException(
+  "The function LOGGED_IN_USER does not take any arguments, but found 
" + arguments.length);
+}
+
+if (loggedInUser == null) {
+  String loggedInUserName = SessionState.get().getUserName();
+  if (loggedInUserName != null) {
+loggedInUser = new Text(loggedInUserName);
+  }
+}
+
+return 
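
Client-side usage mirrors current_user(); a sketch of calling the new UDF from a JDBC
session, where the printed value should be the username supplied at connect time (URL
and user name are assumptions):

  // Sketch: logged_in_user() echoes the session-initialization username.
  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.ResultSet;

  public class LoggedInUserExample {
    public static void main(String[] args) throws Exception {
      Connection con = DriverManager.getConnection(
          "jdbc:hive2://localhost:10000/default", "alice", "");
      ResultSet rs = con.createStatement().executeQuery("SELECT logged_in_user()");
      if (rs.next()) {
        System.out.println(rs.getString(1));   // expected: alice
      }
      con.close();
    }
  }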

[34/38] hive git commit: HIVE-14858: Analyze command should support custom input formats (Chao Sun, reviewed by Xuefu Zhang and Prasanth Jayachandran)

2016-10-05 Thread khorgath
HIVE-14858: Analyze command should support custom input formats (Chao Sun, 
reviewed by Xuefu Zhang and Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/54ff3f56
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/54ff3f56
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/54ff3f56

Branch: refs/heads/repl2
Commit: 54ff3f56d495e24352624de2655be69d433bd179
Parents: 481d7cd
Author: Chao Sun 
Authored: Thu Sep 29 13:33:17 2016 -0700
Committer: Chao Sun 
Committed: Wed Oct 5 12:33:36 2016 +0800

--
 .../org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/54ff3f56/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index d6f1b7a0..9297a0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -91,8 +91,8 @@ public class GenMRTableScan1 implements NodeProcessor {
 if (parseCtx.getQueryProperties().isAnalyzeCommand()) {
   boolean partialScan = 
parseCtx.getQueryProperties().isPartialScanAnalyzeCommand();
   boolean noScan = 
parseCtx.getQueryProperties().isNoScanAnalyzeCommand();
-  if (inputFormat.equals(OrcInputFormat.class) ||
-  inputFormat.equals(MapredParquetInputFormat.class)) {
+  if (OrcInputFormat.class.isAssignableFrom(inputFormat) ||
+  
MapredParquetInputFormat.class.isAssignableFrom(inputFormat)) {
 // For ORC and Parquet, all the following statements are the same
 // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS
 // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS 
partialscan;
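
The substance of the fix is replacing Class.equals with Class.isAssignableFrom, so input
formats that merely subclass the ORC or Parquet ones now also qualify for these analyze
paths; a toy illustration (MyOrcInputFormat is a hypothetical subclass, not from the
commit):

  // Toy illustration of the check change; MyOrcInputFormat is made up.
  import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;

  public class AssignableCheckExample {
    static class MyOrcInputFormat extends OrcInputFormat { }

    public static void main(String[] args) {
      Class<?> inputFormat = MyOrcInputFormat.class;
      // Old check: exact class match only.
      System.out.println(inputFormat.equals(OrcInputFormat.class));            // false
      // New check: subclasses of the stock format also pass.
      System.out.println(OrcInputFormat.class.isAssignableFrom(inputFormat));  // true
    }
  }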



[27/38] hive git commit: HIVE-14558: Add support for listing views similar to "show tables" (Naveen Gangam, reviewed by Aihua Xu)

2016-10-05 Thread khorgath
http://git-wip-us.apache.org/repos/asf/hive/blob/21a0142f/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
--
diff --git a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php 
b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
index 2d82c92..24b3ba1 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
@@ -209,6 +209,14 @@ interface ThriftHiveMetastoreIf extends \FacebookServiceIf 
{
*/
   public function get_tables($db_name, $pattern);
   /**
+   * @param string $db_name
+   * @param string $pattern
+   * @param string $tableType
+   * @return string[]
+   * @throws \metastore\MetaException
+   */
+  public function get_tables_by_type($db_name, $pattern, $tableType);
+  /**
* @param string $db_patterns
* @param string $tbl_patterns
* @param string[] $tbl_types
@@ -2598,6 +2606,62 @@ class ThriftHiveMetastoreClient extends 
\FacebookServiceClient implements \metas
 throw new \Exception("get_tables failed: unknown result");
   }
 
+  public function get_tables_by_type($db_name, $pattern, $tableType)
+  {
+$this->send_get_tables_by_type($db_name, $pattern, $tableType);
+return $this->recv_get_tables_by_type();
+  }
+
+  public function send_get_tables_by_type($db_name, $pattern, $tableType)
+  {
+$args = new \metastore\ThriftHiveMetastore_get_tables_by_type_args();
+$args->db_name = $db_name;
+$args->pattern = $pattern;
+$args->tableType = $tableType;
+$bin_accel = ($this->output_ instanceof TBinaryProtocolAccelerated) && 
function_exists('thrift_protocol_write_binary');
+if ($bin_accel)
+{
+  thrift_protocol_write_binary($this->output_, 'get_tables_by_type', 
TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
+}
+else
+{
+  $this->output_->writeMessageBegin('get_tables_by_type', 
TMessageType::CALL, $this->seqid_);
+  $args->write($this->output_);
+  $this->output_->writeMessageEnd();
+  $this->output_->getTransport()->flush();
+}
+  }
+
+  public function recv_get_tables_by_type()
+  {
+$bin_accel = ($this->input_ instanceof TBinaryProtocolAccelerated) && 
function_exists('thrift_protocol_read_binary');
+if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, 
'\metastore\ThriftHiveMetastore_get_tables_by_type_result', 
$this->input_->isStrictRead());
+else
+{
+  $rseqid = 0;
+  $fname = null;
+  $mtype = 0;
+
+  $this->input_->readMessageBegin($fname, $mtype, $rseqid);
+  if ($mtype == TMessageType::EXCEPTION) {
+$x = new TApplicationException();
+$x->read($this->input_);
+$this->input_->readMessageEnd();
+throw $x;
+  }
+  $result = new \metastore\ThriftHiveMetastore_get_tables_by_type_result();
+  $result->read($this->input_);
+  $this->input_->readMessageEnd();
+}
+if ($result->success !== null) {
+  return $result->success;
+}
+if ($result->o1 !== null) {
+  throw $result->o1;
+}
+throw new \Exception("get_tables_by_type failed: unknown result");
+  }
+
   public function get_table_meta($db_patterns, $tbl_patterns, array $tbl_types)
   {
 $this->send_get_table_meta($db_patterns, $tbl_patterns, $tbl_types);
@@ -15313,6 +15377,253 @@ class ThriftHiveMetastore_get_tables_result {
 
 }
 
+class ThriftHiveMetastore_get_tables_by_type_args {
+  static $_TSPEC;
+
+  /**
+   * @var string
+   */
+  public $db_name = null;
+  /**
+   * @var string
+   */
+  public $pattern = null;
+  /**
+   * @var string
+   */
+  public $tableType = null;
+
+  public function __construct($vals=null) {
+if (!isset(self::$_TSPEC)) {
+  self::$_TSPEC = array(
+1 => array(
+  'var' => 'db_name',
+  'type' => TType::STRING,
+  ),
+2 => array(
+  'var' => 'pattern',
+  'type' => TType::STRING,
+  ),
+3 => array(
+  'var' => 'tableType',
+  'type' => TType::STRING,
+  ),
+);
+}
+if (is_array($vals)) {
+  if (isset($vals['db_name'])) {
+$this->db_name = $vals['db_name'];
+  }
+  if (isset($vals['pattern'])) {
+$this->pattern = $vals['pattern'];
+  }
+  if (isset($vals['tableType'])) {
+$this->tableType = $vals['tableType'];
+  }
+}
+  }
+
+  public function getName() {
+return 'ThriftHiveMetastore_get_tables_by_type_args';
+  }
+
+  public function read($input)
+  {
+$xfer = 0;
+$fname = null;
+$ftype = 0;
+$fid = 0;
+$xfer += $input->readStructBegin($fname);
+while (true)
+{
+  $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+  if ($ftype == TType::STOP) {
+break;
+  }
+  switch ($fid)
+  {
+case 1:
+  if ($ftype == 

[21/38] hive git commit: HIVE-14874: Master: Update errata.txt for the missing JIRA number in HIVE-9423 commit msg (Chaoyu Tang)"

2016-10-05 Thread khorgath
HIVE-14874: Master: Update errata.txt for the missing JIRA number in HIVE-9423 
commit msg (Chaoyu Tang)"


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/84e49742
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/84e49742
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/84e49742

Branch: refs/heads/repl2
Commit: 84e49742ab53ee41436c6511fd6af93edd23bbf1
Parents: 297b443
Author: ctang 
Authored: Sat Oct 1 21:47:06 2016 -0400
Committer: ctang 
Committed: Sat Oct 1 21:47:47 2016 -0400

--
 errata.txt | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/84e49742/errata.txt
--
diff --git a/errata.txt b/errata.txt
index 522b1e1..791d57e 100644
--- a/errata.txt
+++ b/errata.txt
@@ -82,3 +82,4 @@ d8298e1c85a515150562b0df68af89c18c468638 llap   HIVE-9418 
 https://issues.ap
 22df7a8441ca85ad7f64e5191d4675f2f36a0664 master HIVE-14182 
https://issues.apache.org/jira/browse/HIVE-14182
 223350894fe5aa653668e9f39e43218e514f2b24 master HIVE-14182 
https://issues.apache.org/jira/browse/HIVE-14182
 5c58dceeaf662b6314eedb9afa01a2896657ef77 master HIVE-14182 
https://issues.apache.org/jira/browse/HIVE-14182
+d16d4f1bcc43d6ebcab0eaf5bc635fb88b60be5f master HIVE-9423  
https://issues.apache.org/jira/browse/HIVE-9423



[12/38] hive git commit: HiveServer2: Provide the user with different error messages depending on the Thrift client exception code (Peter Vary via Chaoyu Tang)

2016-10-05 Thread khorgath
HiveServer2: Provide the user with different error messages depending on the 
Thrift client exception code (Peter Vary via Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d16d4f1b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d16d4f1b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d16d4f1b

Branch: refs/heads/repl2
Commit: d16d4f1bcc43d6ebcab0eaf5bc635fb88b60be5f
Parents: 291f3d5
Author: ctang 
Authored: Thu Sep 29 11:25:21 2016 -0400
Committer: ctang 
Committed: Thu Sep 29 11:25:21 2016 -0400

--
 .../java/org/apache/hive/beeline/BeeLine.java   | 20 +-
 beeline/src/main/resources/BeeLine.properties   | 11 ++-
 .../beeline/TestBeeLineExceptionHandling.java   | 72 
 3 files changed, 101 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d16d4f1b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 5322ca6..79922d2 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -1778,7 +1778,25 @@ public class BeeLine implements Closeable {
 }
 
 if (e.getCause() instanceof TTransportException) {
-  error(loc("hs2-unavailable"));
+  switch (((TTransportException)e.getCause()).getType()) {
+case TTransportException.ALREADY_OPEN:
+  error(loc("hs2-connection-already-open"));
+  break;
+case TTransportException.END_OF_FILE:
+  error(loc("hs2-unexpected-end-of-file"));
+  break;
+case TTransportException.NOT_OPEN:
+  error(loc("hs2-could-not-open-connection"));
+  break;
+case TTransportException.TIMED_OUT:
+  error(loc("hs2-connection-timed-out"));
+  break;
+case TTransportException.UNKNOWN:
+  error(loc("hs2-unknown-connection-problem"));
+  break;
+default:
+  error(loc("hs2-unexpected-error"));
+  }
 }
 
 error(loc(e instanceof SQLWarning ? "Warning" : "Error",

http://git-wip-us.apache.org/repos/asf/hive/blob/d16d4f1b/beeline/src/main/resources/BeeLine.properties
--
diff --git a/beeline/src/main/resources/BeeLine.properties 
b/beeline/src/main/resources/BeeLine.properties
index 13321d2..ad79c01 100644
--- a/beeline/src/main/resources/BeeLine.properties
+++ b/beeline/src/main/resources/BeeLine.properties
@@ -142,7 +142,16 @@ active-connections: 0#No active connections|1#{0} active connection:|1<{0} active connections:
 
 time-ms: ({0,number,#.###} seconds)
 
-hs2-unavailable: HS2 may be unavailable, check server status
+hs2-connection-already-open: Socket already connected.
+hs2-unexpected-end-of-file: Unexpected end of file when reading from HS2 
server. The root \
+cause might be too many concurrent connections. Please ask the administrator 
to check the number \
+of active connections, and adjust hive.server2.thrift.max.worker.threads if 
applicable.
+hs2-could-not-open-connection: Could not open connection to the HS2 server. 
Please check the \
+server URI and if the URI is correct, then ask the administrator to check the 
server status.\
+hs2-connection-timed-out: Connection timeout when communicating with HS2 
server.
+hs2-unknown-connection-problem: Unknown HS2 problem when communicating with 
Thrift server.
+hs2-unexpected-error: Unexpected HS2 error when communicating with the Thrift 
server.
+
 
 cmd-usage: Usage: java org.apache.hive.cli.beeline.BeeLine \n \
 \  -uthe JDBC URL to connect to\n \

http://git-wip-us.apache.org/repos/asf/hive/blob/d16d4f1b/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java
--
diff --git 
a/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java 
b/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java
new file mode 100644
index 000..08579e8
--- /dev/null
+++ b/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * 

[13/38] hive git commit: HIVE-14778 document threading model of Streaming API (Eugene Koifman, reviewed by Alan Gates)

2016-10-05 Thread khorgath
HIVE-14778 document threading model of Streaming API (Eugene Koifman, reviewed 
by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/20304c07
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/20304c07
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/20304c07

Branch: refs/heads/repl2
Commit: 20304c0705c4ad861b5915dacceaa6d6bdfe91fc
Parents: d16d4f1
Author: Eugene Koifman 
Authored: Thu Sep 29 10:41:42 2016 -0700
Committer: Eugene Koifman 
Committed: Thu Sep 29 10:41:42 2016 -0700

--
 .../apache/hive/hcatalog/streaming/StreamingConnection.java  | 2 ++
 .../org/apache/hive/hcatalog/streaming/TransactionBatch.java | 8 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/20304c07/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
index 8785a21..a8f4089 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
@@ -22,6 +22,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * Represents a connection to a HiveEndPoint. Used to acquire transaction 
batches.
+ * Note: the expectation is that there is at most 1 TransactionBatch 
outstanding for any given
+ * StreamingConnection.  Violating this may result in "out of sequence 
response".
  */
 public interface StreamingConnection {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/20304c07/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
index 3c8670d..3bcc510 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
@@ -24,8 +24,12 @@ import java.util.Collection;
 /**
  * Represents a set of Transactions returned by Hive. Supports opening, 
writing to
  * and commiting/aborting each transaction. The interface is designed to ensure
- * transactions in a batch are used up sequentially. Multiple transaction 
batches can be
- * used (initialized with separate RecordWriters) for concurrent streaming
+ * transactions in a batch are used up sequentially. To stream to the same 
HiveEndPoint
+ * concurrently, create separate StreamingConnections.
+ *
+ * Note on thread safety: At most 2 threads can run through a given 
TransactionBatch at the same
+ * time.  One thread may call {@link #heartbeat()} and the other all other 
methods.
+ * Violating this may result in "out of sequence response".
  *
  */
 public interface TransactionBatch  {
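
For reference, a sketch of the usage pattern these javadoc notes constrain, written
against the hcatalog streaming API: one StreamingConnection with at most one outstanding
TransactionBatch, transactions consumed in order, and at most one extra thread limited to
heartbeat(). The metastore URI, table and columns below are assumptions:

  // Sketch of the documented single-batch, sequential-transaction pattern.
  import org.apache.hive.hcatalog.streaming.*;

  public class StreamingExample {
    public static void main(String[] args) throws Exception {
      HiveEndPoint endPoint = new HiveEndPoint(
          "thrift://localhost:9083", "default", "alerts", null);  // assumed endpoint
      StreamingConnection conn = endPoint.newConnection(true);
      DelimitedInputWriter writer =
          new DelimitedInputWriter(new String[]{"id", "msg"}, ",", endPoint);
      TransactionBatch batch = conn.fetchTransactionBatch(10, writer);
      while (batch.remainingTransactions() > 0) {
        batch.beginNextTransaction();
        batch.write("1,hello".getBytes());
        batch.commit();   // only heartbeat() may run on another thread meanwhile
      }
      batch.close();
      conn.close();
    }
  }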



[02/38] hive git commit: HIVE-5867: JDBC driver and beeline should support executing an initial SQL script(Jianguo Tian, via Ferdinand Xu)

2016-10-05 Thread khorgath
HIVE-5867: JDBC driver and beeline should support executing an initial SQL 
script(Jianguo Tian, via Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/737fd09a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/737fd09a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/737fd09a

Branch: refs/heads/repl2
Commit: 737fd09a20dbb4b728307f84825f7ddc4294ae02
Parents: 19fd561
Author: Jianguo Tian 
Authored: Tue Sep 27 04:01:49 2016 +0800
Committer: Ferdinand Xu 
Committed: Tue Sep 27 04:01:49 2016 +0800

--
 .../org/apache/hive/jdbc/HiveConnection.java| 79 
 jdbc/src/java/org/apache/hive/jdbc/Utils.java   |  1 +
 .../org/apache/hive/jdbc/TestJdbcDriver.java| 98 
 3 files changed, 178 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/737fd09a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index ad96a64..ce85320 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -67,8 +67,11 @@ import javax.net.ssl.TrustManagerFactory;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 
+import java.io.BufferedReader;
+import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -92,6 +95,7 @@ import java.sql.SQLXML;
 import java.sql.Savepoint;
 import java.sql.Statement;
 import java.sql.Struct;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -125,6 +129,7 @@ public class HiveConnection implements java.sql.Connection {
   private int loginTimeout = 0;
   private TProtocolVersion protocol;
   private int fetchSize = HiveStatement.DEFAULT_FETCH_SIZE;
+  private String initFile = null;
 
   public HiveConnection(String uri, Properties info) throws SQLException {
 setupLoginTimeout();
@@ -147,6 +152,9 @@ public class HiveConnection implements java.sql.Connection {
 if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) {
   fetchSize = 
Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
 }
+if (sessConfMap.containsKey(JdbcConnectionParams.INIT_FILE)) {
+  initFile = sessConfMap.get(JdbcConnectionParams.INIT_FILE);
+}
 
 // add supported protocols
 supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
@@ -166,6 +174,7 @@ public class HiveConnection implements java.sql.Connection {
 
   // open client session
   openSession();
+  executeInitSql();
 } else {
   int maxRetries = 1;
   try {
@@ -184,6 +193,7 @@ public class HiveConnection implements java.sql.Connection {
   client = new TCLIService.Client(new TBinaryProtocol(transport));
   // open client session
   openSession();
+  executeInitSql();
 
   break;
 } catch (Exception e) {
@@ -218,6 +228,75 @@ public class HiveConnection implements java.sql.Connection 
{
 client = newSynchronizedClient(client);
   }
 
+  private void executeInitSql() throws SQLException {
+if (initFile != null) {
+  try {
+List<String> sqlList = parseInitFile(initFile);
+Statement st = createStatement();
+for(String sql : sqlList) {
+  boolean hasResult = st.execute(sql);
+  if (hasResult) {
+ResultSet rs = st.getResultSet();
+while (rs.next()) {
+  System.out.println(rs.getString(1));
+}
+  }
+}
+  } catch(Exception e) {
+LOG.error("Failed to execute initial SQL");
+throw new SQLException(e.getMessage());
+  }
+}
+  }
+
+  public static List<String> parseInitFile(String initFile) throws IOException {
+File file = new File(initFile);
+BufferedReader br = null;
+List<String> initSqlList = null;
+try {
+  FileInputStream input = new FileInputStream(file);
+  br = new BufferedReader(new InputStreamReader(input, "UTF-8"));
+  String line;
+  StringBuilder sb = new StringBuilder("");
+  while ((line = br.readLine()) != null) {
+line = line.trim();
+if (line.length() != 0) {
+  if (line.startsWith("#") || line.startsWith("--")) {
+continue;
+  } else {
+line = line.concat(" ");
+sb.append(line);
+  }
+}
+  }
+  initSqlList = 
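
From the client side the feature is driven entirely by the URL's session-variable
section; a minimal sketch (the script path and server URL are illustrative):

  // Sketch: every statement in /tmp/init.sql runs right after the session opens;
  // per parseInitFile above, lines starting with # or -- are skipped.
  import java.sql.Connection;
  import java.sql.DriverManager;

  public class InitFileExample {
    public static void main(String[] args) throws Exception {
      Connection con = DriverManager.getConnection(
          "jdbc:hive2://localhost:10000/default;initFile=/tmp/init.sql", "hive", "");
      con.close();
    }
  }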

hive git commit: HIVE-14773: NPE aggregating column statistics for date column in partitioned table (Pengcheng Xiong, reviewed by Gopal V)

2016-10-05 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/master 595398857 -> c53c9be71


HIVE-14773: NPE aggregating column statistics for date column in partitioned 
table (Pengcheng Xiong, reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c53c9be7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c53c9be7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c53c9be7

Branch: refs/heads/master
Commit: c53c9be7181fc47bb5422473edbba1ad9ae81042
Parents: 5953988
Author: Pengcheng Xiong 
Authored: Wed Oct 5 10:40:53 2016 -0700
Committer: Pengcheng Xiong 
Committed: Wed Oct 5 10:41:00 2016 -0700

--
 .../hive/metastore/IExtrapolatePartStatus.java  |   1 +
 .../hive/metastore/StatObjectConverter.java |  29 ++
 .../extrapolate_part_stats_date.q   |  14 +
 .../extrapolate_part_stats_date.q.out   | 302 +++
 4 files changed, 346 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c53c9be7/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
index 4859cff..d0569fb 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
@@ -39,6 +39,7 @@ public interface IExtrapolatePartStatus {
   put("int", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("smallint", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("tinyint", new Integer[] { 0, 1, 6, 7, 12, 15 });
+  put("date", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("timestamp", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("long", new Integer[] { 0, 1, 6, 7, 12, 15 });
   put("double", new Integer[] { 2, 3, 6, 7, 13, 15 });

http://git-wip-us.apache.org/repos/asf/hive/blob/c53c9be7/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
index e119dd8..b259dfa 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
@@ -579,6 +579,35 @@ public class StatObjectConverter {
 longStats.setNumDVs(lowerBound);
   }
   data.setLongStats(longStats);
+} else if (colType.equals("date")) {
+  DateColumnStatsData dateStats = new DateColumnStatsData();
+  dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
+  if (lhigh != null) {
+dateStats.setHighValue(new 
Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
+  }
+  if (llow != null) {
+dateStats.setLowValue(new 
Date(MetaStoreDirectSql.extractSqlLong(llow)));
+  }
+  long lowerBound = MetaStoreDirectSql.extractSqlLong(dist);
+  long higherBound = MetaStoreDirectSql.extractSqlLong(sumDist);
+  if (useDensityFunctionForNDVEstimation && lhigh != null && llow != null 
&& avgLong != null
+  && MetaStoreDirectSql.extractSqlDouble(avgLong) != 0.0) {
+// We have estimation, lowerbound and higherbound. We use estimation if
+// it is between lowerbound and higherbound.
+long estimation = MetaStoreDirectSql
+.extractSqlLong((MetaStoreDirectSql.extractSqlLong(lhigh) - 
MetaStoreDirectSql
+.extractSqlLong(llow)) / 
MetaStoreDirectSql.extractSqlDouble(avgLong));
+if (estimation < lowerBound) {
+  dateStats.setNumDVs(lowerBound);
+} else if (estimation > higherBound) {
+  dateStats.setNumDVs(higherBound);
+} else {
+  dateStats.setNumDVs(estimation);
+}
+  } else {
+dateStats.setNumDVs(lowerBound);
+  }
+  data.setDateStats(dateStats);
 } else if (colType.equals("double") || colType.equals("float")) {
   DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
   doubleStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
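
The new date branch reuses the shape of the long branch: a density-based estimate that is
trusted only inside the [lowerBound, higherBound] window. A small worked sketch with
invented numbers (the readings of the bounds in the comments are a hedged interpretation,
not quoted from the code):

  // Invented numbers, purely to illustrate the clamping logic above.
  public class NdvClampExample {
    public static void main(String[] args) {
      long lowerBound = 40;              // roughly: largest NDV seen in one partition
      long higherBound = 400;            // roughly: sum of per-partition NDVs
      long lhigh = 17000, llow = 16000;  // min/max date values (in days) across partitions
      double avg = 2.0;                  // average (high - low) / NDV per partition
      long estimation = (long) ((lhigh - llow) / avg);                     // 500
      long ndv = Math.min(higherBound, Math.max(lowerBound, estimation));  // clamped to 400
      System.out.println(ndv);
    }
  }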

http://git-wip-us.apache.org/repos/asf/hive/blob/c53c9be7/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q
--
diff --git a/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q 
b/ql/src/test/queries/clientpositive/extrapolate_part_stats_date.q
new file mode 100644
index 000..1f38a65
--- 

[1/2] hive git commit: HIVE-14873: Add UDF for extraction of 'day of week' (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-10-05 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/master 96bcee86c -> 595398857


http://git-wip-us.apache.org/repos/asf/hive/blob/59539885/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
--
diff --git a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out 
b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
index 299d3bc..68a2a4d 100644
--- a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
@@ -209,6 +209,7 @@ PREHOOK: query: EXPLAIN SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -223,6 +224,7 @@ POSTHOOK: query: EXPLAIN SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -243,8 +245,8 @@ STAGE PLANS:
 alias: date_udf_flight_orc
 Statistics: Num rows: 137 Data size: 13152 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
-  expressions: to_unix_timestamp(fl_time) (type: bigint), 
year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: 
int), dayofmonth(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( 
fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 
2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, 
'2000-01-01') (type: int)
-  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10
+  expressions: to_unix_timestamp(fl_time) (type: bigint), 
year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: 
int), dayofmonth(fl_time) (type: int), dayofweek(fl_time) (type: int), 
weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), 
to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), 
date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int)
+  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11
   Statistics: Num rows: 137 Data size: 13152 Basic stats: COMPLETE 
Column stats: NONE
   File Output Operator
 compressed: false
@@ -267,6 +269,7 @@ PREHOOK: query: SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -283,6 +286,7 @@ POSTHOOK: query: SELECT
   month(fl_time),
   day(fl_time),
   dayofmonth(fl_time),
+  dayofweek(fl_time),
   weekofyear(fl_time),
   date(fl_time),
   to_date(fl_time),
@@ -293,149 +297,150 @@ FROM date_udf_flight_orc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf_flight_orc
  A masked pattern was here 
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287583200 201010  20  20  42  2010-10-20  
2010-10-20  2010-10-22  2010-10-18  3945
-1287669600 201010  21  21  42  2010-10-21  
2010-10-21  2010-10-23  2010-10-19  3946
-1287669600 201010  21  21  42  2010-10-21  
2010-10-21  2010-10-23  2010-10-19  3946
-1287669600 201010  21  21  42  2010-10-21  
2010-10-21  2010-10-23  2010-10-19  3946
-1287669600 201010  21  21  42  2010-10-21  
2010-10-21  2010-10-23  2010-10-19  3946
-1287669600 201010  21  21  42  2010-10-21  
2010-10-21  2010-10-23  2010-10-19  3946
-1287669600 201010 

[2/2] hive git commit: HIVE-14873: Add UDF for extraction of 'day of week' (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-10-05 Thread jcamacho
HIVE-14873: Add UDF for extraction of 'day of week' (Jesus Camacho Rodriguez, 
reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/59539885
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/59539885
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/59539885

Branch: refs/heads/master
Commit: 59539885725a96cca4b3f0759a5b26e0d8198dc8
Parents: 96bcee86
Author: Jesus Camacho Rodriguez 
Authored: Sat Oct 1 09:30:35 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Wed Oct 5 17:51:41 2016 +0100

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  18 +-
 .../expressions/VectorUDFDayOfWeekDate.java |  39 +
 .../expressions/VectorUDFDayOfWeekString.java   |  61 ++
 .../VectorUDFDayOfWeekTimestamp.java|  39 +
 .../hive/ql/optimizer/physical/Vectorizer.java  |   2 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   1 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g|   3 +-
 .../apache/hadoop/hive/ql/udf/UDFDayOfWeek.java | 105 +++
 ql/src/test/queries/clientpositive/extract.q|   3 +
 .../clientpositive/vectorized_date_funcs.q  |   8 +-
 .../test/results/clientpositive/extract.q.out   |  11 +
 .../llap/vectorized_date_funcs.q.out| 846 ++-
 .../results/clientpositive/show_functions.q.out |   1 +
 .../clientpositive/vectorized_date_funcs.q.out  | 846 ++-
 14 files changed, 1139 insertions(+), 844 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/59539885/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 6b29be1..0dbbc1d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -37,14 +37,6 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorDay;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorHour;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorMinute;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorMonth;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorQuarter;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorSecond;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorWeek;
-import org.apache.hadoop.hive.ql.udf.UDFDateFloorYear;
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.ql.udf.UDAFPercentile;
 import org.apache.hadoop.hive.ql.udf.UDFAcos;
@@ -57,7 +49,16 @@ import org.apache.hadoop.hive.ql.udf.UDFChr;
 import org.apache.hadoop.hive.ql.udf.UDFConv;
 import org.apache.hadoop.hive.ql.udf.UDFCos;
 import org.apache.hadoop.hive.ql.udf.UDFCrc32;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorDay;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorHour;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorMinute;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorMonth;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorQuarter;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorSecond;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorWeek;
+import org.apache.hadoop.hive.ql.udf.UDFDateFloorYear;
 import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfWeek;
 import org.apache.hadoop.hive.ql.udf.UDFDegrees;
 import org.apache.hadoop.hive.ql.udf.UDFE;
 import org.apache.hadoop.hive.ql.udf.UDFExp;
@@ -283,6 +284,7 @@ public final class FunctionRegistry {
 
 system.registerUDF("day", UDFDayOfMonth.class, false);
 system.registerUDF("dayofmonth", UDFDayOfMonth.class, false);
+system.registerUDF("dayofweek", UDFDayOfWeek.class, false);
 system.registerUDF("month", UDFMonth.class, false);
 system.registerGenericUDF("quarter", GenericUDFQuarter.class);
 system.registerUDF("year", UDFYear.class, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/59539885/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
new file mode 100644
index 000..bd9c480
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekDate.java
@@ -0,0 +1,39 @@
+/**
+ * 

hive git commit: HIVE-14099: Hive security authorization can be disabled by users (Aihua Xu, reviewed by Yongzhi Chen)

2016-10-05 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 54ff3f56d -> 96bcee86c


HIVE-14099: Hive security authorization can be disabled by users (Aihua Xu, 
reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/96bcee86
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/96bcee86
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/96bcee86

Branch: refs/heads/master
Commit: 96bcee86c8df1c1a511fe70c1a37840d996abb9e
Parents: 54ff3f5
Author: Aihua Xu 
Authored: Wed Oct 5 10:19:33 2016 -0400
Committer: Aihua Xu 
Committed: Wed Oct 5 10:19:33 2016 -0400

--
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/96bcee86/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 4c3ef3e..5ea9751 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3116,8 +3116,9 @@ public class HiveConf extends Configuration {
 
 
 HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list",
-
"hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role,"
 +
-"hive.server2.xsrf.filter.enabled",
+
"hive.security.authenticator.manager,hive.security.authorization.manager," +
+
"hive.security.metastore.authorization.manager,hive.security.metastore.authenticator.manager,"
 +
+
"hive.users.in.admin.role,hive.server2.xsrf.filter.enabled,hive.security.authorization.enabled",
 "Comma separated list of configuration options which are immutable at 
runtime"),
 HIVE_CONF_HIDDEN_LIST("hive.conf.hidden.list",
 METASTOREPWD.varname + "," + HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname
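
The practical effect: once an option is on the restricted list, attempts to change it
from a session fail. A sketch of what a client now sees when trying to switch
authorization off (error text paraphrased, connection details assumed):

  // Sketch: flipping a restricted option from a session is rejected at runtime.
  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.SQLException;
  import java.sql.Statement;

  public class RestrictedConfExample {
    public static void main(String[] args) throws Exception {
      Connection con = DriverManager.getConnection(
          "jdbc:hive2://localhost:10000/default", "hive", "");
      Statement st = con.createStatement();
      try {
        st.execute("set hive.security.authorization.enabled=false");
      } catch (SQLException e) {
        // Expected: the option is immutable at runtime per hive.conf.restricted.list.
        System.out.println(e.getMessage());
      }
      con.close();
    }
  }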