[49/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp 
b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index dede79b..f667ba0 100644
--- a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1240,14 +1240,14 @@ uint32_t 
ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 this->success.clear();
-uint32_t _size1018;
-::apache::thrift::protocol::TType _etype1021;
-xfer += iprot->readListBegin(_etype1021, _size1018);
-this->success.resize(_size1018);
-uint32_t _i1022;
-for (_i1022 = 0; _i1022 < _size1018; ++_i1022)
+uint32_t _size1045;
+::apache::thrift::protocol::TType _etype1048;
+xfer += iprot->readListBegin(_etype1048, _size1045);
+this->success.resize(_size1045);
+uint32_t _i1049;
+for (_i1049 = 0; _i1049 < _size1045; ++_i1049)
 {
-  xfer += iprot->readString(this->success[_i1022]);
+  xfer += iprot->readString(this->success[_i1049]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1286,10 +1286,10 @@ uint32_t 
ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto
 xfer += oprot->writeFieldBegin("success", 
::apache::thrift::protocol::T_LIST, 0);
 {
  xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, 
static_cast<uint32_t>(this->success.size()));
-  std::vector<std::string> ::const_iterator _iter1023;
-  for (_iter1023 = this->success.begin(); _iter1023 != 
this->success.end(); ++_iter1023)
+  std::vector<std::string> ::const_iterator _iter1050;
+  for (_iter1050 = this->success.begin(); _iter1050 != 
this->success.end(); ++_iter1050)
   {
-xfer += oprot->writeString((*_iter1023));
+xfer += oprot->writeString((*_iter1050));
   }
   xfer += oprot->writeListEnd();
 }
@@ -1334,14 +1334,14 @@ uint32_t 
ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 (*(this->success)).clear();
-uint32_t _size1024;
-::apache::thrift::protocol::TType _etype1027;
-xfer += iprot->readListBegin(_etype1027, _size1024);
-(*(this->success)).resize(_size1024);
-uint32_t _i1028;
-for (_i1028 = 0; _i1028 < _size1024; ++_i1028)
+uint32_t _size1051;
+::apache::thrift::protocol::TType _etype1054;
+xfer += iprot->readListBegin(_etype1054, _size1051);
+(*(this->success)).resize(_size1051);
+uint32_t _i1055;
+for (_i1055 = 0; _i1055 < _size1051; ++_i1055)
 {
-  xfer += iprot->readString((*(this->success))[_i1028]);
+  xfer += iprot->readString((*(this->success))[_i1055]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1458,14 +1458,14 @@ uint32_t 
ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 this->success.clear();
-uint32_t _size1029;
-::apache::thrift::protocol::TType _etype1032;
-xfer += iprot->readListBegin(_etype1032, _size1029);
-this->success.resize(_size1029);
-uint32_t _i1033;
-for (_i1033 = 0; _i1033 < _size1029; ++_i1033)
+uint32_t _size1056;
+::apache::thrift::protocol::TType _etype1059;
+xfer += iprot->readListBegin(_etype1059, _size1056);
+this->success.resize(_size1056);
+uint32_t _i1060;
+for (_i1060 = 0; _i1060 < _size1056; ++_i1060)
 {
-  xfer += iprot->readString(this->success[_i1033]);
+  xfer += iprot->readString(this->success[_i1060]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1504,10 +1504,10 @@ uint32_t 
ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p
 xfer += oprot->writeFieldBegin("success", 
::apache::thrift::protocol::T_LIST, 0);
 {
  xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, 
static_cast<uint32_t>(this->success.size()));
-  std::vector<std::string> ::const_iterator _iter1034;
-  for (_iter1034 = this->success.begin(); _iter1034 != 
this->success.end(); ++_iter1034)
+  std::vector<std::string> ::const_iterator _iter1061;
+  for (_iter1061 = this->success.begin(); _iter1061 != 
this->success.end(); ++_iter1061)
   {
-xfer += 
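
These hunks only renumber the temporaries (_size1018 becomes _size1045, and 
so on) that the Thrift compiler emits; regenerating the bindings after new 
IDL types shifts every counter, while the list<string> read/write logic 
itself is unchanged. As a point of reference, here is a minimal Java sketch 
of the same list write path using libthrift; the buffer size and database 
names are arbitrary choices for illustration:

  import org.apache.thrift.protocol.TBinaryProtocol;
  import org.apache.thrift.protocol.TList;
  import org.apache.thrift.protocol.TType;
  import org.apache.thrift.transport.TMemoryBuffer;
  import java.util.Arrays;
  import java.util.List;

  public class ListWriteSketch {
    public static void main(String[] args) throws Exception {
      List<String> databases = Arrays.asList("default", "sales");
      TMemoryBuffer buf = new TMemoryBuffer(64);        // arbitrary initial size
      TBinaryProtocol oprot = new TBinaryProtocol(buf);
      // Same shape as the generated C++ above: list header, then elements.
      oprot.writeListBegin(new TList(TType.STRING, databases.size()));
      for (String db : databases) {
        oprot.writeString(db);
      }
      oprot.writeListEnd();
      System.out.println("serialized " + buf.length() + " bytes");
    }
  }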

[32/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/sql/derby/hive-schema-2.3.0.derby.sql
--
diff --git 
a/standalone-metastore/src/main/sql/derby/hive-schema-2.3.0.derby.sql 
b/standalone-metastore/src/main/sql/derby/hive-schema-2.3.0.derby.sql
new file mode 100644
index 000..332b93e
--- /dev/null
+++ b/standalone-metastore/src/main/sql/derby/hive-schema-2.3.0.derby.sql
@@ -0,0 +1,456 @@
+-- Timestamp: 2011-09-22 15:32:02.024
+-- Source database is: 
/home/carl/Work/repos/hive1/metastore/scripts/upgrade/derby/mdb
+-- Connection URL is: 
jdbc:derby:/home/carl/Work/repos/hive1/metastore/scripts/upgrade/derby/mdb
+-- Specified schema is: APP
+-- appendLogs: false
+
+-- --
+-- DDL Statements for functions
+-- --
+
+CREATE FUNCTION "APP"."NUCLEUS_ASCII" (C CHAR(1)) RETURNS INTEGER LANGUAGE 
JAVA PARAMETER STYLE JAVA READS SQL DATA CALLED ON NULL INPUT EXTERNAL NAME 
'org.datanucleus.store.rdbms.adapter.DerbySQLFunction.ascii' ;
+
+CREATE FUNCTION "APP"."NUCLEUS_MATCHES" (TEXT VARCHAR(8000),PATTERN 
VARCHAR(8000)) RETURNS INTEGER LANGUAGE JAVA PARAMETER STYLE JAVA READS SQL 
DATA CALLED ON NULL INPUT EXTERNAL NAME 
'org.datanucleus.store.rdbms.adapter.DerbySQLFunction.matches' ;
+
+-- --
+-- DDL Statements for tables
+-- --
+
+CREATE TABLE "APP"."DBS" ("DB_ID" BIGINT NOT NULL, "DESC" VARCHAR(4000), 
"DB_LOCATION_URI" VARCHAR(4000) NOT NULL, "NAME" VARCHAR(128), "OWNER_NAME" 
VARCHAR(128), "OWNER_TYPE" VARCHAR(10));
+
+CREATE TABLE "APP"."TBL_PRIVS" ("TBL_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" 
INTEGER NOT NULL, "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), 
"GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" 
VARCHAR(128), "TBL_PRIV" VARCHAR(128), "TBL_ID" BIGINT);
+
+CREATE TABLE "APP"."DATABASE_PARAMS" ("DB_ID" BIGINT NOT NULL, "PARAM_KEY" 
VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000));
+
+CREATE TABLE "APP"."TBL_COL_PRIVS" ("TBL_COLUMN_GRANT_ID" BIGINT NOT NULL, 
"COLUMN_NAME" VARCHAR(767), "CREATE_TIME" INTEGER NOT NULL, "GRANT_OPTION" 
SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), 
"PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "TBL_COL_PRIV" 
VARCHAR(128), "TBL_ID" BIGINT);
+
+CREATE TABLE "APP"."SERDE_PARAMS" ("SERDE_ID" BIGINT NOT NULL, "PARAM_KEY" 
VARCHAR(256) NOT NULL, "PARAM_VALUE" CLOB);
+
+CREATE TABLE "APP"."COLUMNS_V2" ("CD_ID" BIGINT NOT NULL, "COMMENT" 
VARCHAR(4000), "COLUMN_NAME" VARCHAR(767) NOT NULL, "TYPE_NAME" CLOB, 
"INTEGER_IDX" INTEGER NOT NULL);
+
+CREATE TABLE "APP"."SORT_COLS" ("SD_ID" BIGINT NOT NULL, "COLUMN_NAME" 
VARCHAR(767), "ORDER" INTEGER NOT NULL, "INTEGER_IDX" INTEGER NOT NULL);
+
+CREATE TABLE "APP"."CDS" ("CD_ID" BIGINT NOT NULL);
+
+CREATE TABLE "APP"."PARTITION_KEY_VALS" ("PART_ID" BIGINT NOT NULL, 
"PART_KEY_VAL" VARCHAR(256), "INTEGER_IDX" INTEGER NOT NULL);
+
+CREATE TABLE "APP"."DB_PRIVS" ("DB_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" 
INTEGER NOT NULL, "DB_ID" BIGINT, "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" 
VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), 
"PRINCIPAL_TYPE" VARCHAR(128), "DB_PRIV" VARCHAR(128));
+
+CREATE TABLE "APP"."IDXS" ("INDEX_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER 
NOT NULL, "DEFERRED_REBUILD" CHAR(1) NOT NULL, "INDEX_HANDLER_CLASS" 
VARCHAR(4000), "INDEX_NAME" VARCHAR(128), "INDEX_TBL_ID" BIGINT, 
"LAST_ACCESS_TIME" INTEGER NOT NULL, "ORIG_TBL_ID" BIGINT, "SD_ID" BIGINT);
+
+CREATE TABLE "APP"."INDEX_PARAMS" ("INDEX_ID" BIGINT NOT NULL, "PARAM_KEY" 
VARCHAR(256) NOT NULL, "PARAM_VALUE" VARCHAR(4000));
+
+CREATE TABLE "APP"."PARTITIONS" ("PART_ID" BIGINT NOT NULL, "CREATE_TIME" 
INTEGER NOT NULL, "LAST_ACCESS_TIME" INTEGER NOT NULL, "PART_NAME" 
VARCHAR(767), "SD_ID" BIGINT, "TBL_ID" BIGINT);
+
+CREATE TABLE "APP"."SERDES" ("SERDE_ID" BIGINT NOT NULL, "NAME" VARCHAR(128), 
"SLIB" VARCHAR(4000));
+
+CREATE TABLE "APP"."PART_PRIVS" ("PART_GRANT_ID" BIGINT NOT NULL, 
"CREATE_TIME" INTEGER NOT NULL, "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" 
VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PART_ID" BIGINT, "PRINCIPAL_NAME" 
VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "PART_PRIV" VARCHAR(128));
+
+CREATE TABLE "APP"."ROLE_MAP" ("ROLE_GRANT_ID" BIGINT NOT NULL, "ADD_TIME" 
INTEGER NOT NULL, "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), 
"GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" 
VARCHAR(128), "ROLE_ID" BIGINT);
+
+CREATE TABLE "APP"."TYPES" ("TYPES_ID" BIGINT NOT NULL, "TYPE_NAME" 
VARCHAR(128), "TYPE1" VARCHAR(767), "TYPE2" VARCHAR(767));
+
+CREATE TABLE "APP"."GLOBAL_PRIVS" ("USER_GRANT_ID" BIGINT NOT NULL, 
"CREATE_TIME" INTEGER NOT NULL, "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" 
VARCHAR(128), 
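
A minimal way to try any of these statements outside a metastore is to load 
them into an in-memory Derby database. This sketch assumes the Derby jar is 
on the classpath; the database name and the choice of the CDS statement are 
arbitrary:

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.Statement;

  public class DerbySchemaSketch {
    public static void main(String[] args) throws Exception {
      // In-memory Derby database, created on first connect.
      Connection conn = DriverManager.getConnection(
          "jdbc:derby:memory:metastore_db;create=true");
      try (Statement stmt = conn.createStatement()) {
        // One statement from hive-schema-2.3.0.derby.sql, verbatim:
        stmt.executeUpdate(
            "CREATE TABLE \"APP\".\"CDS\" (\"CD_ID\" BIGINT NOT NULL)");
      }
      conn.close();
    }
  }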

[22/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
deleted file mode 100644
index dfd80bc..000
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import junit.framework.Assert;
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-
-/**
- * TestRetryingHMSHandler. Test case for
- * {@link org.apache.hadoop.hive.metastore.RetryingHMSHandler}
- */
-public class TestRetryingHMSHandler extends TestCase {
-  private HiveConf hiveConf;
-  private HiveMetaStoreClient msc;
-
-  @Override
-  protected void setUp() throws Exception {
-
-super.setUp();
-System.setProperty("hive.metastore.pre.event.listeners",
-AlternateFailurePreListener.class.getName());
-int port = MetaStoreTestUtils.startMetaStoreWithRetry();
-hiveConf = new HiveConf(this.getClass());
-hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + 
port);
-hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-hiveConf.setIntVar(HiveConf.ConfVars.HMSHANDLERATTEMPTS, 2);
-hiveConf.setTimeVar(HiveConf.ConfVars.HMSHANDLERINTERVAL, 0, 
TimeUnit.MILLISECONDS);
-hiveConf.setBoolVar(HiveConf.ConfVars.HMSHANDLERFORCERELOADCONF, false);
-msc = new HiveMetaStoreClient(hiveConf);
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-super.tearDown();
-  }
-
-  // Create a database and a table in that database.  Because the 
AlternateFailurePreListener is
-  // being used, each attempt to create something should require two calls by 
the RetryingHMSHandler
-  public void testRetryingHMSHandler() throws Exception {
-String dbName = "hive4159";
-String tblName = "tmptbl";
-
-Database db = new Database();
-db.setName(dbName);
-msc.createDatabase(db);
-
-Assert.assertEquals(2, AlternateFailurePreListener.getCallCount());
-
-ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
-cols.add(new FieldSchema("c1", serdeConstants.STRING_TYPE_NAME, ""));
-cols.add(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, ""));
-
-Map<String, String> params = new HashMap<String, String>();
-params.put("test_param_1", "Use this for comments etc");
-
-Map<String, String> serdParams = new HashMap<String, String>();
-serdParams.put(serdeConstants.SERIALIZATION_FORMAT, "1");
-
-StorageDescriptor sd = new StorageDescriptor();
-
-sd.setCols(cols);
-sd.setCompressed(false);
-sd.setNumBuckets(1);
-sd.setParameters(params);
-sd.setBucketCols(new ArrayList<String>(2));
-sd.getBucketCols().add("name");
-sd.setSerdeInfo(new SerDeInfo());
-sd.getSerdeInfo().setName(tblName);
-sd.getSerdeInfo().setParameters(serdParams);
-sd.getSerdeInfo().getParameters()
-.put(serdeConstants.SERIALIZATION_FORMAT, "1");
-sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
-

[28/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/sql/postgres/hive-schema-2.3.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/hive-schema-2.3.0.postgres.sql 
b/standalone-metastore/src/main/sql/postgres/hive-schema-2.3.0.postgres.sql
new file mode 100644
index 000..0dca1a0
--- /dev/null
+++ b/standalone-metastore/src/main/sql/postgres/hive-schema-2.3.0.postgres.sql
@@ -0,0 +1,1593 @@
+--
+-- PostgreSQL database dump
+--
+
+SET statement_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = off;
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+SET escape_string_warning = off;
+
+SET search_path = public, pg_catalog;
+
+SET default_tablespace = '';
+
+SET default_with_oids = false;
+
+--
+-- Name: BUCKETING_COLS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "BUCKETING_COLS" (
+"SD_ID" bigint NOT NULL,
+"BUCKET_COL_NAME" character varying(256) DEFAULT NULL::character varying,
+"INTEGER_IDX" bigint NOT NULL
+);
+
+
+--
+-- Name: CDS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "CDS" (
+"CD_ID" bigint NOT NULL
+);
+
+
+--
+-- Name: COLUMNS_V2; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "COLUMNS_V2" (
+"CD_ID" bigint NOT NULL,
+"COMMENT" character varying(4000),
+"COLUMN_NAME" character varying(767) NOT NULL,
+"TYPE_NAME" text,
+"INTEGER_IDX" integer NOT NULL
+);
+
+
+--
+-- Name: DATABASE_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "DATABASE_PARAMS" (
+"DB_ID" bigint NOT NULL,
+"PARAM_KEY" character varying(180) NOT NULL,
+"PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: DBS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "DBS" (
+"DB_ID" bigint NOT NULL,
+"DESC" character varying(4000) DEFAULT NULL::character varying,
+"DB_LOCATION_URI" character varying(4000) NOT NULL,
+"NAME" character varying(128) DEFAULT NULL::character varying,
+"OWNER_NAME" character varying(128) DEFAULT NULL::character varying,
+"OWNER_TYPE" character varying(10) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: DB_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "DB_PRIVS" (
+"DB_GRANT_ID" bigint NOT NULL,
+"CREATE_TIME" bigint NOT NULL,
+"DB_ID" bigint,
+"GRANT_OPTION" smallint NOT NULL,
+"GRANTOR" character varying(128) DEFAULT NULL::character varying,
+"GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
+"DB_PRIV" character varying(128) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: GLOBAL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "GLOBAL_PRIVS" (
+"USER_GRANT_ID" bigint NOT NULL,
+"CREATE_TIME" bigint NOT NULL,
+"GRANT_OPTION" smallint NOT NULL,
+"GRANTOR" character varying(128) DEFAULT NULL::character varying,
+"GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
+"USER_PRIV" character varying(128) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: IDXS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "IDXS" (
+"INDEX_ID" bigint NOT NULL,
+"CREATE_TIME" bigint NOT NULL,
+"DEFERRED_REBUILD" boolean NOT NULL,
+"INDEX_HANDLER_CLASS" character varying(4000) DEFAULT NULL::character 
varying,
+"INDEX_NAME" character varying(128) DEFAULT NULL::character varying,
+"INDEX_TBL_ID" bigint,
+"LAST_ACCESS_TIME" bigint NOT NULL,
+"ORIG_TBL_ID" bigint,
+"SD_ID" bigint
+);
+
+
+--
+-- Name: INDEX_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "INDEX_PARAMS" (
+"INDEX_ID" bigint NOT NULL,
+"PARAM_KEY" character varying(256) NOT NULL,
+"PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: NUCLEUS_TABLES; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "NUCLEUS_TABLES" (
+"CLASS_NAME" character varying(128) NOT NULL,
+"TABLE_NAME" character varying(128) NOT NULL,
+"TYPE" character varying(4) NOT NULL,
+"OWNER" character varying(2) NOT NULL,
+"VERSION" character varying(20) NOT NULL,
+"INTERFACE_NAME" character varying(255) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: PARTITIONS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "PARTITIONS" (
+

[13/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
deleted file mode 100644
index 22e246f..000
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
+++ /dev/null
@@ -1,487 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore.tools;
-
-import java.net.URI;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.ObjectStore;
-
-/**
- * This class provides Hive admins a tool to
- * - execute JDOQL against the metastore using DataNucleus
- * - perform HA name node upgrade
- */
-
-public class HiveMetaTool {
-
-  private static final Logger LOG = 
LoggerFactory.getLogger(HiveMetaTool.class.getName());
-  private final Options cmdLineOptions = new Options();
-  private ObjectStore objStore;
-  private boolean isObjStoreInitialized;
-
-  public HiveMetaTool() {
-this.isObjStoreInitialized = false;
-  }
-
-  @SuppressWarnings("static-access")
-  private void init() {
-
-System.out.println("Initializing HiveMetaTool..");
-
-Option help = new Option("help", "print this message");
-Option listFSRoot = new Option("listFSRoot", "print the current FS root 
locations");
-Option executeJDOQL =
-OptionBuilder.withArgName("query-string")
-.hasArgs()
-.withDescription("execute the given JDOQL query")
-.create("executeJDOQL");
-
-/* Ideally we want to specify the different arguments to updateLocation as 
separate argNames.
- * However if we did that, HelpFormatter swallows all but the last 
argument. Note that this is
- * a known issue with the HelpFormatter class that has not been fixed. We 
specify all arguments
- * with a single argName to workaround this HelpFormatter bug.
- */
-Option updateFSRootLoc =
-OptionBuilder
-.withArgName("new-loc> " + "

[44/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ISchema.java
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ISchema.java
 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ISchema.java
new file mode 100644
index 000..92d8b52
--- /dev/null
+++ 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ISchema.java
@@ -0,0 +1,1162 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+@org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public class 
ISchema implements org.apache.thrift.TBase<ISchema, ISchema._Fields>, 
java.io.Serializable, Cloneable, Comparable<ISchema> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("ISchema");
+
+  private static final org.apache.thrift.protocol.TField 
SCHEMA_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("schemaType", 
org.apache.thrift.protocol.TType.I32, (short)1);
+  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new 
org.apache.thrift.protocol.TField("name", 
org.apache.thrift.protocol.TType.STRING, (short)2);
+  private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = 
new org.apache.thrift.protocol.TField("dbName", 
org.apache.thrift.protocol.TType.STRING, (short)3);
+  private static final org.apache.thrift.protocol.TField 
COMPATIBILITY_FIELD_DESC = new 
org.apache.thrift.protocol.TField("compatibility", 
org.apache.thrift.protocol.TType.I32, (short)4);
+  private static final org.apache.thrift.protocol.TField 
VALIDATION_LEVEL_FIELD_DESC = new 
org.apache.thrift.protocol.TField("validationLevel", 
org.apache.thrift.protocol.TType.I32, (short)5);
+  private static final org.apache.thrift.protocol.TField CAN_EVOLVE_FIELD_DESC 
= new org.apache.thrift.protocol.TField("canEvolve", 
org.apache.thrift.protocol.TType.BOOL, (short)6);
+  private static final org.apache.thrift.protocol.TField 
SCHEMA_GROUP_FIELD_DESC = new org.apache.thrift.protocol.TField("schemaGroup", 
org.apache.thrift.protocol.TType.STRING, (short)7);
+  private static final org.apache.thrift.protocol.TField 
DESCRIPTION_FIELD_DESC = new org.apache.thrift.protocol.TField("description", 
org.apache.thrift.protocol.TType.STRING, (short)8);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = 
new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+schemes.put(StandardScheme.class, new ISchemaStandardSchemeFactory());
+schemes.put(TupleScheme.class, new ISchemaTupleSchemeFactory());
+  }
+
+  private SchemaType schemaType; // required
+  private String name; // required
+  private String dbName; // required
+  private SchemaCompatibility compatibility; // required
+  private SchemaValidation validationLevel; // required
+  private boolean canEvolve; // required
+  private String schemaGroup; // optional
+  private String description; // optional
+
+  /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+/**
+ * 
+ * @see SchemaType
+ */
+SCHEMA_TYPE((short)1, "schemaType"),
+NAME((short)2, "name"),
+DB_NAME((short)3, "dbName"),
+/**
+ * 
+ * @see SchemaCompatibility
+ */
+COMPATIBILITY((short)4, "compatibility"),
+/**
+ * 
+ * @see SchemaValidation
+ */
+VALIDATION_LEVEL((short)5, "validationLevel"),
+CAN_EVOLVE((short)6, "canEvolve"),
+SCHEMA_GROUP((short)7, "schemaGroup"),

[20/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
new file mode 100644
index 000..d4cedb0
--- /dev/null
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
@@ -0,0 +1,264 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.ValidTxnList;
+import org.apache.hadoop.hive.common.ValidReadTxnList;
+import org.apache.hadoop.hive.metastore.api.DataOperationType;
+import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.LockState;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.List;
+
+/**
+ * Unit tests for {@link 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient}.  For now this just has
+ * transaction and locking tests.  The goal here is not to test all
+ * functionality possible through the interface, as all permutations of DB
+ * operations should be tested in the appropriate DB handler classes.  The
+ * goal is to test that we can properly pass the messages through the thrift
+ * service.
+ *
+ * This is in the ql directory rather than the metastore directory because it
+ * requires the hive-exec jar, and the hive-exec jar already depends on the
+ * hive-metastore jar, so I can't make hive-metastore depend on hive-exec.
+ */
+public class TestHiveMetaStoreTxns {
+
+  private final Configuration conf = MetastoreConf.newMetastoreConf();
+  private IMetaStoreClient client;
+
+  @Test
+  public void testTxns() throws Exception {
+List tids = client.openTxns("me", 3).getTxn_ids();
+Assert.assertEquals(1L, (long) tids.get(0));
+Assert.assertEquals(2L, (long) tids.get(1));
+Assert.assertEquals(3L, (long) tids.get(2));
+client.rollbackTxn(1);
+client.commitTxn(2);
+ValidTxnList validTxns = client.getValidTxns();
+Assert.assertFalse(validTxns.isTxnValid(1));
+Assert.assertTrue(validTxns.isTxnValid(2));
+Assert.assertFalse(validTxns.isTxnValid(3));
+Assert.assertFalse(validTxns.isTxnValid(4));
+  }
+
+  @Test
+  public void testOpenTxnNotExcluded() throws Exception {
+List tids = client.openTxns("me", 3).getTxn_ids();
+Assert.assertEquals(1L, (long) tids.get(0));
+Assert.assertEquals(2L, (long) tids.get(1));
+Assert.assertEquals(3L, (long) tids.get(2));
+client.rollbackTxn(1);
+client.commitTxn(2);
+ValidTxnList validTxns = client.getValidTxns(3);
+Assert.assertFalse(validTxns.isTxnValid(1));
+Assert.assertTrue(validTxns.isTxnValid(2));
+Assert.assertTrue(validTxns.isTxnValid(3));
+Assert.assertFalse(validTxns.isTxnValid(4));
+  }
+
+  @Test
+  public void testTxnRange() throws Exception {
+ValidTxnList validTxns = client.getValidTxns();
+Assert.assertEquals(ValidTxnList.RangeResponse.NONE,
+validTxns.isTxnRangeValid(1L, 3L));
+List tids = client.openTxns("me", 5).getTxn_ids();
+
+HeartbeatTxnRangeResponse rsp = client.heartbeatTxnRange(1, 5);
+Assert.assertEquals(0, rsp.getNosuch().size());
+Assert.assertEquals(0, rsp.getAborted().size());
+
+client.rollbackTxn(1L);
+client.commitTxn(2L);
+client.commitTxn(3L);
+client.commitTxn(4L);
+validTxns = client.getValidTxns();
+System.out.println("validTxns = " + validTxns);
+Assert.assertEquals(ValidTxnList.RangeResponse.ALL,
+validTxns.isTxnRangeValid(2L, 2L));
+Assert.assertEquals(ValidTxnList.RangeResponse.ALL,
+validTxns.isTxnRangeValid(2L, 3L));
+

[21/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
new file mode 100644
index 000..2599ab1
--- /dev/null
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -0,0 +1,3071 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.lang.reflect.Field;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.utils.FileUtils;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
+import org.datanucleus.api.jdo.JDOPersistenceManager;
+import org.datanucleus.api.jdo.JDOPersistenceManagerFactory;
+import org.junit.Assert;
+import org.junit.Before;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
+import org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static 

[38/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py 
b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 863031d..87df6d0 100644
--- a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -211,6 +211,100 @@ class EventRequestType:
 "DELETE": 3,
   }
 
+class SerdeType:
+  HIVE = 1
+  SCHEMA_REGISTRY = 2
+
+  _VALUES_TO_NAMES = {
+1: "HIVE",
+2: "SCHEMA_REGISTRY",
+  }
+
+  _NAMES_TO_VALUES = {
+"HIVE": 1,
+"SCHEMA_REGISTRY": 2,
+  }
+
+class SchemaType:
+  HIVE = 1
+  AVRO = 2
+
+  _VALUES_TO_NAMES = {
+1: "HIVE",
+2: "AVRO",
+  }
+
+  _NAMES_TO_VALUES = {
+"HIVE": 1,
+"AVRO": 2,
+  }
+
+class SchemaCompatibility:
+  NONE = 1
+  BACKWARD = 2
+  FORWARD = 3
+  BOTH = 4
+
+  _VALUES_TO_NAMES = {
+1: "NONE",
+2: "BACKWARD",
+3: "FORWARD",
+4: "BOTH",
+  }
+
+  _NAMES_TO_VALUES = {
+"NONE": 1,
+"BACKWARD": 2,
+"FORWARD": 3,
+"BOTH": 4,
+  }
+
+class SchemaValidation:
+  LATEST = 1
+  ALL = 2
+
+  _VALUES_TO_NAMES = {
+1: "LATEST",
+2: "ALL",
+  }
+
+  _NAMES_TO_VALUES = {
+"LATEST": 1,
+"ALL": 2,
+  }
+
+class SchemaVersionState:
+  INITIATED = 1
+  START_REVIEW = 2
+  CHANGES_REQUIRED = 3
+  REVIEWED = 4
+  ENABLED = 5
+  DISABLED = 6
+  ARCHIVED = 7
+  DELETED = 8
+
+  _VALUES_TO_NAMES = {
+1: "INITIATED",
+2: "START_REVIEW",
+3: "CHANGES_REQUIRED",
+4: "REVIEWED",
+5: "ENABLED",
+6: "DISABLED",
+7: "ARCHIVED",
+8: "DELETED",
+  }
+
+  _NAMES_TO_VALUES = {
+"INITIATED": 1,
+"START_REVIEW": 2,
+"CHANGES_REQUIRED": 3,
+"REVIEWED": 4,
+"ENABLED": 5,
+"DISABLED": 6,
+"ARCHIVED": 7,
+"DELETED": 8,
+  }
+
 class FunctionType:
   JAVA = 1
 
@@ -2883,6 +2977,10 @@ class SerDeInfo:
- name
- serializationLib
- parameters
+   - description
+   - serializerClass
+   - deserializerClass
+   - serdeType
   """
 
   thrift_spec = (
@@ -2890,12 +2988,20 @@ class SerDeInfo:
 (1, TType.STRING, 'name', None, None, ), # 1
 (2, TType.STRING, 'serializationLib', None, None, ), # 2
 (3, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, 
), # 3
+(4, TType.STRING, 'description', None, None, ), # 4
+(5, TType.STRING, 'serializerClass', None, None, ), # 5
+(6, TType.STRING, 'deserializerClass', None, None, ), # 6
+(7, TType.I32, 'serdeType', None, None, ), # 7
   )
 
-  def __init__(self, name=None, serializationLib=None, parameters=None,):
+  def __init__(self, name=None, serializationLib=None, parameters=None, 
description=None, serializerClass=None, deserializerClass=None, 
serdeType=None,):
 self.name = name
 self.serializationLib = serializationLib
 self.parameters = parameters
+self.description = description
+self.serializerClass = serializerClass
+self.deserializerClass = deserializerClass
+self.serdeType = serdeType
 
   def read(self, iprot):
 if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and 
isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is 
not None and fastbinary is not None:
@@ -2927,6 +3033,26 @@ class SerDeInfo:
   iprot.readMapEnd()
 else:
   iprot.skip(ftype)
+  elif fid == 4:
+if ftype == TType.STRING:
+  self.description = iprot.readString()
+else:
+  iprot.skip(ftype)
+  elif fid == 5:
+if ftype == TType.STRING:
+  self.serializerClass = iprot.readString()
+else:
+  iprot.skip(ftype)
+  elif fid == 6:
+if ftype == TType.STRING:
+  self.deserializerClass = iprot.readString()
+else:
+  iprot.skip(ftype)
+  elif fid == 7:
+if ftype == TType.I32:
+  self.serdeType = iprot.readI32()
+else:
+  iprot.skip(ftype)
   else:
 iprot.skip(ftype)
   iprot.readFieldEnd()
@@ -2953,6 +3079,22 @@ class SerDeInfo:
 oprot.writeString(viter100)
   oprot.writeMapEnd()
   oprot.writeFieldEnd()
+if self.description is not None:
+  oprot.writeFieldBegin('description', TType.STRING, 4)
+  oprot.writeString(self.description)
+  oprot.writeFieldEnd()
+if self.serializerClass is not None:
+  oprot.writeFieldBegin('serializerClass', TType.STRING, 5)
+  oprot.writeString(self.serializerClass)
+  oprot.writeFieldEnd()
+if self.deserializerClass is not None:
+  oprot.writeFieldBegin('deserializerClass', TType.STRING, 6)
+  oprot.writeString(self.deserializerClass)
+  oprot.writeFieldEnd()
+if self.serdeType is not None:
+  oprot.writeFieldBegin('serdeType', TType.I32, 7)
+
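
The same four new fields exist on the generated Java SerDeInfo bean. A small 
sketch of setting them; the serde and class names are placeholders, not real 
classes:

  import org.apache.hadoop.hive.metastore.api.SerDeInfo;
  import org.apache.hadoop.hive.metastore.api.SerdeType;

  public class SerDeInfoSketch {
    public static void main(String[] args) {
      SerDeInfo serde = new SerDeInfo();
      serde.setName("avro_registry_serde");                       // placeholder
      serde.setSerializationLib("com.example.AvroSerDe");         // placeholder
      serde.setDescription("serde backed by a schema registry");  // new field 4
      serde.setSerializerClass("com.example.AvroSerializer");     // new field 5
      serde.setDeserializerClass("com.example.AvroDeserializer"); // new field 6
      serde.setSerdeType(SerdeType.SCHEMA_REGISTRY);              // new field 7
      System.out.println(serde);
    }
  }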

[36/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterISchemaEvent.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterISchemaEvent.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterISchemaEvent.java
new file mode 100644
index 000..3df3780
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterISchemaEvent.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hive.metastore.IHMSHandler;
+import org.apache.hadoop.hive.metastore.api.ISchema;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class PreAlterISchemaEvent extends PreEventContext {
+
+  private final ISchema oldSchema, newSchema;
+
+  public PreAlterISchemaEvent(IHMSHandler handler, ISchema oldSchema, ISchema 
newSchema) {
+super(PreEventType.ALTER_ISCHEMA, handler);
+this.oldSchema = oldSchema;
+this.newSchema = newSchema;
+  }
+
+  public ISchema getOldSchema() {
+return oldSchema;
+  }
+
+  public ISchema getNewSchema() {
+return newSchema;
+  }
+}
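
A sketch of a pre-event listener that watches the new ALTER_ISCHEMA event 
through the existing MetaStorePreEventListener contract; the audit-print 
policy is invented for illustration:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
  import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
  import org.apache.hadoop.hive.metastore.api.MetaException;
  import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
  import org.apache.hadoop.hive.metastore.events.PreAlterISchemaEvent;
  import org.apache.hadoop.hive.metastore.events.PreEventContext;

  public class SchemaAuditListener extends MetaStorePreEventListener {
    public SchemaAuditListener(Configuration config) {
      super(config);
    }

    @Override
    public void onEvent(PreEventContext context)
        throws MetaException, NoSuchObjectException, InvalidOperationException {
      if (context.getEventType() == PreEventContext.PreEventType.ALTER_ISCHEMA) {
        PreAlterISchemaEvent e = (PreAlterISchemaEvent) context;
        // Log the compatibility transition before the alter is applied.
        System.out.println("schema " + e.getOldSchema().getName()
            + " -> compatibility " + e.getNewSchema().getCompatibility());
      }
    }
  }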

http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterSchemaVersionEvent.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterSchemaVersionEvent.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterSchemaVersionEvent.java
new file mode 100644
index 000..63ddb3b
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreAlterSchemaVersionEvent.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hive.metastore.IHMSHandler;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class PreAlterSchemaVersionEvent extends PreEventContext {
+
+  private final SchemaVersion oldSchemaVersion, newSchemaVersion;
+
+  public PreAlterSchemaVersionEvent(IHMSHandler handler, SchemaVersion 
oldSchemaVersion,
+SchemaVersion newSchemaVersion) {
+super(PreEventType.ALTER_SCHEMA_VERSION, handler);
+this.oldSchemaVersion = oldSchemaVersion;
+this.newSchemaVersion = newSchemaVersion;
+  }
+
+  public SchemaVersion getOldSchemaVersion() {
+return oldSchemaVersion;
+  }
+
+  public SchemaVersion getNewSchemaVersion() {
+return newSchemaVersion;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/PreCreateISchemaEvent.java
--
diff --git 

[09/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
new file mode 100644
index 000..573ac01
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -0,0 +1,1825 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.ValidTxnList;
+import org.apache.hadoop.hive.common.classification.RetrySemantics;
+import org.apache.hadoop.hive.metastore.annotation.NoReconnect;
+import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.CmRecycleRequest;
+import org.apache.hadoop.hive.metastore.api.CmRecycleResponse;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.CompactionResponse;
+import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
+import org.apache.hadoop.hive.metastore.api.DataOperationType;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FireEventRequest;
+import org.apache.hadoop.hive.metastore.api.FireEventResponse;
+import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
+import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
+import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
+import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.LockRequest;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.MetadataPpdResult;
+import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest;
+import 

[48/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
--
diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h 
b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
index dc9540d..7c8448c 100644
--- a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
+++ b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
@@ -201,6 +201,20 @@ class ThriftHiveMetastoreIf : virtual public  
::facebook::fb303::FacebookService
   virtual void create_or_update_wm_mapping(WMCreateOrUpdateMappingResponse& 
_return, const WMCreateOrUpdateMappingRequest& request) = 0;
   virtual void drop_wm_mapping(WMDropMappingResponse& _return, const 
WMDropMappingRequest& request) = 0;
   virtual void 
create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingResponse&
 _return, const WMCreateOrDropTriggerToPoolMappingRequest& request) = 0;
+  virtual void create_ischema(const ISchema& schema) = 0;
+  virtual void alter_ischema(const std::string& schemaName, const ISchema& 
newSchema) = 0;
+  virtual void get_ischema(ISchema& _return, const std::string& schemaName) = 
0;
+  virtual void drop_ischema(const std::string& schemaName) = 0;
+  virtual void add_schema_version(const SchemaVersion& schemaVersion) = 0;
+  virtual void get_schema_version(SchemaVersion& _return, const std::string& 
schemaName, const int32_t version) = 0;
+  virtual void get_schema_latest_version(SchemaVersion& _return, const 
std::string& schemaName) = 0;
+  virtual void get_schema_all_versions(std::vector<SchemaVersion> & _return, 
const std::string& schemaName) = 0;
+  virtual void drop_schema_version(const std::string& schemaName, const 
int32_t version) = 0;
+  virtual void get_schemas_by_cols(FindSchemasByColsResp& _return, const 
FindSchemasByColsRqst& rqst) = 0;
+  virtual void map_schema_version_to_serde(const std::string& schemaName, 
const int32_t version, const std::string& serdeName) = 0;
+  virtual void set_schema_version_state(const std::string& schemaName, const 
int32_t version, const SchemaVersionState::type state) = 0;
+  virtual void add_serde(const SerDeInfo& serde) = 0;
+  virtual void get_serde(SerDeInfo& _return, const std::string& serdeName) = 0;
 };
 
 class ThriftHiveMetastoreIfFactory : virtual public  
::facebook::fb303::FacebookServiceIfFactory {
@@ -795,6 +809,48 @@ class ThriftHiveMetastoreNull : virtual public 
ThriftHiveMetastoreIf , virtual p
   void 
create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingResponse&
 /* _return */, const WMCreateOrDropTriggerToPoolMappingRequest& /* request */) 
{
 return;
   }
+  void create_ischema(const ISchema& /* schema */) {
+return;
+  }
+  void alter_ischema(const std::string& /* schemaName */, const ISchema& /* 
newSchema */) {
+return;
+  }
+  void get_ischema(ISchema& /* _return */, const std::string& /* schemaName 
*/) {
+return;
+  }
+  void drop_ischema(const std::string& /* schemaName */) {
+return;
+  }
+  void add_schema_version(const SchemaVersion& /* schemaVersion */) {
+return;
+  }
+  void get_schema_version(SchemaVersion& /* _return */, const std::string& /* 
schemaName */, const int32_t /* version */) {
+return;
+  }
+  void get_schema_latest_version(SchemaVersion& /* _return */, const 
std::string& /* schemaName */) {
+return;
+  }
+  void get_schema_all_versions(std::vector<SchemaVersion> & /* _return */, 
const std::string& /* schemaName */) {
+return;
+  }
+  void drop_schema_version(const std::string& /* schemaName */, const int32_t 
/* version */) {
+return;
+  }
+  void get_schemas_by_cols(FindSchemasByColsResp& /* _return */, const 
FindSchemasByColsRqst& /* rqst */) {
+return;
+  }
+  void map_schema_version_to_serde(const std::string& /* schemaName */, const 
int32_t /* version */, const std::string& /* serdeName */) {
+return;
+  }
+  void set_schema_version_state(const std::string& /* schemaName */, const 
int32_t /* version */, const SchemaVersionState::type /* state */) {
+return;
+  }
+  void add_serde(const SerDeInfo& /* serde */) {
+return;
+  }
+  void get_serde(SerDeInfo& /* _return */, const std::string& /* serdeName */) 
{
+return;
+  }
 };
 
 typedef struct _ThriftHiveMetastore_getMetaConf_args__isset {
@@ -22905,228 +22961,1917 @@ class 
ThriftHiveMetastore_create_or_drop_wm_trigger_to_pool_mapping_presult {
 
 };
 
-class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public 
 ::facebook::fb303::FacebookServiceClient {
+typedef struct _ThriftHiveMetastore_create_ischema_args__isset {
+  _ThriftHiveMetastore_create_ischema_args__isset() : schema(false) {}
+  bool schema :1;
+} _ThriftHiveMetastore_create_ischema_args__isset;
+
+class ThriftHiveMetastore_create_ischema_args {
  public:
-  ThriftHiveMetastoreClient(boost::shared_ptr< 
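
The same fourteen schema-registry calls appear on the generated Java client. 
A sketch of invoking one of them over a raw Thrift connection, assuming 
bindings generated from this same IDL revision (later releases changed 
several of these signatures to request objects), a metastore on the default 
port, and a hypothetical schema name:

  import org.apache.thrift.protocol.TBinaryProtocol;
  import org.apache.thrift.transport.TSocket;
  import org.apache.hadoop.hive.metastore.api.ISchema;
  import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;

  public class SchemaRegistryClientSketch {
    public static void main(String[] args) throws Exception {
      TSocket transport = new TSocket("localhost", 9083); // assumed endpoint
      transport.open();
      ThriftHiveMetastore.Client client =
          new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
      ISchema schema = client.get_ischema("web_events");  // name is hypothetical
      System.out.println(schema.getName() + " type=" + schema.getSchemaType());
      transport.close();
    }
  }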

[05/50] [abbrv] hive git commit: HIVE-15939: Make cast expressions comply more to sql2011 (Zoltan Haindrich, reviewed by Ashutosh Chauhan)

2017-12-07 Thread gates
HIVE-15939: Make cast expressions comply more to sql2011 (Zoltan Haindrich, 
reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/95dadac9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/95dadac9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/95dadac9

Branch: refs/heads/standalone-metastore
Commit: 95dadac9fa81702ce3af0d8759bee5082f2f2013
Parents: c2fc0fb
Author: Zoltan Haindrich 
Authored: Wed Dec 6 09:36:54 2017 +0100
Committer: Zoltan Haindrich 
Committed: Wed Dec 6 09:47:55 2017 +0100

--
 .../ql/exec/vector/VectorizationContext.java|  56 +--
 .../vector/expressions/CastStringToBoolean.java |  49 ++
 .../vector/expressions/FuncStringToLong.java| 145 +
 .../vector/expressions/VectorExpression.java|   7 -
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |  11 +-
 .../expressions/TestVectorMathFunctions.java|  24 +++
 .../vector/expressions/TestVectorTypeCasts.java |  17 ++
 .../queries/clientpositive/udf_to_boolean.q |   6 +
 .../vector_udf_string_to_boolean.q  |  23 +++
 .../clientpositive/llap/vectorized_casts.q.out  |   4 +-
 .../results/clientpositive/udf_to_boolean.q.out |  54 +++
 .../clientpositive/vector_empty_where.q.out |   2 +-
 .../vector_udf_string_to_boolean.q.out  | 156 +++
 .../clientpositive/vectorized_casts.q.out   |   4 +-
 .../PrimitiveObjectInspectorUtils.java  |  59 ++-
 .../TestPrimitiveObjectInspectorUtils.java  |   6 +
 16 files changed, 549 insertions(+), 74 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/95dadac9/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 5c7d7ee..0ad6816 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -52,47 +52,11 @@ import 
org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
 import 
org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
 import 
org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.InputExpressionType;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFBloomFilter;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFBloomFilterMerge;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCount;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCountMerge;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCountStar;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFSumDecimal;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFSumDecimal64ToDecimal;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFSumTimestamp;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDecimal;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDecimalComplete;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDecimalFinal;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDecimalPartial2;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDouble;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDoubleComplete;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgFinal;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgLong;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgLongComplete;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgPartial2;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgTimestamp;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgTimestampComplete;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxDecimal;
-import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxDouble;
-import 

[50/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
HIVE-17990 Add Thrift and DB storage for Schema Registry objects


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bd212257
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bd212257
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bd212257

Branch: refs/heads/standalone-metastore
Commit: bd212257f2c8f5472894e22501b17a56fd86318c
Parents: b99b8c7
Author: Alan Gates 
Authored: Thu Oct 19 16:49:38 2017 -0700
Committer: Alan Gates 
Committed: Wed Dec 6 18:15:46 2017 -0800

--
 .../listener/DummyRawStoreFailEvent.java|73 +
 standalone-metastore/pom.xml| 3 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 28222 ++---
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h|  2427 +-
 .../ThriftHiveMetastore_server.skeleton.cpp |70 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  5044 +--
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   465 +-
 .../metastore/api/FindSchemasByColsResp.java|   449 +
 .../api/FindSchemasByColsRespEntry.java |   497 +
 .../metastore/api/FindSchemasByColsRqst.java|   605 +
 .../hadoop/hive/metastore/api/ISchema.java  |  1162 +
 .../hive/metastore/api/SchemaCompatibility.java |51 +
 .../hadoop/hive/metastore/api/SchemaType.java   |45 +
 .../hive/metastore/api/SchemaValidation.java|45 +
 .../hive/metastore/api/SchemaVersion.java   |  1407 +
 .../hive/metastore/api/SchemaVersionState.java  |63 +
 .../hadoop/hive/metastore/api/SerDeInfo.java|   443 +-
 .../hadoop/hive/metastore/api/SerdeType.java|45 +
 .../hive/metastore/api/ThriftHiveMetastore.java | 19230 +--
 .../gen-php/metastore/ThriftHiveMetastore.php   | 27808 +---
 .../src/gen/thrift/gen-php/metastore/Types.php  |  1026 +
 .../hive_metastore/ThriftHiveMetastore-remote   |98 +
 .../hive_metastore/ThriftHiveMetastore.py   |  5280 ++-
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   739 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   186 +-
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   932 +
 .../hadoop/hive/metastore/HiveMetaStore.java|   476 +-
 .../hive/metastore/HiveMetaStoreClient.java |74 +-
 .../hadoop/hive/metastore/IMetaStoreClient.java |   158 +
 .../hive/metastore/MetaStoreEventListener.java  |26 +
 .../metastore/MetaStoreListenerNotifier.java|42 +
 .../hadoop/hive/metastore/ObjectStore.java  |   408 +-
 .../apache/hadoop/hive/metastore/RawStore.java  |   135 +
 .../hive/metastore/cache/CachedStore.java   |75 +
 .../client/builder/DatabaseBuilder.java | 2 +-
 .../client/builder/ISchemaBuilder.java  |93 +
 .../client/builder/SchemaVersionBuilder.java|90 +
 .../client/builder/SerdeAndColsBuilder.java |   124 +
 .../builder/StorageDescriptorBuilder.java   |57 +-
 .../metastore/events/AddSchemaVersionEvent.java |40 +
 .../metastore/events/AlterISchemaEvent.java |45 +
 .../events/AlterSchemaVersionEvent.java |46 +
 .../metastore/events/CreateISchemaEvent.java|39 +
 .../hive/metastore/events/DropISchemaEvent.java |39 +
 .../events/DropSchemaVersionEvent.java  |40 +
 .../events/PreAddSchemaVersionEvent.java|39 +
 .../metastore/events/PreAlterISchemaEvent.java  |44 +
 .../events/PreAlterSchemaVersionEvent.java  |45 +
 .../metastore/events/PreCreateISchemaEvent.java |39 +
 .../metastore/events/PreDropISchemaEvent.java   |39 +
 .../events/PreDropSchemaVersionEvent.java   |39 +
 .../hive/metastore/events/PreEventContext.java  |10 +-
 .../metastore/events/PreReadISchemaEvent.java   |39 +
 .../events/PreReadhSchemaVersionEvent.java  |36 +
 .../hive/metastore/messaging/EventMessage.java  | 8 +-
 .../metastore/messaging/MessageFactory.java | 7 +
 .../hadoop/hive/metastore/model/MISchema.java   |   107 +
 .../hive/metastore/model/MSchemaVersion.java|   127 +
 .../hadoop/hive/metastore/model/MSerDeInfo.java |48 +-
 .../main/resources/datanucleus-log4j.properties |17 +
 .../src/main/resources/package.jdo  |77 +
 .../main/sql/derby/hive-schema-3.0.0.derby.sql  |30 +-
 .../sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql  |34 +
 .../main/sql/mssql/hive-schema-3.0.0.mssql.sql  |33 +-
 .../sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql  |33 +
 .../main/sql/mysql/hive-schema-3.0.0.mysql.sql  |38 +
 .../sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql  |38 +
 .../sql/oracle/hive-schema-3.0.0.oracle.sql |33 +-
 .../oracle/upgrade-2.3.0-to-3.0.0.oracle.sql|34 +
 .../sql/postgres/hive-schema-3.0.0.postgres.sql |34 +-
 .../upgrade-2.3.0-to-3.0.0.postgres.sql |34 +
 .../src/main/thrift/hive_metastore.thrift   |   112 +-
 

[16/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
deleted file mode 100644
index 4afc03b..000
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ /dev/null
@@ -1,2782 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.isIndexTable;
-
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.net.InetAddress;
-import java.net.URI;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.NoSuchElementException;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.security.PrivilegedExceptionAction;
-
-import javax.security.auth.login.LoginException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.ObjectPair;
-import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.common.ValidTxnList;
-import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
-import org.apache.hadoop.hive.common.classification.InterfaceAudience;
-import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
-import 
org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.conf.HiveConfUtil;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.txn.TxnUtils;
-import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
-import org.apache.hadoop.hive.shims.Utils;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.thrift.TApplicationException;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
-
-/**
- * Hive Metastore Client.
- * The public implementation of IMetaStoreClient. Methods not inherited from 
IMetaStoreClient
- * are not public and can change. Hence this is marked as unstable.
- * For users who require retry mechanism when the connection between metastore 
and client is
- * broken, RetryingMetaStoreClient class should be used.
- */
-@Public
-@Unstable
-public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
-  /**
-   * Capabilities of the current client. If this client talks to a MetaStore 
server in a manner
-   * implying the usage of some expanded features that require client-side 
support that this client
-   * doesn't have (e.g. a getting a table of a 
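
The class comment above points users who need resilience at
RetryingMetaStoreClient. As a rough sketch of that pattern (the
getProxy(HiveConf) factory and the metastore URI are assumptions about this
era of the API, not part of this patch):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient;

public class RetryingClientSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // Illustrative URI; point this at a running metastore.
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
    // getProxy wraps the real HiveMetaStoreClient in a dynamic proxy that
    // reconnects and retries failed calls instead of surfacing them.
    IMetaStoreClient client = RetryingMetaStoreClient.getProxy(conf);
    try {
      System.out.println(client.getAllDatabases());
    } finally {
      client.close();
    }
  }
}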

[17/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes.  This closes 
#272  (Alan Gates, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d79c4595
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d79c4595
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d79c4595

Branch: refs/heads/standalone-metastore
Commit: d79c45951d0382dc42d5ce0c9474d994948522b7
Parents: 95dadac
Author: Alan Gates 
Authored: Wed Dec 6 13:19:00 2017 -0800
Committer: Alan Gates 
Committed: Wed Dec 6 13:19:00 2017 -0800

--
 .../hive/accumulo/AccumuloStorageHandler.java   |2 +-
 .../hadoop/hive/druid/DruidStorageHandler.java  |2 +-
 .../apache/hadoop/hive/hbase/HBaseMetaHook.java |2 +-
 .../hadoop/hive/hbase/HBaseStorageHandler.java  |1 +
 hcatalog/core/pom.xml   |2 +-
 .../hive/hcatalog/common/HiveClientCache.java   |2 +-
 .../mapreduce/FileOutputCommitterContainer.java |4 +-
 .../mapreduce/FileOutputFormatContainer.java|4 +-
 .../hive/hcatalog/mapreduce/InternalUtil.java   |2 +-
 .../apache/hive/hcatalog/cli/TestPermsGrp.java  |1 -
 .../mapreduce/TestHCatPartitionPublish.java |1 -
 .../listener/DbNotificationListener.java|   41 +-
 .../streaming/DelimitedInputWriter.java |3 +-
 .../hcatalog/streaming/StrictJsonWriter.java|2 +-
 .../hcatalog/streaming/StrictRegexWriter.java   |2 +-
 hcatalog/webhcat/java-client/pom.xml|2 +-
 .../hive/hcatalog/api/HCatClientHMSImpl.java|5 +-
 hcatalog/webhcat/svr/pom.xml|2 +-
 itests/hcatalog-unit/pom.xml|2 +-
 .../hive/hcatalog/hbase/ManyMiniCluster.java|4 +-
 itests/hive-blobstore/pom.xml   |4 +-
 itests/hive-minikdc/pom.xml |4 +-
 itests/hive-unit-hadoop2/pom.xml|2 +-
 itests/hive-unit/pom.xml|2 +-
 .../AbstractTestAuthorizationApiAuthorizer.java |1 +
 .../hadoop/hive/metastore/TestFilterHooks.java  |4 +
 .../hive/metastore/TestHiveMetaStore.java   |3 +-
 ...TestHiveMetaStoreWithEnvironmentContext.java |1 +
 .../hive/metastore/TestMarkPartitionRemote.java |1 +
 .../metastore/TestMetaStoreAuthorization.java   |1 +
 .../TestMetaStoreEndFunctionListener.java   |1 +
 .../metastore/TestMetaStoreEventListener.java   |1 +
 .../TestMetaStoreEventListenerOnlyOnCommit.java |1 +
 .../metastore/TestMetaStoreInitListener.java|1 +
 .../metastore/TestMetaStoreListenersError.java  |1 +
 .../hive/metastore/TestMetaStoreMetrics.java|1 +
 .../hive/metastore/TestRemoteHiveMetaStore.java |1 +
 .../TestRemoteHiveMetaStoreIpAddress.java   |1 +
 .../hive/metastore/TestRetryingHMSHandler.java  |1 +
 .../hive/ql/parse/TestReplicationScenarios.java |1 +
 .../TestMetastoreAuthorizationProvider.java |4 +
 .../hive/serde2/TestSerdeWithFieldComments.java |4 +-
 .../thrift/TestThriftCliServiceMessageSize.java |   14 +-
 itests/qtest-accumulo/pom.xml   |4 +-
 itests/qtest-spark/pom.xml  |4 +-
 itests/qtest/pom.xml|4 +-
 itests/util/pom.xml |2 +-
 llap-server/pom.xml |2 +-
 metastore/pom.xml   |   11 -
 .../hive/metastore/HiveMetaStoreClient.java | 2782 --
 .../hive/metastore/HiveMetaStoreUtils.java  |  213 ++
 .../hadoop/hive/metastore/IMetaStoreClient.java | 1826 
 .../hadoop/hive/metastore/MetaStoreUtils.java   | 1095 ---
 .../hive/metastore/RetryingMetaStoreClient.java |  340 ---
 .../metastore/SerDeStorageSchemaReader.java |4 +-
 .../hive/metastore/messaging/EventUtils.java|  202 --
 .../messaging/event/filters/AndFilter.java  |   39 -
 .../messaging/event/filters/BasicFilter.java|   33 -
 .../event/filters/DatabaseAndTableFilter.java   |   52 -
 .../event/filters/EventBoundaryFilter.java  |   34 -
 .../event/filters/MessageFormatFilter.java  |   36 -
 .../hive/metastore/tools/HiveMetaTool.java  |  487 ---
 .../metastore/AlternateFailurePreListener.java  |   62 -
 .../metastore/DummyEndFunctionListener.java |   47 -
 .../hadoop/hive/metastore/DummyListener.java|  144 -
 .../metastore/DummyMetaStoreInitListener.java   |   44 -
 .../hadoop/hive/metastore/DummyPreListener.java |   49 -
 .../DummyRawStoreControlledCommit.java  | 1053 ---
 .../InjectableBehaviourObjectStore.java |  104 -
 .../hive/metastore/IpAddressListener.java   |  103 -
 .../hive/metastore/MetaStoreTestUtils.java  |  199 --
 .../MockPartitionExpressionForMetastore.java|   59 -
 

[27/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql 
b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
new file mode 100644
index 000..4bb3631
--- /dev/null
+++ b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
@@ -0,0 +1,1699 @@
+--
+-- PostgreSQL database dump
+--
+
+SET statement_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = off;
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+SET escape_string_warning = off;
+
+SET search_path = public, pg_catalog;
+
+SET default_tablespace = '';
+
+SET default_with_oids = false;
+
+--
+-- Name: BUCKETING_COLS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "BUCKETING_COLS" (
+"SD_ID" bigint NOT NULL,
+"BUCKET_COL_NAME" character varying(256) DEFAULT NULL::character varying,
+"INTEGER_IDX" bigint NOT NULL
+);
+
+
+--
+-- Name: CDS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "CDS" (
+"CD_ID" bigint NOT NULL
+);
+
+
+--
+-- Name: COLUMNS_V2; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "COLUMNS_V2" (
+"CD_ID" bigint NOT NULL,
+"COMMENT" character varying(4000),
+"COLUMN_NAME" character varying(767) NOT NULL,
+"TYPE_NAME" text,
+"INTEGER_IDX" integer NOT NULL
+);
+
+
+--
+-- Name: DATABASE_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "DATABASE_PARAMS" (
+"DB_ID" bigint NOT NULL,
+"PARAM_KEY" character varying(180) NOT NULL,
+"PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: DBS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "DBS" (
+"DB_ID" bigint NOT NULL,
+"DESC" character varying(4000) DEFAULT NULL::character varying,
+"DB_LOCATION_URI" character varying(4000) NOT NULL,
+"NAME" character varying(128) DEFAULT NULL::character varying,
+"OWNER_NAME" character varying(128) DEFAULT NULL::character varying,
+"OWNER_TYPE" character varying(10) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: DB_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "DB_PRIVS" (
+"DB_GRANT_ID" bigint NOT NULL,
+"CREATE_TIME" bigint NOT NULL,
+"DB_ID" bigint,
+"GRANT_OPTION" smallint NOT NULL,
+"GRANTOR" character varying(128) DEFAULT NULL::character varying,
+"GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
+"DB_PRIV" character varying(128) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: GLOBAL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "GLOBAL_PRIVS" (
+"USER_GRANT_ID" bigint NOT NULL,
+"CREATE_TIME" bigint NOT NULL,
+"GRANT_OPTION" smallint NOT NULL,
+"GRANTOR" character varying(128) DEFAULT NULL::character varying,
+"GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying,
+"PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying,
+"USER_PRIV" character varying(128) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: IDXS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "IDXS" (
+"INDEX_ID" bigint NOT NULL,
+"CREATE_TIME" bigint NOT NULL,
+"DEFERRED_REBUILD" boolean NOT NULL,
+"INDEX_HANDLER_CLASS" character varying(4000) DEFAULT NULL::character 
varying,
+"INDEX_NAME" character varying(128) DEFAULT NULL::character varying,
+"INDEX_TBL_ID" bigint,
+"LAST_ACCESS_TIME" bigint NOT NULL,
+"ORIG_TBL_ID" bigint,
+"SD_ID" bigint
+);
+
+
+--
+-- Name: INDEX_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "INDEX_PARAMS" (
+"INDEX_ID" bigint NOT NULL,
+"PARAM_KEY" character varying(256) NOT NULL,
+"PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: NUCLEUS_TABLES; Type: TABLE; Schema: public; Owner: hiveuser; 
Tablespace:
+--
+
+CREATE TABLE "NUCLEUS_TABLES" (
+"CLASS_NAME" character varying(128) NOT NULL,
+"TABLE_NAME" character varying(128) NOT NULL,
+"TYPE" character varying(4) NOT NULL,
+"OWNER" character varying(2) NOT NULL,
+"VERSION" character varying(20) NOT NULL,
+"INTERFACE_NAME" character varying(255) DEFAULT NULL::character varying
+);
+
+
+--
+-- Name: PARTITIONS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "PARTITIONS" (
+

[03/50] [abbrv] hive git commit: HIVE-18210: create resource plan allows duplicates (Harish Jaiprakash reviewed by Sergey Shelukhin)

2017-12-07 Thread gates
HIVE-18210: create resource plan allows duplicates (Harish Jaiprakash reviewed 
by Sergey Shelukhin)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a1f54df4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a1f54df4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a1f54df4

Branch: refs/heads/standalone-metastore
Commit: a1f54df470537fb2a8a80855bf244d757bfabb9c
Parents: 1968a9d
Author: Harish Jaiprakash 
Authored: Wed Dec 6 09:35:13 2017 +0100
Committer: Zoltan Haindrich 
Committed: Wed Dec 6 09:47:55 2017 +0100

--
 metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql| 8 
 metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql | 8 
 2 files changed, 8 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a1f54df4/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
--
diff --git a/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql 
b/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
index 34fcfe6..02288cb 100644
--- a/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
+++ b/metastore/scripts/upgrade/mysql/046-HIVE-17566.mysql.sql
@@ -5,7 +5,7 @@ CREATE TABLE IF NOT EXISTS WM_RESOURCEPLAN (
 `STATUS` varchar(20) NOT NULL,
 `DEFAULT_POOL_ID` bigint(20),
 PRIMARY KEY (`RP_ID`),
-KEY `UNIQUE_WM_RESOURCEPLAN` (`NAME`)
+UNIQUE KEY `UNIQUE_WM_RESOURCEPLAN` (`NAME`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
 CREATE TABLE IF NOT EXISTS WM_POOL
@@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS WM_POOL
 `QUERY_PARALLELISM` int(11),
 `SCHEDULING_POLICY` varchar(767),
 PRIMARY KEY (`POOL_ID`),
-KEY `UNIQUE_WM_POOL` (`RP_ID`, `PATH`),
+UNIQUE KEY `UNIQUE_WM_POOL` (`RP_ID`, `PATH`),
 CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES 
`WM_RESOURCEPLAN` (`RP_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
@@ -31,7 +31,7 @@ CREATE TABLE IF NOT EXISTS WM_TRIGGER
 `TRIGGER_EXPRESSION` varchar(1024),
 `ACTION_EXPRESSION` varchar(1024),
 PRIMARY KEY (`TRIGGER_ID`),
-KEY `UNIQUE_WM_TRIGGER` (`RP_ID`, `NAME`),
+UNIQUE KEY `UNIQUE_WM_TRIGGER` (`RP_ID`, `NAME`),
 CONSTRAINT `WM_TRIGGER_FK1` FOREIGN KEY (`RP_ID`) REFERENCES 
`WM_RESOURCEPLAN` (`RP_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
@@ -53,7 +53,7 @@ CREATE TABLE IF NOT EXISTS WM_MAPPING
 `POOL_ID` bigint(20),
 `ORDERING` int,
 PRIMARY KEY (`MAPPING_ID`),
-KEY `UNIQUE_WM_MAPPING` (`RP_ID`, `ENTITY_TYPE`, `ENTITY_NAME`),
+UNIQUE KEY `UNIQUE_WM_MAPPING` (`RP_ID`, `ENTITY_TYPE`, `ENTITY_NAME`),
 CONSTRAINT `WM_MAPPING_FK1` FOREIGN KEY (`RP_ID`) REFERENCES 
`WM_RESOURCEPLAN` (`RP_ID`),
 CONSTRAINT `WM_MAPPING_FK2` FOREIGN KEY (`POOL_ID`) REFERENCES `WM_POOL` 
(`POOL_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;

http://git-wip-us.apache.org/repos/asf/hive/blob/a1f54df4/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
--
diff --git a/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql 
b/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
index ec95c17..915af8b 100644
--- a/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
+++ b/metastore/scripts/upgrade/mysql/hive-schema-3.0.0.mysql.sql
@@ -856,7 +856,7 @@ CREATE TABLE IF NOT EXISTS WM_RESOURCEPLAN (
 `STATUS` varchar(20) NOT NULL,
 `DEFAULT_POOL_ID` bigint(20),
 PRIMARY KEY (`RP_ID`),
-KEY `UNIQUE_WM_RESOURCEPLAN` (`NAME`)
+UNIQUE KEY `UNIQUE_WM_RESOURCEPLAN` (`NAME`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
 CREATE TABLE IF NOT EXISTS WM_POOL
@@ -868,7 +868,7 @@ CREATE TABLE IF NOT EXISTS WM_POOL
 `QUERY_PARALLELISM` int(11),
 `SCHEDULING_POLICY` varchar(767),
 PRIMARY KEY (`POOL_ID`),
-KEY `UNIQUE_WM_POOL` (`RP_ID`, `PATH`),
+UNIQUE KEY `UNIQUE_WM_POOL` (`RP_ID`, `PATH`),
 CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES 
`WM_RESOURCEPLAN` (`RP_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
@@ -882,7 +882,7 @@ CREATE TABLE IF NOT EXISTS WM_TRIGGER
 `TRIGGER_EXPRESSION` varchar(1024),
 `ACTION_EXPRESSION` varchar(1024),
 PRIMARY KEY (`TRIGGER_ID`),
-KEY `UNIQUE_WM_TRIGGER` (`RP_ID`, `NAME`),
+UNIQUE KEY `UNIQUE_WM_TRIGGER` (`RP_ID`, `NAME`),
 CONSTRAINT `WM_TRIGGER_FK1` FOREIGN KEY (`RP_ID`) REFERENCES 
`WM_RESOURCEPLAN` (`RP_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
@@ -904,7 +904,7 @@ CREATE TABLE IF NOT EXISTS WM_MAPPING
 `POOL_ID` bigint(20),
 `ORDERING` int,
 PRIMARY KEY (`MAPPING_ID`),
-KEY `UNIQUE_WM_MAPPING` (`RP_ID`, `ENTITY_TYPE`, `ENTITY_NAME`),
+UNIQUE KEY 

[42/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index fc57141..192d0db 100644
--- 
a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -400,6 +400,34 @@ import org.slf4j.LoggerFactory;
 
 public WMCreateOrDropTriggerToPoolMappingResponse 
create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest
 request) throws AlreadyExistsException, NoSuchObjectException, 
InvalidObjectException, MetaException, org.apache.thrift.TException;
 
+public void create_ischema(ISchema schema) throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public void alter_ischema(String schemaName, ISchema newSchema) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public ISchema get_ischema(String schemaName) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public void drop_ischema(String schemaName) throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException;
+
+public void add_schema_version(SchemaVersion schemaVersion) throws 
AlreadyExistsException, NoSuchObjectException, MetaException, 
org.apache.thrift.TException;
+
+public SchemaVersion get_schema_version(String schemaName, int version) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public SchemaVersion get_schema_latest_version(String schemaName) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public List<SchemaVersion> get_schema_all_versions(String schemaName) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public void drop_schema_version(String schemaName, int version) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst 
rqst) throws MetaException, org.apache.thrift.TException;
+
+public void map_schema_version_to_serde(String schemaName, int version, 
String serdeName) throws NoSuchObjectException, MetaException, 
org.apache.thrift.TException;
+
+public void set_schema_version_state(String schemaName, int version, 
SchemaVersionState state) throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException;
+
+public void add_serde(SerDeInfo serde) throws AlreadyExistsException, 
MetaException, org.apache.thrift.TException;
+
+public SerDeInfo get_serde(String serdeName) throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException;
+
   }
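
An asynchronous twin of each new call also lands on the AsyncIface declared
just below. For orientation, here is a minimal sketch of driving two of the
new RPCs through the generated synchronous client; the host, port, and
unsecured plain binary transport are assumptions (a secured metastore would
need SASL on top), and the serde values are hypothetical:

import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;

public class RawSchemaRpcSketch {
  public static void main(String[] args) throws Exception {
    TSocket transport = new TSocket("localhost", 9083);  // assumed endpoint
    transport.open();
    ThriftHiveMetastore.Client client =
        new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

    SerDeInfo serde = new SerDeInfo();
    serde.setName("example-serde");  // hypothetical serde registration
    serde.setSerializationLib("org.apache.hadoop.hive.serde2.avro.AvroSerDe");
    client.add_serde(serde);         // new RPC from this patch

    // Read it back through the matching lookup RPC.
    System.out.println(client.get_serde("example-serde").getSerializationLib());
    transport.close();
  }
}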
 
   @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public interface 
AsyncIface extends com.facebook.fb303.FacebookService .AsyncIface {
@@ -762,6 +790,34 @@ import org.slf4j.LoggerFactory;
 
 public void 
create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest
 request, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
+public void create_ischema(ISchema schema, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void alter_ischema(String schemaName, ISchema newSchema, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void get_ischema(String schemaName, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void drop_ischema(String schemaName, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void add_schema_version(SchemaVersion schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void get_schema_version(String schemaName, int version, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void get_schema_latest_version(String schemaName, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void get_schema_all_versions(String schemaName, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+public void drop_schema_version(String schemaName, int 

[43/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SchemaVersion.java
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SchemaVersion.java
 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SchemaVersion.java
new file mode 100644
index 000..7107e59
--- /dev/null
+++ 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SchemaVersion.java
@@ -0,0 +1,1407 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+@org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public class 
SchemaVersion implements org.apache.thrift.TBase<SchemaVersion, SchemaVersion._Fields>, java.io.Serializable, Cloneable, 
Comparable<SchemaVersion> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("SchemaVersion");
+
+  private static final org.apache.thrift.protocol.TField 
SCHEMA_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("schemaName", 
org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField VERSION_FIELD_DESC = 
new org.apache.thrift.protocol.TField("version", 
org.apache.thrift.protocol.TType.I32, (short)2);
+  private static final org.apache.thrift.protocol.TField CREATED_AT_FIELD_DESC 
= new org.apache.thrift.protocol.TField("createdAt", 
org.apache.thrift.protocol.TType.I64, (short)3);
+  private static final org.apache.thrift.protocol.TField COLS_FIELD_DESC = new 
org.apache.thrift.protocol.TField("cols", 
org.apache.thrift.protocol.TType.LIST, (short)4);
+  private static final org.apache.thrift.protocol.TField STATE_FIELD_DESC = 
new org.apache.thrift.protocol.TField("state", 
org.apache.thrift.protocol.TType.I32, (short)5);
+  private static final org.apache.thrift.protocol.TField 
DESCRIPTION_FIELD_DESC = new org.apache.thrift.protocol.TField("description", 
org.apache.thrift.protocol.TType.STRING, (short)6);
+  private static final org.apache.thrift.protocol.TField 
SCHEMA_TEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("schemaText", 
org.apache.thrift.protocol.TType.STRING, (short)7);
+  private static final org.apache.thrift.protocol.TField 
FINGERPRINT_FIELD_DESC = new org.apache.thrift.protocol.TField("fingerprint", 
org.apache.thrift.protocol.TType.STRING, (short)8);
+  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new 
org.apache.thrift.protocol.TField("name", 
org.apache.thrift.protocol.TType.STRING, (short)9);
+  private static final org.apache.thrift.protocol.TField SER_DE_FIELD_DESC = 
new org.apache.thrift.protocol.TField("serDe", 
org.apache.thrift.protocol.TType.STRUCT, (short)10);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = 
new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+schemes.put(StandardScheme.class, new 
SchemaVersionStandardSchemeFactory());
+schemes.put(TupleScheme.class, new SchemaVersionTupleSchemeFactory());
+  }
+
+  private String schemaName; // required
+  private int version; // required
+  private long createdAt; // required
+  private List<FieldSchema> cols; // required
+  private SchemaVersionState state; // optional
+  private String description; // optional
+  private String schemaText; // optional
+  private String fingerprint; // optional
+  private String name; // optional
+  private SerDeInfo serDe; // optional
+
+  /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+

[37/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index fc254c6..0c0e408 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2739,11 +2739,83 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
   public void createOrDropTriggerToPoolMapping(String resourcePlanName, String 
triggerName,
   String poolPath, boolean shouldDrop) throws AlreadyExistsException, 
NoSuchObjectException,
   InvalidObjectException, MetaException, TException {
-WMCreateOrDropTriggerToPoolMappingRequest request = new 
WMCreateOrDropTriggerToPoolMappingRequest();
+WMCreateOrDropTriggerToPoolMappingRequest request =
+new WMCreateOrDropTriggerToPoolMappingRequest();
 request.setResourcePlanName(resourcePlanName);
 request.setTriggerName(triggerName);
 request.setPoolPath(poolPath);
 request.setDrop(shouldDrop);
 client.create_or_drop_wm_trigger_to_pool_mapping(request);
   }
+
+  public void createISchema(ISchema schema) throws TException {
+client.create_ischema(schema);
+  }
+
+  @Override
+  public void alterISchema(String schemaName, ISchema newSchema) throws 
TException {
+client.alter_ischema(schemaName, newSchema);
+  }
+
+  @Override
+  public ISchema getISchema(String name) throws TException {
+return client.get_ischema(name);
+  }
+
+  @Override
+  public void dropISchema(String name) throws TException {
+client.drop_ischema(name);
+  }
+
+  @Override
+  public void addSchemaVersion(SchemaVersion schemaVersion) throws TException {
+client.add_schema_version(schemaVersion);
+  }
+
+  @Override
+  public SchemaVersion getSchemaVersion(String schemaName, int version) throws 
TException {
+return client.get_schema_version(schemaName, version);
+  }
+
+  @Override
+  public SchemaVersion getSchemaLatestVersion(String schemaName) throws 
TException {
+return client.get_schema_latest_version(schemaName);
+  }
+
+  @Override
+  public List<SchemaVersion> getSchemaAllVersions(String schemaName) throws 
TException {
+return client.get_schema_all_versions(schemaName);
+  }
+
+  @Override
+  public void dropSchemaVersion(String schemaName, int version) throws 
TException {
+client.drop_schema_version(schemaName, version);
+  }
+
+  @Override
+  public FindSchemasByColsResp getSchemaByCols(FindSchemasByColsRqst rqst) 
throws TException {
+return client.get_schemas_by_cols(rqst);
+  }
+
+  @Override
+  public void mapSchemaVersionToSerde(String schemaName, int version, String 
serdeName)
+  throws TException {
+client.map_schema_version_to_serde(schemaName, version, serdeName);
+  }
+
+  @Override
+  public void setSchemaVersionState(String schemaName, int version, 
SchemaVersionState state)
+  throws TException {
+client.set_schema_version_state(schemaName, version, state);
+  }
+
+  @Override
+  public void addSerDe(SerDeInfo serDeInfo) throws TException {
+client.add_serde(serDeInfo);
+  }
+
+  @Override
+  public SerDeInfo getSerDe(String serDeName) throws TException {
+return client.get_serde(serDeName);
+  }
 }
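
Taken together, the wrappers above give a full schema-registry round trip. A
hedged sketch follows: the SchemaVersion field names come from the generated
SchemaVersion.java in this patch, while the ISchema setter, the already
connected client, and the column list are assumptions made for illustration.

import java.util.List;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.ISchema;
import org.apache.hadoop.hive.metastore.api.SchemaVersion;
import org.apache.hadoop.hive.metastore.api.SchemaVersionState;
import org.apache.thrift.TException;

class SchemaRegistryRoundTrip {
  static void roundTrip(HiveMetaStoreClient client, List<FieldSchema> cols)
      throws TException {
    ISchema schema = new ISchema();
    schema.setName("web_events");        // assumed Thrift setter on ISchema

    client.createISchema(schema);        // create_ischema on the wire

    SchemaVersion v1 = new SchemaVersion();
    v1.setSchemaName("web_events");      // fields as declared in SchemaVersion.java
    v1.setVersion(1);
    v1.setCreatedAt(System.currentTimeMillis() / 1000L);
    v1.setCols(cols);
    client.addSchemaVersion(v1);         // add_schema_version on the wire

    // Fetch the newest version and advance its lifecycle state.
    SchemaVersion latest = client.getSchemaLatestVersion("web_events");
    client.setSchemaVersionState("web_events", latest.getVersion(),
        SchemaVersionState.ENABLED);
  }
}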

http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 573ac01..6e0c1a7 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -47,6 +47,8 @@ import org.apache.hadoop.hive.metastore.api.DataOperationType;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsResp;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsRqst;
 import org.apache.hadoop.hive.metastore.api.FireEventRequest;
 import org.apache.hadoop.hive.metastore.api.FireEventResponse;
 import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
@@ -60,6 +62,7 @@ import 
org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
 import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
 

[08/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index d18ddc8..b46cc38 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -463,6 +463,9 @@ public class MetastoreConf {
 "hive.metastore.event.message.factory",
 "org.apache.hadoop.hive.metastore.messaging.json.JSONMessageFactory",
 "Factory class for making encoding and decoding messages in the events 
generated."),
+EVENT_DB_LISTENER_TTL("metastore.event.db.listener.timetolive",
+"hive.metastore.event.db.listener.timetolive", 86400, TimeUnit.SECONDS,
+"time after which events will be removed from the database listener 
queue"),
 
EVENT_DB_NOTIFICATION_API_AUTH("metastore.metastore.event.db.notification.api.auth",
 "hive.metastore.event.db.notification.api.auth", true,
 "Should metastore do authorization against database notification 
related APIs such as get_next_notification.\n" +
@@ -799,6 +802,19 @@ public class MetastoreConf {
 "internal use only, true when in testing tez"),
 // We need to track this as some listeners pass it through our config and 
we need to honor
 // the system properties.
+HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
+"hive.security.authorization.manager",
+
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory",
+"The Hive client authorization manager class name. The user defined 
authorization class should implement \n" +
+"interface 
org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider."),
+
HIVE_METASTORE_AUTHENTICATOR_MANAGER("hive.security.metastore.authenticator.manager",
+"hive.security.metastore.authenticator.manager",
+
"org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator",
+"authenticator manager class name to be used in the metastore for 
authentication. \n" +
+"The user defined authenticator should implement interface 
org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider."),
+
HIVE_METASTORE_AUTHORIZATION_AUTH_READS("hive.security.metastore.authorization.auth.reads",
+"hive.security.metastore.authorization.auth.reads", true,
+"If this is true, metastore authorizer authorizes read actions on 
database, table"),
 HIVE_METASTORE_AUTHORIZATION_MANAGER(NO_SUCH_KEY,
 "hive.security.metastore.authorization.manager",
 
"org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider",

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
new file mode 100644
index 000..7d8c1d4
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.messaging;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import 
org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
+import org.apache.thrift.TException;
+
+import java.io.IOException;
+import 

[41/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php 
b/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
index 6ca56cb..3ca6f9a 100644
--- 
a/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
+++ 
b/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
@@ -1381,6 +1381,106 @@ interface ThriftHiveMetastoreIf extends 
\FacebookServiceIf {
* @throws \metastore\MetaException
*/
   public function 
create_or_drop_wm_trigger_to_pool_mapping(\metastore\WMCreateOrDropTriggerToPoolMappingRequest
 $request);
+  /**
+   * @param \metastore\ISchema $schema
+   * @throws \metastore\AlreadyExistsException
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function create_ischema(\metastore\ISchema $schema);
+  /**
+   * @param string $schemaName
+   * @param \metastore\ISchema $newSchema
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function alter_ischema($schemaName, \metastore\ISchema $newSchema);
+  /**
+   * @param string $schemaName
+   * @return \metastore\ISchema
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function get_ischema($schemaName);
+  /**
+   * @param string $schemaName
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\InvalidOperationException
+   * @throws \metastore\MetaException
+   */
+  public function drop_ischema($schemaName);
+  /**
+   * @param \metastore\SchemaVersion $schemaVersion
+   * @throws \metastore\AlreadyExistsException
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function add_schema_version(\metastore\SchemaVersion $schemaVersion);
+  /**
+   * @param string $schemaName
+   * @param int $version
+   * @return \metastore\SchemaVersion
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function get_schema_version($schemaName, $version);
+  /**
+   * @param string $schemaName
+   * @return \metastore\SchemaVersion
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function get_schema_latest_version($schemaName);
+  /**
+   * @param string $schemaName
+   * @return \metastore\SchemaVersion[]
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function get_schema_all_versions($schemaName);
+  /**
+   * @param string $schemaName
+   * @param int $version
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function drop_schema_version($schemaName, $version);
+  /**
+   * @param \metastore\FindSchemasByColsRqst $rqst
+   * @return \metastore\FindSchemasByColsResp
+   * @throws \metastore\MetaException
+   */
+  public function get_schemas_by_cols(\metastore\FindSchemasByColsRqst $rqst);
+  /**
+   * @param string $schemaName
+   * @param int $version
+   * @param string $serdeName
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function map_schema_version_to_serde($schemaName, $version, 
$serdeName);
+  /**
+   * @param string $schemaName
+   * @param int $version
+   * @param int $state
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\InvalidOperationException
+   * @throws \metastore\MetaException
+   */
+  public function set_schema_version_state($schemaName, $version, $state);
+  /**
+   * @param \metastore\SerDeInfo $serde
+   * @throws \metastore\AlreadyExistsException
+   * @throws \metastore\MetaException
+   */
+  public function add_serde(\metastore\SerDeInfo $serde);
+  /**
+   * @param string $serdeName
+   * @return \metastore\SerDeInfo
+   * @throws \metastore\NoSuchObjectException
+   * @throws \metastore\MetaException
+   */
+  public function get_serde($serdeName);
 }
 
 class ThriftHiveMetastoreClient extends \FacebookServiceClient implements 
\metastore\ThriftHiveMetastoreIf {
@@ -11583,327 +11683,4322 @@ class ThriftHiveMetastoreClient extends 
\FacebookServiceClient implements \metas
 throw new \Exception("create_or_drop_wm_trigger_to_pool_mapping failed: 
unknown result");
   }
 
-}
-
-// HELPER FUNCTIONS AND STRUCTURES
+  public function create_ischema(\metastore\ISchema $schema)
+  {
+$this->send_create_ischema($schema);
+$this->recv_create_ischema();
+  }
 
-class ThriftHiveMetastore_getMetaConf_args {
-  static $_TSPEC;
+  public function send_create_ischema(\metastore\ISchema $schema)
+  {
+$args = new \metastore\ThriftHiveMetastore_create_ischema_args();
+$args->schema = $schema;
+

[30/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/sql/mysql/hive-schema-2.3.0.mysql.sql
--
diff --git 
a/standalone-metastore/src/main/sql/mysql/hive-schema-2.3.0.mysql.sql 
b/standalone-metastore/src/main/sql/mysql/hive-schema-2.3.0.mysql.sql
new file mode 100644
index 000..45fe6ec
--- /dev/null
+++ b/standalone-metastore/src/main/sql/mysql/hive-schema-2.3.0.mysql.sql
@@ -0,0 +1,970 @@
+-- MySQL dump 10.13  Distrib 5.5.25, for osx10.6 (i386)
+--
+-- Host: localhost    Database: test
+-- ------------------------------------------------------
+-- Server version  5.5.25
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, 
FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `BUCKETING_COLS`
+--
+
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE IF NOT EXISTS `BUCKETING_COLS` (
+  `SD_ID` bigint(20) NOT NULL,
+  `BUCKET_COL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin 
DEFAULT NULL,
+  `INTEGER_IDX` int(11) NOT NULL,
+  PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
+  KEY `BUCKETING_COLS_N49` (`SD_ID`),
+  CONSTRAINT `BUCKETING_COLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` 
(`SD_ID`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `CDS`
+--
+
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE IF NOT EXISTS `CDS` (
+  `CD_ID` bigint(20) NOT NULL,
+  PRIMARY KEY (`CD_ID`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `COLUMNS_V2`
+--
+
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE IF NOT EXISTS `COLUMNS_V2` (
+  `CD_ID` bigint(20) NOT NULL,
+  `COMMENT` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+  `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
+  `TYPE_NAME` MEDIUMTEXT DEFAULT NULL,
+  `INTEGER_IDX` int(11) NOT NULL,
+  PRIMARY KEY (`CD_ID`,`COLUMN_NAME`),
+  KEY `COLUMNS_V2_N49` (`CD_ID`),
+  CONSTRAINT `COLUMNS_V2_FK1` FOREIGN KEY (`CD_ID`) REFERENCES `CDS` (`CD_ID`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `DATABASE_PARAMS`
+--
+
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` (
+  `DB_ID` bigint(20) NOT NULL,
+  `PARAM_KEY` varchar(180) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
+  `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT 
NULL,
+  PRIMARY KEY (`DB_ID`,`PARAM_KEY`),
+  KEY `DATABASE_PARAMS_N49` (`DB_ID`),
+  CONSTRAINT `DATABASE_PARAMS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` 
(`DB_ID`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `DBS`
+--
+
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE IF NOT EXISTS `DBS` (
+  `DB_ID` bigint(20) NOT NULL,
+  `DESC` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+  `DB_LOCATION_URI` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin NOT 
NULL,
+  `NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+  `OWNER_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT 
NULL,
+  `OWNER_TYPE` varchar(10) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT 
NULL,
+  PRIMARY KEY (`DB_ID`),
+  UNIQUE KEY `UNIQUE_DATABASE` (`NAME`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `DB_PRIVS`
+--
+
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
+  `DB_GRANT_ID` bigint(20) NOT NULL,
+  `CREATE_TIME` int(11) NOT NULL,
+  `DB_ID` bigint(20) DEFAULT NULL,
+  `GRANT_OPTION` smallint(6) NOT NULL,
+  `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+  `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT 

[40/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
--
diff --git a/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php 
b/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
index e78a851..3342454 100644
--- a/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -142,6 +142,67 @@ final class EventRequestType {
   );
 }
 
+final class SerdeType {
+  const HIVE = 1;
+  const SCHEMA_REGISTRY = 2;
+  static public $__names = array(
+1 => 'HIVE',
+2 => 'SCHEMA_REGISTRY',
+  );
+}
+
+final class SchemaType {
+  const HIVE = 1;
+  const AVRO = 2;
+  static public $__names = array(
+1 => 'HIVE',
+2 => 'AVRO',
+  );
+}
+
+final class SchemaCompatibility {
+  const NONE = 1;
+  const BACKWARD = 2;
+  const FORWARD = 3;
+  const BOTH = 4;
+  static public $__names = array(
+1 => 'NONE',
+2 => 'BACKWARD',
+3 => 'FORWARD',
+4 => 'BOTH',
+  );
+}
+
+final class SchemaValidation {
+  const LATEST = 1;
+  const ALL = 2;
+  static public $__names = array(
+1 => 'LATEST',
+2 => 'ALL',
+  );
+}
+
+final class SchemaVersionState {
+  const INITIATED = 1;
+  const START_REVIEW = 2;
+  const CHANGES_REQUIRED = 3;
+  const REVIEWED = 4;
+  const ENABLED = 5;
+  const DISABLED = 6;
+  const ARCHIVED = 7;
+  const DELETED = 8;
+  static public $__names = array(
+1 => 'INITIATED',
+2 => 'START_REVIEW',
+3 => 'CHANGES_REQUIRED',
+4 => 'REVIEWED',
+5 => 'ENABLED',
+6 => 'DISABLED',
+7 => 'ARCHIVED',
+8 => 'DELETED',
+  );
+}
+
 final class FunctionType {
   const JAVA = 1;
   static public $__names = array(
@@ -4021,6 +4082,22 @@ class SerDeInfo {
* @var array
*/
   public $parameters = null;
+  /**
+   * @var string
+   */
+  public $description = null;
+  /**
+   * @var string
+   */
+  public $serializerClass = null;
+  /**
+   * @var string
+   */
+  public $deserializerClass = null;
+  /**
+   * @var int
+   */
+  public $serdeType = null;
 
   public function __construct($vals=null) {
 if (!isset(self::$_TSPEC)) {
@@ -4045,6 +4122,22 @@ class SerDeInfo {
 'type' => TType::STRING,
 ),
   ),
+4 => array(
+  'var' => 'description',
+  'type' => TType::STRING,
+  ),
+5 => array(
+  'var' => 'serializerClass',
+  'type' => TType::STRING,
+  ),
+6 => array(
+  'var' => 'deserializerClass',
+  'type' => TType::STRING,
+  ),
+7 => array(
+  'var' => 'serdeType',
+  'type' => TType::I32,
+  ),
 );
 }
 if (is_array($vals)) {
@@ -4057,6 +4150,18 @@ class SerDeInfo {
   if (isset($vals['parameters'])) {
 $this->parameters = $vals['parameters'];
   }
+  if (isset($vals['description'])) {
+$this->description = $vals['description'];
+  }
+  if (isset($vals['serializerClass'])) {
+$this->serializerClass = $vals['serializerClass'];
+  }
+  if (isset($vals['deserializerClass'])) {
+$this->deserializerClass = $vals['deserializerClass'];
+  }
+  if (isset($vals['serdeType'])) {
+$this->serdeType = $vals['serdeType'];
+  }
 }
   }
 
@@ -4113,6 +4218,34 @@ class SerDeInfo {
 $xfer += $input->skip($ftype);
   }
   break;
+case 4:
+  if ($ftype == TType::STRING) {
+$xfer += $input->readString($this->description);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+case 5:
+  if ($ftype == TType::STRING) {
+$xfer += $input->readString($this->serializerClass);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+case 6:
+  if ($ftype == TType::STRING) {
+$xfer += $input->readString($this->deserializerClass);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+case 7:
+  if ($ftype == TType::I32) {
+$xfer += $input->readI32($this->serdeType);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
 default:
   $xfer += $input->skip($ftype);
   break;
@@ -4154,6 +4287,26 @@ class SerDeInfo {
   }
   $xfer += $output->writeFieldEnd();
 }
+if ($this->description !== null) {
+  $xfer += $output->writeFieldBegin('description', TType::STRING, 4);
+  $xfer += $output->writeString($this->description);
+  $xfer += $output->writeFieldEnd();
+}
+if ($this->serializerClass !== null) {
+  $xfer += $output->writeFieldBegin('serializerClass', TType::STRING, 5);
+  $xfer += $output->writeString($this->serializerClass);
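The generated PHP above reflects the new Thrift definitions: SerDeInfo gains description, serializerClass, deserializerClass, and serdeType fields (ids 4 through 7), and the new enums model schema-registry metadata. As a rough illustration of the same fields on the Java side, a hedged sketch; the setter names follow Thrift's usual Java codegen pattern and are assumed, not quoted from this patch, and all string values are hypothetical:

import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SerdeType;

public class SerDeInfoSketch {
  // Build a SerDeInfo that exercises the new schema-registry oriented fields.
  public static SerDeInfo schemaRegistrySerde() {
    SerDeInfo serde = new SerDeInfo();
    serde.setName("avro-registry-serde");                        // hypothetical name
    serde.setSerializationLib("org.example.AvroSerDe");          // hypothetical SerDe class
    serde.setDescription("SerDe backed by a schema registry");   // new field 4
    serde.setSerializerClass("org.example.AvroSerializer");      // new field 5
    serde.setDeserializerClass("org.example.AvroDeserializer");  // new field 6
    serde.setSerdeType(SerdeType.SCHEMA_REGISTRY);               // new field 7
    return serde;
  }
}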

[29/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/sql/oracle/hive-schema-2.3.0.oracle.sql
--
diff --git 
a/standalone-metastore/src/main/sql/oracle/hive-schema-2.3.0.oracle.sql 
b/standalone-metastore/src/main/sql/oracle/hive-schema-2.3.0.oracle.sql
new file mode 100644
index 000..bda635f
--- /dev/null
+++ b/standalone-metastore/src/main/sql/oracle/hive-schema-2.3.0.oracle.sql
@@ -0,0 +1,926 @@
+-- Table SEQUENCE_TABLE is an internal table required by DataNucleus.
+-- NOTE: Some versions of SchemaTool do not automatically generate this table.
+-- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
+CREATE TABLE SEQUENCE_TABLE
+(
+   SEQUENCE_NAME VARCHAR2(255) NOT NULL,
+   NEXT_VAL NUMBER NOT NULL
+);
+
+ALTER TABLE SEQUENCE_TABLE ADD CONSTRAINT PART_TABLE_PK PRIMARY KEY (SEQUENCE_NAME);
+
+-- Table NUCLEUS_TABLES is an internal table required by DataNucleus.
+-- This table is required if datanucleus.autoStartMechanism=SchemaTable
+-- NOTE: Some versions of SchemaTool do not automatically generate this table.
+-- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
+CREATE TABLE NUCLEUS_TABLES
+(
+   CLASS_NAME VARCHAR2(128) NOT NULL,
+   TABLE_NAME VARCHAR2(128) NOT NULL,
+   TYPE VARCHAR2(4) NOT NULL,
+   OWNER VARCHAR2(2) NOT NULL,
+   VERSION VARCHAR2(20) NOT NULL,
+   INTERFACE_NAME VARCHAR2(255) NULL
+);
+
+ALTER TABLE NUCLEUS_TABLES ADD CONSTRAINT NUCLEUS_TABLES_PK PRIMARY KEY (CLASS_NAME);
+
+-- Table PART_COL_PRIVS for classes [org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege]
+CREATE TABLE PART_COL_PRIVS
+(
+PART_COLUMN_GRANT_ID NUMBER NOT NULL,
+"COLUMN_NAME" VARCHAR2(767) NULL,
+CREATE_TIME NUMBER (10) NOT NULL,
+GRANT_OPTION NUMBER (5) NOT NULL,
+GRANTOR VARCHAR2(128) NULL,
+GRANTOR_TYPE VARCHAR2(128) NULL,
+PART_ID NUMBER NULL,
+PRINCIPAL_NAME VARCHAR2(128) NULL,
+PRINCIPAL_TYPE VARCHAR2(128) NULL,
+PART_COL_PRIV VARCHAR2(128) NULL
+);
+
+ALTER TABLE PART_COL_PRIVS ADD CONSTRAINT PART_COL_PRIVS_PK PRIMARY KEY (PART_COLUMN_GRANT_ID);
+
+-- Table CDS.
+CREATE TABLE CDS
+(
+CD_ID NUMBER NOT NULL
+);
+
+ALTER TABLE CDS ADD CONSTRAINT CDS_PK PRIMARY KEY (CD_ID);
+
+-- Table COLUMNS_V2 for join relationship
+CREATE TABLE COLUMNS_V2
+(
+CD_ID NUMBER NOT NULL,
+"COMMENT" VARCHAR2(256) NULL,
+"COLUMN_NAME" VARCHAR2(767) NOT NULL,
+TYPE_NAME CLOB NOT NULL,
+INTEGER_IDX NUMBER(10) NOT NULL
+);
+
+ALTER TABLE COLUMNS_V2 ADD CONSTRAINT COLUMNS_V2_PK PRIMARY KEY (CD_ID,"COLUMN_NAME");
+
+-- Table PARTITION_KEY_VALS for join relationship
+CREATE TABLE PARTITION_KEY_VALS
+(
+PART_ID NUMBER NOT NULL,
+PART_KEY_VAL VARCHAR2(256) NULL,
+INTEGER_IDX NUMBER(10) NOT NULL
+);
+
+ALTER TABLE PARTITION_KEY_VALS ADD CONSTRAINT PARTITION_KEY_VALS_PK PRIMARY KEY (PART_ID,INTEGER_IDX);
+
+-- Table DBS for classes [org.apache.hadoop.hive.metastore.model.MDatabase]
+CREATE TABLE DBS
+(
+DB_ID NUMBER NOT NULL,
+"DESC" VARCHAR2(4000) NULL,
+DB_LOCATION_URI VARCHAR2(4000) NOT NULL,
+"NAME" VARCHAR2(128) NULL,
+OWNER_NAME VARCHAR2(128) NULL,
+OWNER_TYPE VARCHAR2(10) NULL
+);
+
+ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID);
+
+-- Table PARTITION_PARAMS for join relationship
+CREATE TABLE PARTITION_PARAMS
+(
+PART_ID NUMBER NOT NULL,
+PARAM_KEY VARCHAR2(256) NOT NULL,
+PARAM_VALUE VARCHAR2(4000) NULL
+);
+
+ALTER TABLE PARTITION_PARAMS ADD CONSTRAINT PARTITION_PARAMS_PK PRIMARY KEY (PART_ID,PARAM_KEY);
+
+-- Table SERDES for classes [org.apache.hadoop.hive.metastore.model.MSerDeInfo]
+CREATE TABLE SERDES
+(
+SERDE_ID NUMBER NOT NULL,
+"NAME" VARCHAR2(128) NULL,
+SLIB VARCHAR2(4000) NULL
+);
+
+ALTER TABLE SERDES ADD CONSTRAINT SERDES_PK PRIMARY KEY (SERDE_ID);
+
+-- Table TYPES for classes [org.apache.hadoop.hive.metastore.model.MType]
+CREATE TABLE TYPES
+(
+TYPES_ID NUMBER NOT NULL,
+TYPE_NAME VARCHAR2(128) NULL,
+TYPE1 VARCHAR2(767) NULL,
+TYPE2 VARCHAR2(767) NULL
+);
+
+ALTER TABLE TYPES ADD CONSTRAINT TYPES_PK PRIMARY KEY (TYPES_ID);
+
+-- Table PARTITION_KEYS for join relationship
+CREATE TABLE PARTITION_KEYS
+(
+TBL_ID NUMBER NOT NULL,
+PKEY_COMMENT VARCHAR2(4000) NULL,
+PKEY_NAME VARCHAR2(128) NOT NULL,
+PKEY_TYPE VARCHAR2(767) NOT NULL,
+INTEGER_IDX NUMBER(10) NOT NULL
+);
+
+ALTER TABLE PARTITION_KEYS ADD CONSTRAINT PARTITION_KEY_PK PRIMARY KEY (TBL_ID,PKEY_NAME);
+
+-- Table ROLES for classes [org.apache.hadoop.hive.metastore.model.MRole]
+CREATE TABLE ROLES
+(
+ROLE_ID NUMBER NOT NULL,
+CREATE_TIME NUMBER (10) NOT NULL,
+OWNER_NAME VARCHAR2(128) NULL,
+ROLE_NAME VARCHAR2(128) NULL
+);
+
+ALTER TABLE ROLES ADD CONSTRAINT ROLES_PK PRIMARY KEY (ROLE_ID);
+
+-- Table PARTITIONS for classes [org.apache.hadoop.hive.metastore.model.MPartition]
+CREATE TABLE PARTITIONS
+(
+
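The SEQUENCE_TABLE comment above is worth unpacking: DataNucleus allocates datastore identities by reading and advancing a NEXT_VAL row keyed by SEQUENCE_NAME. A minimal JDBC sketch of that contract follows; the sequence name is hypothetical, and this only illustrates the table layout, it is not DataNucleus's actual allocation code:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class SequenceTableSketch {
  // Read the current NEXT_VAL for a sequence, then advance it by one.
  public static long nextId(Connection conn, String seqName) throws SQLException {
    try (PreparedStatement read = conn.prepareStatement(
        "SELECT NEXT_VAL FROM SEQUENCE_TABLE WHERE SEQUENCE_NAME = ? FOR UPDATE")) {
      read.setString(1, seqName);
      try (ResultSet rs = read.executeQuery()) {
        if (!rs.next()) throw new SQLException("unknown sequence " + seqName);
        long next = rs.getLong(1);
        try (PreparedStatement bump = conn.prepareStatement(
            "UPDATE SEQUENCE_TABLE SET NEXT_VAL = ? WHERE SEQUENCE_NAME = ?")) {
          bump.setLong(1, next + 1);
          bump.setString(2, seqName);
          bump.executeUpdate();
        }
        return next;
      }
    }
  }
}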

[06/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
index 7c54354..19279a5 100644
--- 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.metastore;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.junit.Test;
 
 /**
@@ -37,7 +36,7 @@ public class TestMetaStoreConnectionUrlHook {
MetastoreConf.setVar(conf, ConfVars.CONNECTURLHOOK, DummyJdoConnectionUrlHook.class.getName());
MetastoreConf.setVar(conf, ConfVars.CONNECTURLKEY, DummyJdoConnectionUrlHook.initialUrl);
MetastoreConf.setVar(conf, ConfVars.RAW_STORE_IMPL, DummyRawStoreForJdoConnection.class.getName());
-MetaStoreUtils.setConfForStandloneMode(conf);
+MetaStoreTestUtils.setConfForStandloneMode(conf);
 
// Instantiating the HMSHandler with hive.metastore.checkForDefaultDb will cause it to
 // initialize an instance of the DummyRawStoreForJdoConnection

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
new file mode 100644
index 000..bf8556d
--- /dev/null
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.ndv.hll.HyperLogLog;
+import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestOldSchema {
+  private ObjectStore store = null;
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestOldSchema.class.getName());
+
+  public static class MockPartitionExpressionProxy implements PartitionExpressionProxy {
+@Override
+public String convertExprToFilter(byte[] 
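The test above wires ConfVars.CONNECTURLHOOK to a dummy hook so the HMSHandler obtains its JDBC URL from the hook rather than from static configuration. A hedged sketch of such a hook, assuming the JDOConnectionURLHook interface keeps its usual two methods; treat the interface location and signatures as recalled, not quoted from this patch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.hooks.JDOConnectionURLHook;

public class FailoverUrlHookSketch implements JDOConnectionURLHook {
  private String lastBadUrl;

  @Override
  public String getJdoConnectionUrl(Configuration conf) throws Exception {
    // Return a rewritten JDBC URL, e.g. pointing at a standby database
    // once the previously served URL has been reported bad. Hypothetical URL.
    return "jdbc:derby:;databaseName=standby_metastore;create=true";
  }

  @Override
  public void notifyBadConnectionUrl(String url) {
    lastBadUrl = url; // remember it so the next getJdoConnectionUrl can avoid it
  }
}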

[14/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
deleted file mode 100644
index 15bd803..000
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ /dev/null
@@ -1,1095 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import com.google.common.base.Predicates;
-import com.google.common.collect.Maps;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.security.SaslRpcServer;
-import org.apache.hive.common.util.ReflectionUtil;
-
-public class MetaStoreUtils {
-
-  private static final Logger LOG = LoggerFactory.getLogger("hive.log");
-
-  // Right now we only support one special character '/'.
-  // More special characters can be added accordingly in the future.
-  // NOTE:
-  // If the following array is updated, please also be sure to update the
-  // configuration parameter documentation
-  // HIVE_SUPPORT_SPECICAL_CHARACTERS_IN_TABLE_NAMES in HiveConf as well.
-  public static final char[] specialCharactersInTableNames = new char[] { '/' };
-
-  public static void populateQuickStats(FileStatus[] fileStatus, Map<String, String> params) {
-    org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.populateQuickStats(fileStatus, params);
-  }
-
-  public static boolean updateTableStatsFast(Table tbl, FileStatus[] fileStatus, boolean newDir,
-     boolean forceRecompute, EnvironmentContext environmentContext) throws MetaException {
-    return org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.updateTableStatsFast(
-        tbl, fileStatus, newDir, forceRecompute, environmentContext);
-  }
-
-  public static boolean updatePartitionStatsFast(Partition part, Warehouse wh, 
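The deleted methods above were thin shims delegating to the relocated org.apache.hadoop.hive.metastore.utils.MetaStoreUtils, so the migration for callers is essentially an import swap. A minimal sketch of a migrated call site; the Map<String, String> parameter type is inferred from the delegate call shown above:

// Before (old metastore module):
//   import org.apache.hadoop.hive.metastore.MetaStoreUtils;
//   MetaStoreUtils.populateQuickStats(fileStatus, params);
// After (standalone-metastore module):
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;

class QuickStatsCallerSketch {
  static void fill(org.apache.hadoop.fs.FileStatus[] fileStatus,
                   java.util.Map<String, String> params) {
    MetaStoreUtils.populateQuickStats(fileStatus, params);
  }
}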

[24/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
deleted file mode 100644
index f344c47..000
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ /dev/null
@@ -1,3515 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.lang.reflect.Field;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import junit.framework.TestCase;
-
-import org.datanucleus.api.jdo.JDOPersistenceManager;
-import org.datanucleus.api.jdo.JDOPersistenceManagerFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.AggrStats;
-import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
-import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Function;
-import org.apache.hadoop.hive.metastore.api.FunctionType;
-import org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse;
-import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.ResourceType;
-import org.apache.hadoop.hive.metastore.api.ResourceUri;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.SkewedInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.Type;
-import org.apache.hadoop.hive.metastore.api.UnknownDBException;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.thrift.TException;
-import 

[23/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
deleted file mode 100644
index a19cc86..000
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.hive.common.ValidTxnList;
-import org.apache.hadoop.hive.common.ValidReadTxnList;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
-import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.LockState;
-import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.util.List;
-
-/**
- * Unit tests for {@link org.apache.hadoop.hive.metastore.HiveMetaStoreClient}.  For now this just has
- * transaction and locking tests.  The goal here is not to test all
- * functionality possible through the interface, as all permutations of DB
- * operations should be tested in the appropriate DB handler classes.  The
- * goal is to test that we can properly pass the messages through the thrift
- * service.
- *
- * This is in the ql directory rather than the metastore directory because it
- * required the hive-exec jar, and hive-exec jar already depends on
- * hive-metastore jar, thus I can't make hive-metastore depend on hive-exec.
- */
-public class TestHiveMetaStoreTxns {
-
-  private final HiveConf conf = new HiveConf();
-  private IMetaStoreClient client;
-
-  public TestHiveMetaStoreTxns() throws Exception {
-TxnDbUtil.setConfValues(conf);
-LogManager.getRootLogger().setLevel(Level.DEBUG);
-tearDown();
-  }
-
-  @Test
-  public void testTxns() throws Exception {
-List<Long> tids = client.openTxns("me", 3).getTxn_ids();
-Assert.assertEquals(1L, (long) tids.get(0));
-Assert.assertEquals(2L, (long) tids.get(1));
-Assert.assertEquals(3L, (long) tids.get(2));
-client.rollbackTxn(1);
-client.commitTxn(2);
-ValidTxnList validTxns = client.getValidTxns();
-Assert.assertFalse(validTxns.isTxnValid(1));
-Assert.assertTrue(validTxns.isTxnValid(2));
-Assert.assertFalse(validTxns.isTxnValid(3));
-Assert.assertFalse(validTxns.isTxnValid(4));
-  }
-
-  @Test
-  public void testOpenTxnNotExcluded() throws Exception {
-List<Long> tids = client.openTxns("me", 3).getTxn_ids();
-Assert.assertEquals(1L, (long) tids.get(0));
-Assert.assertEquals(2L, (long) tids.get(1));
-Assert.assertEquals(3L, (long) tids.get(2));
-client.rollbackTxn(1);
-client.commitTxn(2);
-ValidTxnList validTxns = client.getValidTxns(3);
-Assert.assertFalse(validTxns.isTxnValid(1));
-Assert.assertTrue(validTxns.isTxnValid(2));
-Assert.assertTrue(validTxns.isTxnValid(3));
-Assert.assertFalse(validTxns.isTxnValid(4));
-  }
-
-  @Test
-  public void testTxnRange() throws Exception {
-ValidTxnList validTxns = client.getValidTxns();
-Assert.assertEquals(ValidTxnList.RangeResponse.NONE,
-validTxns.isTxnRangeValid(1L, 3L));
-List<Long> tids = client.openTxns("me", 5).getTxn_ids();
-
-HeartbeatTxnRangeResponse rsp = client.heartbeatTxnRange(1, 5);
-Assert.assertEquals(0, rsp.getNosuch().size());
-Assert.assertEquals(0, rsp.getAborted().size());
-
-client.rollbackTxn(1L);
-client.commitTxn(2L);
-client.commitTxn(3L);
-client.commitTxn(4L);
-validTxns = client.getValidTxns();
-System.out.println("validTxns = " + validTxns);
-Assert.assertEquals(ValidTxnList.RangeResponse.ALL,
-validTxns.isTxnRangeValid(2L, 

[26/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolForMetastore.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolForMetastore.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolForMetastore.java
new file mode 100644
index 000..a8f7d2a
--- /dev/null
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/tools/TestSchemaToolForMetastore.java
@@ -0,0 +1,467 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.tools;
+
+import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.net.URI;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Random;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.IMetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfoFactory;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestSchemaToolForMetastore {
+  private static final Logger LOG = LoggerFactory.getLogger(TestMetastoreSchemaTool.class);
+
+  private MetastoreSchemaTool schemaTool;
+  private Connection conn;
+  private Configuration conf;
+  private String testMetastoreDB;
+  private PrintStream errStream;
+  private PrintStream outStream;
+
+  @Before
+  public void setUp() throws HiveMetaException, IOException {
+testMetastoreDB = System.getProperty("java.io.tmpdir") +
+File.separator + "test_metastore-" + new Random().nextInt();
+System.setProperty(ConfVars.CONNECTURLKEY.toString(),
+"jdbc:derby:" + testMetastoreDB + ";create=true");
+conf = MetastoreConf.newMetastoreConf();
+schemaTool = new MetastoreSchemaTool(
+System.getProperty("test.tmp.dir", "target/tmp"), conf, "derby");
+schemaTool.setUserName(MetastoreConf.getVar(schemaTool.getConf(), ConfVars.CONNECTION_USER_NAME));
+schemaTool.setPassWord(MetastoreConf.getPassword(schemaTool.getConf(), ConfVars.PWD));
+System.setProperty("beeLine.system.exit", "true");
+errStream = System.err;
+outStream = System.out;
+conn = schemaTool.getConnectionToMetastore(false);
+  }
+
+  @After
+  public void tearDown() throws IOException, SQLException {
+File metaStoreDir = new File(testMetastoreDB);
+if (metaStoreDir.exists()) {
+  FileUtils.forceDeleteOnExit(metaStoreDir);
+}
+System.setOut(outStream);
+System.setErr(errStream);
+if (conn != null) {
+  conn.close();
+}
+  }
+
+  // Test the sequence validation functionality
+  @Test
+  public void testValidateSequences() throws Exception {
+schemaTool.doInit();
+
+// Test empty database
+boolean isValid = schemaTool.validateSequences(conn);
+Assert.assertTrue(isValid);
+
+// Test valid case
+String[] scripts = new String[] {
+"insert into SEQUENCE_TABLE values('org.apache.hadoop.hive.metastore.model.MDatabase', 100);",
+"insert into DBS values(99, 'test db1', 'hdfs:///tmp', 'db1', 'test', 'test');"
+};
+File scriptFile = generateTestScript(scripts);
+schemaTool.runSqlLine(scriptFile.getPath());
+isValid = schemaTool.validateSequences(conn);
+Assert.assertTrue(isValid);
+
+// Test invalid case
+scripts = new String[] {
+"delete from SEQUENCE_TABLE;",
+"delete from DBS;",
+"insert into SEQUENCE_TABLE values('org.apache.hadoop.hive.metastore.model.MDatabase', 100);",
+"insert into DBS values(102, 'test db1', 'hdfs:///tmp', 'db1', 'test', 
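The test above also documents the programmatic SchemaTool workflow: construct a MetastoreSchemaTool against an embedded Derby URL, initialize the schema, then validate. A condensed sketch under the same assumptions; paths and the Derby URL are placeholders, and the package declaration matches the tool's package since validateSequences may not be public:

package org.apache.hadoop.hive.metastore.tools;

import java.sql.Connection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;

public class SchemaToolSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder embedded-Derby URL, mirroring the test setup above.
    System.setProperty(ConfVars.CONNECTURLKEY.toString(),
        "jdbc:derby:/tmp/example_metastore;create=true");
    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreSchemaTool tool = new MetastoreSchemaTool("target/tmp", conf, "derby");
    tool.setUserName(MetastoreConf.getVar(tool.getConf(), ConfVars.CONNECTION_USER_NAME));
    tool.setPassWord(MetastoreConf.getPassword(tool.getConf(), ConfVars.PWD));
    tool.doInit();                                     // lay down the metastore schema
    try (Connection conn = tool.getConnectionToMetastore(false)) {
      System.out.println("sequences valid: " + tool.validateSequences(conn));
    }
  }
}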

[15/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
new file mode 100644
index 000..a66c135
--- /dev/null
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.common.util.ReflectionUtil;
+
+public class HiveMetaStoreUtils {
+
+  protected static final Logger LOG = LoggerFactory.getLogger("hive.log");
+
+  /**
+   * getDeserializer
+   *
+   * Get the Deserializer for a table.
+   *
+   * @param conf
+   *  - hadoop config
+   * @param table
+   *  the table
+   * @return
+   *   Returns instantiated deserializer by looking up class name of deserializer stored in
+   *   storage descriptor of passed in table. Also, initializes the deserializer with schema
+   *   of table.
+   * @exception MetaException
+   *  if any problems instantiating the Deserializer
+   *
+   *  todo - this should move somewhere into serde.jar
+   *
+   */
+  static public Deserializer getDeserializer(Configuration conf,
+      org.apache.hadoop.hive.metastore.api.Table table, boolean skipConfError) throws MetaException {
+String lib = table.getSd().getSerdeInfo().getSerializationLib();
+if (lib == null) {
+  return null;
+}
+return getDeserializer(conf, table, skipConfError, lib);
+  }
+
+  public static Deserializer getDeserializer(Configuration conf,
+  org.apache.hadoop.hive.metastore.api.Table table, boolean skipConfError,
+  String lib) throws MetaException {
+try {
+  Deserializer deserializer = ReflectionUtil.newInstance(conf.getClassByName(lib).
+  asSubclass(Deserializer.class), conf);
+  if (skipConfError) {
+SerDeUtils.initializeSerDeWithoutErrorCheck(deserializer, conf,
+MetaStoreUtils.getTableMetadata(table), null);
+  } else {
+SerDeUtils.initializeSerDe(deserializer, conf, MetaStoreUtils.getTableMetadata(table), null);
+  }
+  return deserializer;
+} catch (RuntimeException e) {
+  throw e;
+} catch (Throwable e) {
+  LOG.error("error in initSerDe: " + e.getClass().getName() + " "
+  + e.getMessage(), e);
+  throw new MetaException(e.getClass().getName() + " " + e.getMessage());
+}
+  }
+
+  public static Class<? extends Deserializer> getDeserializerClass(
+      Configuration conf, org.apache.hadoop.hive.metastore.api.Table table) throws Exception {
+String lib = table.getSd().getSerdeInfo().getSerializationLib();
+return lib == null ? null : conf.getClassByName(lib).asSubclass(Deserializer.class);
+  }
+
+  /**
+   * getDeserializer
+   *
+   * Get the Deserializer for a partition.
+   *
+   * @param conf
+   *  - hadoop config
+   * @param part
+   *  the partition
+   * @param table the table
+   * @return
+  
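Given the Javadoc above, a minimal usage sketch for getDeserializer: resolve the SerDe class named in the table's storage descriptor and initialize it with the table schema. Table construction is elided; assume `table` is already populated:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde2.Deserializer;

class DeserializerLookupSketch {
  static Deserializer forTable(Configuration conf, Table table) throws MetaException {
    // skipConfError = false: fail fast if SerDe initialization reports errors
    Deserializer d = HiveMetaStoreUtils.getDeserializer(conf, table, false);
    if (d == null) {
      // per the code above, getDeserializer returns null when no serialization lib is set
      throw new MetaException("table has no serialization library");
    }
    return d;
  }
}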

[39/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
--
diff --git 
a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
 
b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
index 808ee09..1e4c4fd 100644
--- 
a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ 
b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -1412,6 +1412,111 @@ class Iface(fb303.FacebookService.Iface):
 """
 pass
 
+  def create_ischema(self, schema):
+"""
+Parameters:
+ - schema
+"""
+pass
+
+  def alter_ischema(self, schemaName, newSchema):
+"""
+Parameters:
+ - schemaName
+ - newSchema
+"""
+pass
+
+  def get_ischema(self, schemaName):
+"""
+Parameters:
+ - schemaName
+"""
+pass
+
+  def drop_ischema(self, schemaName):
+"""
+Parameters:
+ - schemaName
+"""
+pass
+
+  def add_schema_version(self, schemaVersion):
+"""
+Parameters:
+ - schemaVersion
+"""
+pass
+
+  def get_schema_version(self, schemaName, version):
+"""
+Parameters:
+ - schemaName
+ - version
+"""
+pass
+
+  def get_schema_latest_version(self, schemaName):
+"""
+Parameters:
+ - schemaName
+"""
+pass
+
+  def get_schema_all_versions(self, schemaName):
+"""
+Parameters:
+ - schemaName
+"""
+pass
+
+  def drop_schema_version(self, schemaName, version):
+"""
+Parameters:
+ - schemaName
+ - version
+"""
+pass
+
+  def get_schemas_by_cols(self, rqst):
+"""
+Parameters:
+ - rqst
+"""
+pass
+
+  def map_schema_version_to_serde(self, schemaName, version, serdeName):
+"""
+Parameters:
+ - schemaName
+ - version
+ - serdeName
+"""
+pass
+
+  def set_schema_version_state(self, schemaName, version, state):
+"""
+Parameters:
+ - schemaName
+ - version
+ - state
+"""
+pass
+
+  def add_serde(self, serde):
+"""
+Parameters:
+ - serde
+"""
+pass
+
+  def get_serde(self, serdeName):
+"""
+Parameters:
+ - serdeName
+"""
+pass
+
 
 class Client(fb303.FacebookService.Client, Iface):
   """
@@ -7871,6 +7976,500 @@ class Client(fb303.FacebookService.Client, Iface):
   raise result.o4
raise TApplicationException(TApplicationException.MISSING_RESULT, "create_or_drop_wm_trigger_to_pool_mapping failed: unknown result")
 
+  def create_ischema(self, schema):
+"""
+Parameters:
+ - schema
+"""
+self.send_create_ischema(schema)
+self.recv_create_ischema()
+
+  def send_create_ischema(self, schema):
+self._oprot.writeMessageBegin('create_ischema', TMessageType.CALL, self._seqid)
+args = create_ischema_args()
+args.schema = schema
+args.write(self._oprot)
+self._oprot.writeMessageEnd()
+self._oprot.trans.flush()
+
+  def recv_create_ischema(self):
+iprot = self._iprot
+(fname, mtype, rseqid) = iprot.readMessageBegin()
+if mtype == TMessageType.EXCEPTION:
+  x = TApplicationException()
+  x.read(iprot)
+  iprot.readMessageEnd()
+  raise x
+result = create_ischema_result()
+result.read(iprot)
+iprot.readMessageEnd()
+if result.o1 is not None:
+  raise result.o1
+if result.o2 is not None:
+  raise result.o2
+if result.o3 is not None:
+  raise result.o3
+return
+
+  def alter_ischema(self, schemaName, newSchema):
+"""
+Parameters:
+ - schemaName
+ - newSchema
+"""
+self.send_alter_ischema(schemaName, newSchema)
+self.recv_alter_ischema()
+
+  def send_alter_ischema(self, schemaName, newSchema):
+self._oprot.writeMessageBegin('alter_ischema', TMessageType.CALL, self._seqid)
+args = alter_ischema_args()
+args.schemaName = schemaName
+args.newSchema = newSchema
+args.write(self._oprot)
+self._oprot.writeMessageEnd()
+self._oprot.trans.flush()
+
+  def recv_alter_ischema(self):
+iprot = self._iprot
+(fname, mtype, rseqid) = iprot.readMessageBegin()
+if mtype == TMessageType.EXCEPTION:
+  x = TApplicationException()
+  x.read(iprot)
+  iprot.readMessageEnd()
+  raise x
+result = alter_ischema_result()
+result.read(iprot)
+iprot.readMessageEnd()
+if result.o1 is not None:
+  raise result.o1
+if result.o2 is not None:
+  raise result.o2
+return
+
+  def get_ischema(self, schemaName):
+"""
+Parameters:
+ - schemaName
+"""
+self.send_get_ischema(schemaName)
+return self.recv_get_ischema()
+
+  def send_get_ischema(self, schemaName):
+self._oprot.writeMessageBegin('get_ischema', TMessageType.CALL, self._seqid)
+args = get_ischema_args()
+
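The Iface above enumerates the new schema-registry RPCs. On the Java side these surface through IMetaStoreClient; a heavily hedged sketch follows, where the client method names are assumed to mirror the thrift calls and the builder methods are assumed from the ISchemaBuilder import seen elsewhere in this patch set; none of these names are quoted from the patch:

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ISchema;
import org.apache.hadoop.hive.metastore.api.SchemaCompatibility;
import org.apache.hadoop.hive.metastore.api.SchemaType;
import org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder;

class SchemaRegistryCallsSketch {
  static void roundTrip(IMetaStoreClient client) throws Exception {
    ISchema schema = new ISchemaBuilder()              // builder methods assumed
        .setName("events")                             // hypothetical schema name
        .setSchemaType(SchemaType.AVRO)
        .setCompatibility(SchemaCompatibility.BACKWARD)
        .build();
    client.createISchema(schema);                      // assumed mirror of create_ischema
    ISchema fetched = client.getISchema("events");     // assumed mirror of get_ischema
    client.dropISchema(fetched.getName());             // assumed mirror of drop_ischema
  }
}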

[35/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreSchemaMethods.java
--
diff --git 
a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreSchemaMethods.java
 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreSchemaMethods.java
new file mode 100644
index 000..0ceb84a
--- /dev/null
+++ 
b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreSchemaMethods.java
@@ -0,0 +1,887 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsResp;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsRespEntry;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsRqst;
+import org.apache.hadoop.hive.metastore.api.ISchema;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.SchemaCompatibility;
+import org.apache.hadoop.hive.metastore.api.SchemaType;
+import org.apache.hadoop.hive.metastore.api.SchemaValidation;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionState;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.SerdeType;
+import org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.SchemaVersionBuilder;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.events.AddSchemaVersionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterISchemaEvent;
+import org.apache.hadoop.hive.metastore.events.AlterSchemaVersionEvent;
+import org.apache.hadoop.hive.metastore.events.CreateISchemaEvent;
+import org.apache.hadoop.hive.metastore.events.DropISchemaEvent;
+import org.apache.hadoop.hive.metastore.events.DropSchemaVersionEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.thrift.TException;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+// This does the testing using a remote metastore, as that finds more issues in thrift
+public class TestHiveMetaStoreSchemaMethods {
+  private static Map events;
+  private static Map transactionalEvents;
+  private static Map preEvents;
+
+  private static IMetaStoreClient client;
+
+
+  @BeforeClass
+  public static void startMetastore() throws Exception {
+Configuration conf = MetastoreConf.newMetastoreConf();
+int port = MetaStoreTestUtils.findFreePort();
+MetastoreConf.setVar(conf, ConfVars.THRIFT_URIS, "thrift://localhost:" + port);
+MetastoreConf.setClass(conf, ConfVars.EVENT_LISTENERS, SchemaEventListener.class,
+MetaStoreEventListener.class);
+MetastoreConf.setClass(conf, ConfVars.TRANSACTIONAL_EVENT_LISTENERS, TransactionalSchemaEventListener.class,
+MetaStoreEventListener.class);
+MetastoreConf.setClass(conf, ConfVars.PRE_EVENT_LISTENERS, SchemaPreEventListener.class,
+MetaStorePreEventListener.class);
+MetaStoreTestUtils.setConfForStandloneMode(conf);
+MetaStoreTestUtils.startMetaStore(port, 
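The setup above registers plain, transactional, and pre-event listeners for the new schema events. A hedged sketch of such a listener; the override names are assumed to mirror the imported event classes (CreateISchemaEvent, AddSchemaVersionEvent, and so on) and should not be read as the authoritative hook names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.AddSchemaVersionEvent;
import org.apache.hadoop.hive.metastore.events.CreateISchemaEvent;

public class AuditingSchemaListenerSketch extends MetaStoreEventListener {
  public AuditingSchemaListenerSketch(Configuration conf) {
    super(conf);
  }

  @Override
  public void onCreateISchema(CreateISchemaEvent event) throws MetaException {
    // e.g. record who created which schema; hook name assumed
  }

  @Override
  public void onAddSchemaVersion(AddSchemaVersionEvent event) throws MetaException {
    // e.g. push the new version to an external registry; hook name assumed
  }
}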

[33/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
new file mode 100644
index 000..882e3be
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java
@@ -0,0 +1,1309 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.tools;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.output.NullOutputStream;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.IMetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfoFactory;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
+import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableMap;
+import sqlline.SqlLine;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.net.URI;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class MetastoreSchemaTool {
+  private static final Logger LOG = LoggerFactory.getLogger(MetastoreSchemaTool.class);
+  private static final String PASSWD_MASK = "[passwd stripped]";
+
+  @VisibleForTesting
+  public static String homeDir;
+
+  private String userName = null;
+  private String passWord = null;
+  private boolean dryRun = false;
+  private boolean verbose = false;
+  private String dbOpts = null;
+  private String url = null;
+  private String driver = null;
+  private URI[] validationServers = null; // The list of servers the database/partition/table can locate on
+  private String hiveUser; // Hive username, for use when creating the user, not for connecting
+  private String hivePasswd; // Hive password, for use when creating the user, not for connecting
+  private String hiveDb; // Hive database, for use when creating the user, not for connecting
+  private final Configuration conf;
+  private final String dbType;
+  private final IMetaStoreSchemaInfo metaStoreSchemaInfo;
+  private boolean needsQuotedIdentifier;
+
+  private static String findHomeDir() {
+// If METASTORE_HOME is set, use it, else use HIVE_HOME for backwards compatibility.
+homeDir = homeDir == null ? System.getenv("METASTORE_HOME") : homeDir;
+return homeDir == null ? System.getenv("HIVE_HOME") : homeDir;
+  }
+
+  private 

[31/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/standalone-metastore/src/main/sql/mssql/hive-schema-2.3.0.mssql.sql
--
diff --git 
a/standalone-metastore/src/main/sql/mssql/hive-schema-2.3.0.mssql.sql 
b/standalone-metastore/src/main/sql/mssql/hive-schema-2.3.0.mssql.sql
new file mode 100644
index 000..c117a32
--- /dev/null
+++ b/standalone-metastore/src/main/sql/mssql/hive-schema-2.3.0.mssql.sql
@@ -0,0 +1,1023 @@
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+
+--
+-- DataNucleus SchemaTool (ran at 08/04/2014 15:10:15)
+--
+-- Complete schema required for the following classes:-
+-- org.apache.hadoop.hive.metastore.model.MColumnDescriptor
+-- org.apache.hadoop.hive.metastore.model.MDBPrivilege
+-- org.apache.hadoop.hive.metastore.model.MDatabase
+-- org.apache.hadoop.hive.metastore.model.MDelegationToken
+-- org.apache.hadoop.hive.metastore.model.MFieldSchema
+-- org.apache.hadoop.hive.metastore.model.MFunction
+-- org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
+-- org.apache.hadoop.hive.metastore.model.MIndex
+-- org.apache.hadoop.hive.metastore.model.MMasterKey
+-- org.apache.hadoop.hive.metastore.model.MOrder
+-- org.apache.hadoop.hive.metastore.model.MPartition
+-- org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege
+-- org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics
+-- org.apache.hadoop.hive.metastore.model.MPartitionEvent
+-- org.apache.hadoop.hive.metastore.model.MPartitionPrivilege
+-- org.apache.hadoop.hive.metastore.model.MResourceUri
+-- org.apache.hadoop.hive.metastore.model.MRole
+-- org.apache.hadoop.hive.metastore.model.MRoleMap
+-- org.apache.hadoop.hive.metastore.model.MSerDeInfo
+-- org.apache.hadoop.hive.metastore.model.MStorageDescriptor
+-- org.apache.hadoop.hive.metastore.model.MStringList
+-- org.apache.hadoop.hive.metastore.model.MTable
+-- org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege
+-- org.apache.hadoop.hive.metastore.model.MTableColumnStatistics
+-- org.apache.hadoop.hive.metastore.model.MTablePrivilege
+-- org.apache.hadoop.hive.metastore.model.MType
+-- org.apache.hadoop.hive.metastore.model.MVersionTable
+--
+-- Table MASTER_KEYS for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
+CREATE TABLE MASTER_KEYS
+(
+KEY_ID int NOT NULL,
+MASTER_KEY nvarchar(767) NULL
+);
+
+ALTER TABLE MASTER_KEYS ADD CONSTRAINT MASTER_KEYS_PK PRIMARY KEY (KEY_ID);
+
+-- Table IDXS for classes [org.apache.hadoop.hive.metastore.model.MIndex]
+CREATE TABLE IDXS
+(
+INDEX_ID bigint NOT NULL,
+CREATE_TIME int NOT NULL,
+DEFERRED_REBUILD bit NOT NULL,
+INDEX_HANDLER_CLASS nvarchar(4000) NULL,
+INDEX_NAME nvarchar(128) NULL,
+INDEX_TBL_ID bigint NULL,
+LAST_ACCESS_TIME int NOT NULL,
+ORIG_TBL_ID bigint NULL,
+SD_ID bigint NULL
+);
+
+ALTER TABLE IDXS ADD CONSTRAINT IDXS_PK PRIMARY KEY (INDEX_ID);
+
+-- Table PART_COL_STATS for classes [org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics]
+CREATE TABLE PART_COL_STATS
+(
+CS_ID bigint NOT NULL,
+AVG_COL_LEN float NULL,
+"COLUMN_NAME" nvarchar(767) NOT NULL,
+COLUMN_TYPE nvarchar(128) NOT NULL,
+DB_NAME nvarchar(128) NOT NULL,
+BIG_DECIMAL_HIGH_VALUE nvarchar(255) NULL,
+BIG_DECIMAL_LOW_VALUE nvarchar(255) NULL,
+DOUBLE_HIGH_VALUE float NULL,
+DOUBLE_LOW_VALUE float NULL,
+LAST_ANALYZED bigint NOT NULL,
+LONG_HIGH_VALUE bigint NULL,
+LONG_LOW_VALUE bigint NULL,
+MAX_COL_LEN bigint NULL,
+NUM_DISTINCTS bigint NULL,
+NUM_FALSES bigint NULL,
+NUM_NULLS bigint NOT NULL,
+NUM_TRUES bigint NULL,
+PART_ID bigint NULL,
+PARTITION_NAME nvarchar(767) NOT NULL,
+"TABLE_NAME" nvarchar(256) NOT NULL
+);
+
+ALTER TABLE PART_COL_STATS ADD CONSTRAINT PART_COL_STATS_PK PRIMARY KEY (CS_ID);
+
+CREATE INDEX PCS_STATS_IDX ON PART_COL_STATS (DB_NAME,TABLE_NAME,COLUMN_NAME,PARTITION_NAME);
+
+-- Table 
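The PCS_STATS_IDX index above exists because partition column statistics are always looked up by the same four columns. A hedged JDBC sketch of the kind of lookup it serves; the connection string, table, column, and partition values are placeholders, not taken from the metastore code:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class PartColStatsLookup {
  public static void main(String[] args) throws Exception {
    // Hypothetical connection details; any SQL Server metastore database would do.
    try (Connection c = DriverManager.getConnection(
             "jdbc:sqlserver://localhost:1433;databaseName=metastore", "hive", "hive");
         PreparedStatement ps = c.prepareStatement(
             "SELECT NUM_NULLS, LONG_LOW_VALUE, LONG_HIGH_VALUE FROM PART_COL_STATS"
             + " WHERE DB_NAME = ? AND TABLE_NAME = ? AND COLUMN_NAME = ? AND PARTITION_NAME = ?")) {
      ps.setString(1, "default");
      ps.setString(2, "web_logs");      // hypothetical table
      ps.setString(3, "hits");          // hypothetical column
      ps.setString(4, "ds=2017-12-07"); // hypothetical partition
      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          System.out.printf("nulls=%d low=%d high=%d%n",
              rs.getLong(1), rs.getLong(2), rs.getLong(3));
        }
      }
    }
  }
}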

[46/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
--
diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index 913e3cc..f026ff9 100644
--- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -149,6 +149,72 @@ const char* _kEventRequestTypeNames[] = {
 };
const std::map<int, const char*> _EventRequestType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kEventRequestTypeValues, _kEventRequestTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
+int _kSerdeTypeValues[] = {
+  SerdeType::HIVE,
+  SerdeType::SCHEMA_REGISTRY
+};
+const char* _kSerdeTypeNames[] = {
+  "HIVE",
+  "SCHEMA_REGISTRY"
+};
+const std::map<int, const char*> _SerdeType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(2, _kSerdeTypeValues, _kSerdeTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
+int _kSchemaTypeValues[] = {
+  SchemaType::HIVE,
+  SchemaType::AVRO
+};
+const char* _kSchemaTypeNames[] = {
+  "HIVE",
+  "AVRO"
+};
+const std::map<int, const char*> _SchemaType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(2, _kSchemaTypeValues, _kSchemaTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
+int _kSchemaCompatibilityValues[] = {
+  SchemaCompatibility::NONE,
+  SchemaCompatibility::BACKWARD,
+  SchemaCompatibility::FORWARD,
+  SchemaCompatibility::BOTH
+};
+const char* _kSchemaCompatibilityNames[] = {
+  "NONE",
+  "BACKWARD",
+  "FORWARD",
+  "BOTH"
+};
+const std::map<int, const char*> _SchemaCompatibility_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(4, _kSchemaCompatibilityValues, _kSchemaCompatibilityNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
+int _kSchemaValidationValues[] = {
+  SchemaValidation::LATEST,
+  SchemaValidation::ALL
+};
+const char* _kSchemaValidationNames[] = {
+  "LATEST",
+  "ALL"
+};
+const std::map<int, const char*> _SchemaValidation_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(2, _kSchemaValidationValues, _kSchemaValidationNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
+int _kSchemaVersionStateValues[] = {
+  SchemaVersionState::INITIATED,
+  SchemaVersionState::START_REVIEW,
+  SchemaVersionState::CHANGES_REQUIRED,
+  SchemaVersionState::REVIEWED,
+  SchemaVersionState::ENABLED,
+  SchemaVersionState::DISABLED,
+  SchemaVersionState::ARCHIVED,
+  SchemaVersionState::DELETED
+};
+const char* _kSchemaVersionStateNames[] = {
+  "INITIATED",
+  "START_REVIEW",
+  "CHANGES_REQUIRED",
+  "REVIEWED",
+  "ENABLED",
+  "DISABLED",
+  "ARCHIVED",
+  "DELETED"
+};
+const std::map<int, const char*> _SchemaVersionState_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(8, _kSchemaVersionStateValues, _kSchemaVersionStateNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
 int _kFunctionTypeValues[] = {
   FunctionType::JAVA
 };
@@ -3999,6 +4065,26 @@ void SerDeInfo::__set_parameters(const std::map<std::string, std::string> & val)
   this->parameters = val;
 }
 
+void SerDeInfo::__set_description(const std::string& val) {
+  this->description = val;
+__isset.description = true;
+}
+
+void SerDeInfo::__set_serializerClass(const std::string& val) {
+  this->serializerClass = val;
+__isset.serializerClass = true;
+}
+
+void SerDeInfo::__set_deserializerClass(const std::string& val) {
+  this->deserializerClass = val;
+__isset.deserializerClass = true;
+}
+
+void SerDeInfo::__set_serdeType(const SerdeType::type val) {
+  this->serdeType = val;
+__isset.serdeType = true;
+}
+
 uint32_t SerDeInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
 
   apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
@@ -4059,6 +4145,40 @@ uint32_t SerDeInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
   xfer += iprot->skip(ftype);
 }
 break;
+      case 4:
+        if (ftype == ::apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->description);
+          this->__isset.description = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
+      case 5:
+        if (ftype == ::apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->serializerClass);
+          this->__isset.serializerClass = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
+      case 6:
+        if (ftype == ::apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->deserializerClass);
+          this->__isset.deserializerClass = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
+      case 7:
+        if (ftype == ::apache::thrift::protocol::T_I32) {
+          int32_t ecast144;
+ 
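The four new optional SerDeInfo fields (description, serializerClass, deserializerClass, serdeType) have matching setters on the Java side of the same generated Thrift model. A short sketch of populating them, assuming the usual generated setter names; the serde and class names used are made up for illustration:

import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SerdeType;

public class SerDeInfoExample {
  public static void main(String[] args) {
    SerDeInfo serde = new SerDeInfo();
    serde.setName("example_serde");                                // required name
    serde.setSerializationLib("org.example.ExampleSerDe");         // hypothetical serde class
    // The new optional fields from HIVE-17990; unset fields stay absent on the wire.
    serde.setDescription("demo serde for schema registry objects");
    serde.setSerializerClass("org.example.ExampleSerializer");     // hypothetical
    serde.setDeserializerClass("org.example.ExampleDeserializer"); // hypothetical
    serde.setSerdeType(SerdeType.SCHEMA_REGISTRY);
    System.out.println(serde);
  }
}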

[34/50] [abbrv] hive git commit: HIVE-17983 Make the standalone metastore generate tarballs etc.

2017-12-07 Thread gates
HIVE-17983 Make the standalone metastore generate tarballs etc.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b99b8c7a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b99b8c7a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b99b8c7a

Branch: refs/heads/standalone-metastore
Commit: b99b8c7a8faf168f0e8215cb77b129be7870d0f9
Parents: 83cfbaf
Author: Alan Gates 
Authored: Thu Oct 19 16:49:38 2017 -0700
Committer: Alan Gates 
Committed: Wed Dec 6 17:42:51 2017 -0800

--
 .../org/apache/hive/beeline/HiveSchemaTool.java |6 +-
 bin/ext/metastore.sh|   41 -
 binary-package-licenses/README  |1 +
 .../org/apache/hive/beeline/TestSchemaTool.java |   16 +-
 standalone-metastore/DEV-README |   40 +
 .../binary-package-licenses/NOTICE  |4 +
 .../com.google.protobuf-LICENSE |   42 +
 .../javax.transaction.transaction-api-LICENSE   |  128 ++
 .../binary-package-licenses/javolution-LICENSE  |   25 +
 .../binary-package-licenses/jline-LICENSE   |   32 +
 .../binary-package-licenses/org.antlr-LICENSE   |   27 +
 .../binary-package-licenses/sqlline-LICENSE |   33 +
 standalone-metastore/pom.xml|   97 +-
 standalone-metastore/src/assembly/bin.xml   |  126 ++
 standalone-metastore/src/assembly/src.xml   |   53 +
 .../hive/metastore/IMetaStoreSchemaInfo.java|7 +
 .../hive/metastore/MetaStoreSchemaInfo.java |   16 +-
 .../hive/metastore/tools/HiveSchemaHelper.java  |   80 +-
 .../metastore/tools/MetastoreSchemaTool.java| 1309 ++
 .../src/main/resources/metastore-log4j2.xml |   30 +
 standalone-metastore/src/main/scripts/base  |  238 +++
 .../src/main/scripts/ext/metastore.sh   |   41 +
 .../src/main/scripts/ext/schemaTool.sh  |   33 +
 .../src/main/scripts/metastore-config.sh|   70 +
 .../src/main/scripts/schematool |   21 +
 .../src/main/scripts/start-metastore|   21 +
 .../main/sql/derby/hive-schema-2.3.0.derby.sql  |  456 +
 .../main/sql/derby/hive-schema-3.0.0.derby.sql  |  508 ++
 .../sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql  |   46 +
 .../src/main/sql/derby/upgrade.order.derby  |1 +
 .../src/main/sql/mssql/create-user.mssql.sql|5 +
 .../main/sql/mssql/hive-schema-2.3.0.mssql.sql  | 1023 +++
 .../main/sql/mssql/hive-schema-3.0.0.mssql.sql  | 1112 
 .../sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql  |  106 ++
 .../src/main/sql/mssql/upgrade.order.mssql  |1 +
 .../src/main/sql/mysql/create-user.mysql.sql|8 +
 .../main/sql/mysql/hive-schema-2.3.0.mysql.sql  |  970 ++
 .../main/sql/mysql/hive-schema-3.0.0.mysql.sql  | 1045 +++
 .../sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql  |   90 +
 .../src/main/sql/mysql/upgrade.order.mysql  |1 +
 .../src/main/sql/oracle/create-user.oracle.sql  |3 +
 .../sql/oracle/hive-schema-2.3.0.oracle.sql |  926 ++
 .../sql/oracle/hive-schema-3.0.0.oracle.sql | 1014 +++
 .../oracle/upgrade-2.3.0-to-3.0.0.oracle.sql|  107 ++
 .../src/main/sql/oracle/upgrade.order.oracle|1 +
 .../main/sql/postgres/create-user.postgres.sql  |2 +
 .../sql/postgres/hive-schema-2.3.0.postgres.sql | 1593 
 .../sql/postgres/hive-schema-3.0.0.postgres.sql | 1699 ++
 .../upgrade-2.3.0-to-3.0.0.postgres.sql |  121 ++
 .../main/sql/postgres/upgrade.order.postgres|1 +
 .../hive/metastore/dbinstall/DbInstallBase.java |  280 +++
 .../hive/metastore/dbinstall/ITestMysql.java|   82 +
 .../hive/metastore/dbinstall/ITestOracle.java   |   83 +
 .../hive/metastore/dbinstall/ITestPostgres.java |   82 +
 .../metastore/dbinstall/ITestSqlServer.java |   84 +
 .../tools/TestMetastoreSchemaTool.java  |   67 +
 .../tools/TestSchemaToolForMetastore.java   |  467 +
 57 files changed, 14431 insertions(+), 90 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b99b8c7a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index 74591ac..ca05b2a 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -159,12 +159,12 @@ public class HiveSchemaTool {
 
   private NestedScriptParser getDbCommandParser(String dbType, String metaDbType) {
 return HiveSchemaHelper.getDbCommandParser(dbType, dbOpts, userName,
-   passWord, hiveConf, metaDbType);
+   
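The commit lays its SQL out as src/main/sql/<dbType>/hive-schema-<version>.<dbType>.sql plus matching upgrade scripts, per the file list above. A small sketch of how a tool might derive those paths from a dbType and version; the helper names are made up, only the path layout comes from the commit:

public class SchemaScriptLocator {

  // Mirrors the src/main/sql/<dbType>/ layout added by this commit,
  // e.g. sql/mssql/hive-schema-2.3.0.mssql.sql. Purely illustrative.
  static String initScript(String dbType, String version) {
    return String.format("sql/%s/hive-schema-%s.%s.sql", dbType, version, dbType);
  }

  static String upgradeScript(String dbType, String from, String to) {
    return String.format("sql/%s/upgrade-%s-to-%s.%s.sql", dbType, from, to, dbType);
  }

  public static void main(String[] args) {
    System.out.println(initScript("mssql", "2.3.0"));             // sql/mssql/hive-schema-2.3.0.mssql.sql
    System.out.println(upgradeScript("mssql", "2.3.0", "3.0.0")); // sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
  }
}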

[47/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage for Schema Registry objects

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/bd212257/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
--
diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
index bf4bd7a..5cddcba 100644
--- a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
+++ b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
@@ -917,6 +917,76 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf {
 printf("create_or_drop_wm_trigger_to_pool_mapping\n");
   }
 
+  void create_ischema(const ISchema& schema) {
+    // Your implementation goes here
+    printf("create_ischema\n");
+  }
+
+  void alter_ischema(const std::string& schemaName, const ISchema& newSchema) {
+    // Your implementation goes here
+    printf("alter_ischema\n");
+  }
+
+  void get_ischema(ISchema& _return, const std::string& schemaName) {
+    // Your implementation goes here
+    printf("get_ischema\n");
+  }
+
+  void drop_ischema(const std::string& schemaName) {
+    // Your implementation goes here
+    printf("drop_ischema\n");
+  }
+
+  void add_schema_version(const SchemaVersion& schemaVersion) {
+    // Your implementation goes here
+    printf("add_schema_version\n");
+  }
+
+  void get_schema_version(SchemaVersion& _return, const std::string& schemaName, const int32_t version) {
+    // Your implementation goes here
+    printf("get_schema_version\n");
+  }
+
+  void get_schema_latest_version(SchemaVersion& _return, const std::string& schemaName) {
+    // Your implementation goes here
+    printf("get_schema_latest_version\n");
+  }
+
+  void get_schema_all_versions(std::vector<SchemaVersion> & _return, const std::string& schemaName) {
+    // Your implementation goes here
+    printf("get_schema_all_versions\n");
+  }
+
+  void drop_schema_version(const std::string& schemaName, const int32_t version) {
+    // Your implementation goes here
+    printf("drop_schema_version\n");
+  }
+
+  void get_schemas_by_cols(FindSchemasByColsResp& _return, const FindSchemasByColsRqst& rqst) {
+    // Your implementation goes here
+    printf("get_schemas_by_cols\n");
+  }
+
+  void map_schema_version_to_serde(const std::string& schemaName, const int32_t version, const std::string& serdeName) {
+    // Your implementation goes here
+    printf("map_schema_version_to_serde\n");
+  }
+
+  void set_schema_version_state(const std::string& schemaName, const int32_t version, const SchemaVersionState::type state) {
+    // Your implementation goes here
+    printf("set_schema_version_state\n");
+  }
+
+  void add_serde(const SerDeInfo& serde) {
+    // Your implementation goes here
+    printf("add_serde\n");
+  }
+
+  void get_serde(SerDeInfo& _return, const std::string& serdeName) {
+    // Your implementation goes here
+    printf("get_serde\n");
+  }
 };
 
 int main(int argc, char **argv) {
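Each skeleton method pairs one-to-one with a Thrift service method, and the same surface appears on the generated Java interface ThriftHiveMetastore.Iface. A hedged sketch of exercising the new schema-registry calls through such a handle; the field and method names follow the Thrift definitions excerpted above, but treat the exact Java surface, and the schema values, as assumptions:

import org.apache.hadoop.hive.metastore.api.ISchema;
import org.apache.hadoop.hive.metastore.api.SchemaCompatibility;
import org.apache.hadoop.hive.metastore.api.SchemaType;
import org.apache.hadoop.hive.metastore.api.SchemaValidation;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.TException;

public class ISchemaCalls {
  // Sketch only: 'client' would be a connected ThriftHiveMetastore.Iface,
  // e.g. the Thrift connection underlying HiveMetaStoreClient.
  static void createAndFetch(ThriftHiveMetastore.Iface client) throws TException {
    ISchema schema = new ISchema();
    schema.setName("web_events");                       // hypothetical schema name
    schema.setSchemaType(SchemaType.AVRO);
    schema.setCompatibility(SchemaCompatibility.BACKWARD);
    schema.setValidationLevel(SchemaValidation.LATEST);
    client.create_ischema(schema);                      // mirrors the C++ skeleton above
    ISchema fetched = client.get_ischema("web_events");
    System.out.println("stored schema: " + fetched.getName());
  }
}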



[19/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
--
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
new file mode 100644
index 000..180a666
--- /dev/null
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+// Validate the metastore client call validatePartitionNameCharacters to ensure it throws
+// an exception if partition fields contain Unicode characters or commas
+
+public class TestPartitionNameWhitelistValidation {
+
+  private static final String partitionValidationPattern = "[\\x20-\\x7E&&[^,]]*";
+  private static Configuration conf;
+  private static HiveMetaStoreClient msc;
+
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
+    System.setProperty(ConfVars.PARTITION_NAME_WHITELIST_PATTERN.toString(), partitionValidationPattern);
+    conf = MetastoreConf.newMetastoreConf();
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
+    msc = new HiveMetaStoreClient(conf);
+  }
+
+  // Runs an instance of DisallowUnicodePreEventListener
+  // Returns whether or not it succeeded
+  private boolean runValidation(List<String> partVals) {
+    try {
+      msc.validatePartitionNameCharacters(partVals);
+    } catch (Exception e) {
+      return false;
+    }
+    return true;
+  }
+
+  // Sample data
+  private List<String> getPartValsWithUnicode() {
+    List<String> partVals = new ArrayList<>();
+    partVals.add("klâwen");
+    partVals.add("tägelîch");
+
+    return partVals;
+  }
+
+  private List<String> getPartValsWithCommas() {
+    List<String> partVals = new ArrayList<>();
+    partVals.add("a,b");
+    partVals.add("c,d,e,f");
+
+    return partVals;
+  }
+
+  private List<String> getPartValsWithValidCharacters() {
+    List<String> partVals = new ArrayList<>();
+    partVals.add("part1");
+    partVals.add("part2");
+
+    return partVals;
+  }
+
+  @Test
+  public void testAddPartitionWithCommas() {
+assertFalse("Add a partition with commas in name",
+runValidation(getPartValsWithCommas()));
+  }
+
+  @Test
+  public void testAddPartitionWithUnicode() {
+assertFalse("Add a partition with unicode characters in name",
+runValidation(getPartValsWithUnicode()));
+  }
+
+  @Test
+  public void testAddPartitionWithValidPartVal() {
+assertTrue("Add a partition with unicode characters in name",
+runValidation(getPartValsWithValidCharacters()));
+  }
+
+  @Test
+  public void testAppendPartitionWithUnicode() {
+assertFalse("Append a partition with unicode characters in name",
+runValidation(getPartValsWithUnicode()));
+  }
+
+  @Test
+  public void testAppendPartitionWithCommas() {
+assertFalse("Append a partition with unicode characters in name",
+runValidation(getPartValsWithCommas()));
+  }
+
+  @Test
+  public void testAppendPartitionWithValidCharacters() {
+assertTrue("Append a partition with no unicode characters in name",
+runValidation(getPartValsWithValidCharacters()));
+  }
+
+}
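The whitelist pattern in this test, [\x20-\x7E&&[^,]]*, is a character-class intersection: printable ASCII minus the comma. A quick self-contained check of what it accepts and rejects, reusing the test's own sample values:

import java.util.regex.Pattern;

public class WhitelistPatternDemo {
  public static void main(String[] args) {
    // Printable ASCII (0x20-0x7E) intersected with "anything but a comma".
    Pattern p = Pattern.compile("[\\x20-\\x7E&&[^,]]*");
    System.out.println(p.matcher("part1").matches());     // true: plain ASCII
    System.out.println(p.matcher("a,b").matches());       // false: comma excluded
    System.out.println(p.matcher("tägelîch").matches());  // false: non-ASCII letters
  }
}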

http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java
--
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java

[18/50] [abbrv] hive git commit: HIVE-17981 Create a set of builders for Thrift classes

2017-12-07 Thread gates
HIVE-17981 Create a set of builders for Thrift classes


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dad67c49
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dad67c49
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dad67c49

Branch: refs/heads/standalone-metastore
Commit: dad67c4997ffdded88a4487956528109824d
Parents: d79c459
Author: Alan Gates 
Authored: Tue Sep 12 13:18:09 2017 -0700
Committer: Alan Gates 
Committed: Wed Dec 6 15:05:28 2017 -0800

--
 .../client/builder/ConstraintBuilder.java   |  98 +
 .../client/builder/DatabaseBuilder.java |  89 
 .../GrantRevokePrivilegeRequestBuilder.java |  63 ++
 .../builder/HiveObjectPrivilegeBuilder.java |  63 ++
 .../client/builder/HiveObjectRefBuilder.java|  63 ++
 .../metastore/client/builder/IndexBuilder.java  | 104 +
 .../client/builder/PartitionBuilder.java| 102 +
 .../builder/PrivilegeGrantInfoBuilder.java  |  84 
 .../metastore/client/builder/RoleBuilder.java   |  55 +
 .../client/builder/SQLForeignKeyBuilder.java|  83 
 .../builder/SQLNotNullConstraintBuilder.java|  37 
 .../client/builder/SQLPrimaryKeyBuilder.java|  42 
 .../builder/SQLUniqueConstraintBuilder.java |  37 
 .../builder/StorageDescriptorBuilder.java   | 210 +++
 .../metastore/client/builder/TableBuilder.java  | 156 ++
 15 files changed, 1286 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/dad67c49/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ConstraintBuilder.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ConstraintBuilder.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ConstraintBuilder.java
new file mode 100644
index 000..50e779a
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ConstraintBuilder.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.client.builder;
+
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+
+/**
+ * Base builder for all types of constraints.  Database name, table name, and column name
+ * must be provided.
+ * @param <T> Type of builder extending this.
+ */
+abstract class ConstraintBuilder<T> {
+  protected String dbName, tableName, columnName, constraintName;
+  protected int keySeq;
+  protected boolean enable, validate, rely;
+  private T child;
+
+  protected ConstraintBuilder() {
+    keySeq = 1;
+    enable = true;
+    validate = rely = false;
+  }
+
+  protected void setChild(T child) {
+    this.child = child;
+  }
+
+  protected void checkBuildable(String defaultConstraintName) throws MetaException {
+    if (dbName == null || tableName == null || columnName == null) {
+      throw new MetaException("You must provide database name, table name, and column name");
+    }
+    if (constraintName == null) {
+      constraintName = dbName + "_" + tableName + "_" + columnName + "_" + defaultConstraintName;
+    }
+  }
+
+  public T setDbName(String dbName) {
+    this.dbName = dbName;
+    return child;
+  }
+
+  public T setTableName(String tableName) {
+    this.tableName = tableName;
+    return child;
+  }
+
+  public T setDbAndTableName(Table table) {
+    this.dbName = table.getDbName();
+    this.tableName = table.getTableName();
+    return child;
+  }
+
+  public T setColumnName(String columnName) {
+    this.columnName = columnName;
+    return child;
+  }
+
+  public T setConstraintName(String constraintName) {
+    this.constraintName = constraintName;
+    return child;
+  }
+
+  public T setKeySeq(int keySeq) {
+    this.keySeq = keySeq;
+    return child;
+  }
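The self-typed child reference is what lets subclass setters chain without casts (a curiously recurring template pattern). A hedged usage sketch against one of the concrete builders from this commit's file list; whether SQLPrimaryKeyBuilder exposes a build() method, and its return type, are not shown in this excerpt, so the sketch stops short of building:

import org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder;

public class BuilderChainDemo {
  public static void main(String[] args) {
    // Each setter returns the child type T, so chaining never leaves the subclass.
    SQLPrimaryKeyBuilder pk = new SQLPrimaryKeyBuilder();
    pk.setDbName("default")
      .setTableName("web_logs")   // hypothetical table
      .setColumnName("id")
      .setKeySeq(1);
    // With no explicit constraint name, checkBuildable() derives one of the form
    // default_web_logs_id_<default suffix> before the constraint is built.
  }
}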

[02/50] [abbrv] hive git commit: HIVE-18185: update insert_values_orig_table_use_metadata.q.out (Bertalan Kondrat via Zoltan Haindrich)

2017-12-07 Thread gates
HIVE-18185: update insert_values_orig_table_use_metadata.q.out (Bertalan Kondrat via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/25dc2c4a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/25dc2c4a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/25dc2c4a

Branch: refs/heads/standalone-metastore
Commit: 25dc2c4ac85c57f2717353a9a536f6db01b1c73c
Parents: a3060b3
Author: Bertalan Kondrat 
Authored: Wed Dec 6 09:29:33 2017 +0100
Committer: Zoltan Haindrich 
Committed: Wed Dec 6 09:47:55 2017 +0100

--
 .../insert_values_orig_table_use_metadata.q.out | 23 
 1 file changed, 14 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/25dc2c4a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
index 92c4eaf..143742b 100644
--- a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
+++ b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
@@ -107,6 +107,7 @@ Table Parameters:
rawDataSize 0   
totalSize   0   
transactional   true
+   transactional_propertiesdefault 
  A masked pattern was here 
 
 # Storage Information   
@@ -172,6 +173,7 @@ Table Parameters:
rawDataSize 0   
totalSize   295399  
transactional   true
+   transactional_propertiesdefault 
  A masked pattern was here 
 
 # Storage Information   
@@ -373,8 +375,9 @@ Table Parameters:
numFiles1   
numRows 0   
rawDataSize 0   
-   totalSize   1555
+   totalSize   1554
transactional   true
+   transactional_propertiesdefault 
  A masked pattern was here 
 
 # Storage Information   
@@ -407,9 +410,9 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: acid_ivot
-  Statistics: Num rows: 1 Data size: 1555 Basic stats: COMPLETE Column stats: COMPLETE
+  Statistics: Num rows: 1 Data size: 1554 Basic stats: COMPLETE Column stats: COMPLETE
    Select Operator
-Statistics: Num rows: 1 Data size: 1555 Basic stats: COMPLETE Column stats: COMPLETE
+Statistics: Num rows: 1 Data size: 1554 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
   aggregations: count()
   mode: hash
@@ -507,6 +510,7 @@ Table Parameters:
rawDataSize 0   
totalSize   3109
transactional   true
+   transactional_propertiesdefault 
  A masked pattern was here 
 
 # Storage Information   
@@ -637,6 +641,7 @@ Table Parameters:
rawDataSize 0   
totalSize   298508  
transactional   true
+   transactional_propertiesdefault 
  A masked pattern was here 
 
 # Storage Information   
@@ -902,17 +907,17 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: sp
-  Statistics: Num rows: 1 Data size: 5820 Basic stats: COMPLETE Column stats: COMPLETE
+  Statistics: Num rows: 1 Data size: 5820 Basic stats: PARTIAL Column stats: COMPLETE
    Select Operator
-Statistics: Num rows: 1 Data size: 5820 Basic stats: COMPLETE Column stats: COMPLETE
+Statistics: Num rows: 1 Data size: 5820 Basic stats: PARTIAL Column stats: COMPLETE
 Group By Operator
   aggregations: count()
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column 

[10/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
new file mode 100644
index 000..fc254c6
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -0,0 +1,2749 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.NoSuchElementException;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.security.PrivilegedExceptionAction;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.ValidTxnList;
+import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.txn.TxnUtils;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.utils.ObjectPair;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TApplicationException;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TFramedTransport;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
+
+/**
+ * Hive Metastore Client.
+ * The public implementation of IMetaStoreClient. Methods not inherited from IMetaStoreClient
+ * are not public and can change. Hence this is marked as unstable.
+ * For users who require a retry mechanism when the connection between metastore and client is
+ * broken, the RetryingMetaStoreClient class should be used.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
+  /**
+   * Capabilities of the current client. If this client talks to a MetaStore server in a manner
+   * implying the usage of some expanded features that require client-side support that this
+   * client doesn't have (e.g. getting a table of a new type), it will get back failures when
+   * the capability checking is enabled (the default).
+   */
+  public final static 
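A minimal sketch of the documented usage pattern, assuming direct construction; production code would go through RetryingMetaStoreClient as the javadoc advises, and the thrift URIs are expected to be present in the configuration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

public class ClientSmokeTest {
  public static void main(String[] args) throws Exception {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // Assumes a metastore reachable via the thrift URIs configured in conf.
    try (HiveMetaStoreClient client = new HiveMetaStoreClient(conf)) {
      System.out.println("databases: " + client.getAllDatabases());
    } // AutoCloseable (see the class declaration above) closes the connection
  }
}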

[12/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/metastore/src/test/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
--
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java b/metastore/src/test/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
deleted file mode 100644
index 7df7ac5..000
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.events.EventCleanerTask;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class MetaStoreTestUtils {
-
-  private static final Logger LOG = LoggerFactory.getLogger("hive.log");
-  public static final int RETRY_COUNT = 10;
-
-  public static int startMetaStore() throws Exception {
-    return MetaStoreTestUtils.startMetaStore(HadoopThriftAuthBridge.getBridge(), null);
-  }
-
-  public static int startMetaStore(final HadoopThriftAuthBridge bridge, HiveConf conf) throws Exception {
-    int port = MetaStoreTestUtils.findFreePort();
-    MetaStoreTestUtils.startMetaStore(port, bridge, conf);
-    return port;
-  }
-
-  public static int startMetaStore(HiveConf conf) throws Exception {
-    return startMetaStore(HadoopThriftAuthBridge.getBridge(), conf);
-  }
-
-  public static void startMetaStore(final int port, final HadoopThriftAuthBridge bridge) throws Exception {
-    MetaStoreTestUtils.startMetaStore(port, bridge, null);
-  }
-
-  public static void startMetaStore(final int port,
-      final HadoopThriftAuthBridge bridge, HiveConf hiveConf)
-      throws Exception {
-    if (hiveConf == null) {
-      hiveConf = new HiveConf(HMSHandler.class);
-    }
-    final HiveConf finalHiveConf = hiveConf;
-    Thread thread = new Thread(new Runnable() {
-      @Override
-      public void run() {
-        try {
-          HiveMetaStore.startMetaStore(port, bridge, finalHiveConf);
-        } catch (Throwable e) {
-          LOG.error("Metastore Thrift Server threw an exception...", e);
-        }
-      }
-    });
-    thread.setDaemon(true);
-    thread.start();
-    MetaStoreTestUtils.loopUntilHMSReady(port);
-  }
-
-  public static int startMetaStoreWithRetry(final HadoopThriftAuthBridge bridge) throws Exception {
-    return MetaStoreTestUtils.startMetaStoreWithRetry(bridge, null);
-  }
-
-  public static int startMetaStoreWithRetry(HiveConf conf) throws Exception {
-    return MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf);
-  }
-
-  public static int startMetaStoreWithRetry() throws Exception {
-    return MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), null);
-  }
-
-  public static int startMetaStoreWithRetry(final HadoopThriftAuthBridge bridge, HiveConf conf)
-      throws Exception {
-    Exception metaStoreException = null;
-    int metaStorePort = 0;
-
-    for (int tryCount = 0; tryCount < MetaStoreTestUtils.RETRY_COUNT; tryCount++) {
-      try {
-        metaStorePort = MetaStoreTestUtils.findFreePort();
-        MetaStoreTestUtils.startMetaStore(metaStorePort, bridge, conf);
-        return metaStorePort;
-      } catch (ConnectException ce) {
-        metaStoreException = ce;
-      }
-    }
-
-    throw metaStoreException;
-  }
-
-  /**
-   * A simple connect test to make sure that the metastore is up
-   * @throws Exception
-   */
-  public static void loopUntilHMSReady(int port) throws Exception {
-    int retries = 0;
-    Exception exc = null;
-    while (true) {
-      try {
-Socket socket = new 
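loopUntilHMSReady, excerpted above, simply polls the Thrift port until a TCP connect succeeds. A generic, self-contained sketch of that readiness loop; the retry count, timeout, and sleep interval are illustrative choices, not the values the test utility uses:

import java.net.InetSocketAddress;
import java.net.Socket;

public class PortReadyLoop {

  // Block until something accepts connections on localhost:port, or give up.
  static void waitForPort(int port, int maxAttempts) throws Exception {
    Exception last = null;
    for (int i = 0; i < maxAttempts; i++) {
      try (Socket socket = new Socket()) {
        socket.connect(new InetSocketAddress("localhost", port), 1000);
        return; // connected: the server is up
      } catch (Exception e) {
        last = e;
        Thread.sleep(1000); // not yet listening; back off and retry
      }
    }
    throw last;
  }

  public static void main(String[] args) throws Exception {
    waitForPort(9083, 10); // 9083 is the conventional metastore port
  }
}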

[01/50] [abbrv] hive git commit: HIVE-18138: Fix columnstats problem in case schema evolution (Zoltan Haindrich, reviewed by Ashutosh Chauhan) [Forced Update!]

2017-12-07 Thread gates
Repository: hive
Updated Branches:
  refs/heads/standalone-metastore 077d498d7 -> bd212257f (forced update)


HIVE-18138: Fix columnstats problem in case schema evolution (Zoltan Haindrich, reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c2fc0fb8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c2fc0fb8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c2fc0fb8

Branch: refs/heads/standalone-metastore
Commit: c2fc0fb88b794f07d9d4b1cd5f4ef3e1b8737911
Parents: a1f54df
Author: Zoltan Haindrich 
Authored: Wed Dec 6 09:35:48 2017 +0100
Committer: Zoltan Haindrich 
Committed: Wed Dec 6 09:47:55 2017 +0100

--
 .../hadoop/hive/metastore/HiveAlterHandler.java   | 14 +++---
 .../org/apache/hadoop/hive/metastore/ObjectStore.java |  8 +---
 2 files changed, 16 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c2fc0fb8/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index b445723..13967d5 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -300,21 +300,29 @@ public class HiveAlterHandler implements AlterHandler {
       MetaStoreUtils.updateTableStatsFast(db, newt, wh, false, true, environmentContext);
 }
 
-if (cascade && isPartitionedTable) {
+if (isPartitionedTable) {
   //Currently only column related changes can be cascaded in alter 
table
   if(!MetaStoreUtils.areSameColumns(oldt.getSd().getCols(), 
newt.getSd().getCols())) {
 parts = msdb.getPartitions(dbname, name, -1);
 for (Partition part : parts) {
+  Partition oldPart = new Partition(part);
   List oldCols = part.getSd().getCols();
   part.getSd().setCols(newt.getSd().getCols());
   ColumnStatistics colStats = 
updateOrGetPartitionColumnStats(msdb, dbname, name,
   part.getValues(), oldCols, oldt, part, null);
   assert(colStats == null);
-  msdb.alterPartition(dbname, name, part.getValues(), part);
+  if (cascade) {
+msdb.alterPartition(dbname, name, part.getValues(), part);
+  } else {
+// update changed properties (stats)
+oldPart.setParameters(part.getParameters());
+msdb.alterPartition(dbname, name, part.getValues(), oldPart);
+  }
 }
 msdb.alterTable(dbname, name, newt);
   } else {
-LOG.warn("Alter table does not cascade changes to its 
partitions.");
+LOG.warn("Alter table not cascaded to partitions.");
+alterTableUpdateTableColumnStats(msdb, oldt, newt);
   }
 } else {
   alterTableUpdateTableColumnStats(msdb, oldt, newt);

http://git-wip-us.apache.org/repos/asf/hive/blob/c2fc0fb8/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 0818704..2e80c9d 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -3500,7 +3500,7 @@ public class ObjectStore implements RawStore, Configurable {
       LOG.debug("filter specified is {}, JDOQL filter is {}", filter, queryFilterString);
   if (LOG.isDebugEnabled()) {
        for (Entry<String, Object> entry : params.entrySet()) {
-          LOG.debug("key: {} value: {} class: {}", entry.getKey(), entry.getValue(), 
+          LOG.debug("key: {} value: {} class: {}", entry.getKey(), entry.getValue(),
              entry.getValue().getClass().getName());
 }
   }
@@ -7665,7 +7665,7 @@ public class ObjectStore implements RawStore, Configurable {
   private List<MTableColumnStatistics> getMTableColumnStatistics(Table table, List<String> colNames, QueryWrapper queryWrapper)
       throws MetaException {
 if (colNames == null || colNames.isEmpty()) {
-  return null;
+  return Collections.emptyList();
 }
 
 boolean 
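The one-line ObjectStore change above swaps a null return for Collections.emptyList(), which spares every caller a null check before iterating. A tiny, self-contained illustration of the contract difference (the method name and data are made up):

import java.util.Collections;
import java.util.List;

public class EmptyVsNull {

  static List<String> statsFor(List<String> colNames) {
    if (colNames == null || colNames.isEmpty()) {
      return Collections.emptyList(); // callers can iterate unconditionally
    }
    return colNames;
  }

  public static void main(String[] args) {
    // No NullPointerException risk: the loop simply runs zero times.
    for (String s : statsFor(null)) {
      System.out.println(s);
    }
    System.out.println("done");
  }
}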

[04/50] [abbrv] hive git commit: HIVE-18189: Order by position does not work when cbo is disabled (Daniel Dai reviewed by Ashutosh Chauhan)

2017-12-07 Thread gates
HIVE-18189: Order by position does not work when cbo is disabled (Daniel Dai reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1968a9d4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1968a9d4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1968a9d4

Branch: refs/heads/standalone-metastore
Commit: 1968a9d458b7ba3fb61c47084ed19253cd2e18e8
Parents: 25dc2c4
Author: Daniel Dai 
Authored: Wed Dec 6 09:31:26 2017 +0100
Committer: Zoltan Haindrich 
Committed: Wed Dec 6 09:47:55 2017 +0100

--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  46 +-
 .../queries/clientpositive/groupby_position.q   |  11 +
 .../clientpositive/groupby_position.q.out   | 941 ++
 .../clientpositive/spark/groupby_position.q.out | 943 +++
 4 files changed, 1939 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1968a9d4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index f6bbac6..07742e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -12841,7 +12841,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   for (int child_pos = 0; child_pos < child_count; ++child_pos) {
 ASTNode node = (ASTNode) next.getChild(child_pos);
 int type = node.getToken().getType();
-if (type == HiveParser.TOK_SELECT) {
+if (type == HiveParser.TOK_SELECT || type == HiveParser.TOK_SELECTDI) {
   selectNode = node;
 } else if (type == HiveParser.TOK_GROUPBY) {
   groupbyNode = node;
@@ -12877,7 +12877,49 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   }
 }
 
-    // orderby position will be processed in genPlan
+    // replace each of the position alias in ORDERBY with the actual column name,
+    // if cbo is enabled, orderby position will be processed in genPlan
+    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)
+        && orderbyNode != null) {
+      isAllCol = false;
+      for (int child_pos = 0; child_pos < selectNode.getChildCount(); ++child_pos) {
+        ASTNode node = (ASTNode) selectNode.getChild(child_pos).getChild(0);
+        if (node != null && node.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
+          isAllCol = true;
+        }
+      }
+      for (int child_pos = 0; child_pos < orderbyNode.getChildCount(); ++child_pos) {
+        ASTNode colNode = null;
+        ASTNode node = null;
+        if (orderbyNode.getChildCount() > 0) {
+          colNode = (ASTNode) orderbyNode.getChild(child_pos).getChild(0);
+          if (colNode.getChildCount() > 0) {
+            node = (ASTNode) colNode.getChild(0);
+          }
+        }
+        if (node != null && node.getToken().getType() == HiveParser.Number) {
+          if (isObyByPos) {
+            if (!isAllCol) {
+              int pos = Integer.parseInt(node.getText());
+              if (pos > 0 && pos <= selectExpCnt && selectNode.getChild(pos - 1).getChildCount() > 0) {
+                colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
+              } else {
+                throw new SemanticException(
+                  ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
+                  "Position alias: " + pos + " does not exist\n" +
+                  "The Select List is indexed from 1 to " + selectExpCnt));
+              }
+            } else {
+              throw new SemanticException(
+                ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
+            }
+          } else { //if not using position alias and it is a number.
+            warn("Using constant number " + node.getText() +
+              " in order by. If you try to use position alias when hive.orderby.position.alias is false, the position alias will be ignored.");
+          }
+        }
+      }
+    }
   }
 
   for (int i = next.getChildren().size() - 1; i >= 0; i--) {

http://git-wip-us.apache.org/repos/asf/hive/blob/1968a9d4/ql/src/test/queries/clientpositive/groupby_position.q
--
diff --git a/ql/src/test/queries/clientpositive/groupby_position.q b/ql/src/test/queries/clientpositive/groupby_position.q
[07/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
--
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
new file mode 100644
index 000..24c59f2
--- /dev/null
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -0,0 +1,1053 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
+import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.metastore.api.WMPool;
+import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
+import org.apache.thrift.TException;
+
+/**
+ * A wrapper around {@link org.apache.hadoop.hive.metastore.ObjectStore}
+ * with the ability to control the result of commitTransaction().
+ * All other functions simply delegate to an embedded ObjectStore object.
+ * Ideally, we should have just extended ObjectStore instead of 
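As the javadoc explains, this class wraps a real store and overrides only whether commitTransaction() reports success, which lets tests exercise failure paths deterministically. A stripped-down sketch of that pattern; the interface and names are simplified stand-ins, not the RawStore API:

public class ControlledCommitDemo {
  interface Store { boolean commitTransaction(); }

  // Delegates the real work, but lets a test force the reported outcome.
  static class ControlledCommitStore implements Store {
    private final Store delegate;
    private static boolean shouldSucceed = true; // test hook, as in the real class

    ControlledCommitStore(Store delegate) { this.delegate = delegate; }

    static void setCommitSucceed(boolean succeed) { shouldSucceed = succeed; }

    @Override public boolean commitTransaction() {
      boolean real = delegate.commitTransaction(); // still run the real commit path
      return shouldSucceed && real;                // but report what the test asked for
    }
  }

  public static void main(String[] args) {
    ControlledCommitStore store = new ControlledCommitStore(() -> true);
    ControlledCommitStore.setCommitSucceed(false);
    System.out.println("commit reported: " + store.commitTransaction()); // false
  }
}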

[11/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)

2017-12-07 Thread gates
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
index b5891ab..9100c92 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
@@ -32,7 +32,6 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.io.AcidUtils;

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
index a3a35ff..85f198b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
@@ -31,11 +31,11 @@ import java.util.Stack;
 
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java
index b28315a..6dd08d2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java
@@ -20,7 +20,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index a06a672..a09b796 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -24,6 +24,7 @@ import com.google.common.collect.Lists;
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -33,7 +34,6 @@ import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Database;

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index 2400f9c..b253236 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -19,46 +19,13 @@
 package org.apache.hadoop.hive.ql.parse;
 
 
-import 

[25/50] [abbrv] hive git commit: HIVE-17982 Move metastore specific itests

2017-12-07 Thread gates
HIVE-17982 Move metastore specific itests


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/83cfbaf0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/83cfbaf0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/83cfbaf0

Branch: refs/heads/standalone-metastore
Commit: 83cfbaf00de12fd0afb2d85180cf770543f9e861
Parents: dad67c4
Author: Alan Gates 
Authored: Thu Oct 26 09:49:19 2017 -0700
Committer: Alan Gates 
Committed: Wed Dec 6 17:42:44 2017 -0800

--
 .../apache/hadoop/hive/metastore/FakeDerby.java |  424 ---
 .../hive/metastore/TestAcidTableSetup.java  |  243 ++
 .../hadoop/hive/metastore/TestAdminUser.java|   45 -
 .../metastore/TestEmbeddedHiveMetaStore.java|   54 -
 .../hadoop/hive/metastore/TestFilterHooks.java  |  281 --
 .../hive/metastore/TestHiveMetaStore.java   | 3515 --
 .../hive/metastore/TestHiveMetaStoreTxns.java   |  270 --
 ...TestHiveMetaStoreWithEnvironmentContext.java |  219 --
 .../hive/metastore/TestMarkPartition.java   |  107 -
 .../hive/metastore/TestMarkPartitionRemote.java |   32 -
 .../TestMetaStoreEndFunctionListener.java   |  143 -
 .../metastore/TestMetaStoreEventListener.java   |  524 ---
 .../TestMetaStoreEventListenerOnlyOnCommit.java |  104 -
 .../metastore/TestMetaStoreInitListener.java|   68 -
 .../metastore/TestMetaStoreListenersError.java  |   85 -
 .../metastore/TestObjectStoreInitRetry.java |  127 -
 .../TestPartitionNameWhitelistValidation.java   |  123 -
 .../hive/metastore/TestRemoteHiveMetaStore.java |   60 -
 .../TestRemoteHiveMetaStoreIpAddress.java   |   80 -
 .../TestRemoteUGIHiveMetaStoreIpAddress.java|   28 -
 .../hive/metastore/TestRetryingHMSHandler.java  |  123 -
 .../metastore/TestSetUGIOnBothClientServer.java |   31 -
 .../hive/metastore/TestSetUGIOnOnlyClient.java  |   31 -
 .../hive/metastore/TestSetUGIOnOnlyServer.java  |   31 -
 standalone-metastore/pom.xml|2 +
 .../metastore/client/builder/IndexBuilder.java  |5 +
 .../client/builder/PartitionBuilder.java|3 +-
 .../metastore/client/builder/TableBuilder.java  |   12 +-
 .../hive/metastore/conf/MetastoreConf.java  |2 +-
 .../apache/hadoop/hive/metastore/FakeDerby.java |  404 ++
 .../hive/metastore/MetaStoreTestUtils.java  |   31 +-
 .../hadoop/hive/metastore/TestAdminUser.java|   46 +
 .../metastore/TestEmbeddedHiveMetaStore.java|   48 +
 .../hadoop/hive/metastore/TestFilterHooks.java  |  303 ++
 .../hive/metastore/TestHiveMetaStore.java   | 3071 +++
 .../hive/metastore/TestHiveMetaStoreTxns.java   |  264 ++
 ...TestHiveMetaStoreWithEnvironmentContext.java |  188 +
 .../hive/metastore/TestMarkPartition.java   |  117 +
 .../hive/metastore/TestMarkPartitionRemote.java |   36 +
 .../TestMetaStoreEndFunctionListener.java   |  146 +
 .../metastore/TestMetaStoreEventListener.java   |  556 +++
 .../TestMetaStoreEventListenerOnlyOnCommit.java |  123 +
 .../TestMetaStoreEventListenerWithOldConf.java  |  178 +
 .../metastore/TestMetaStoreInitListener.java|   55 +
 .../metastore/TestMetaStoreListenersError.java  |   93 +
 .../hadoop/hive/metastore/TestObjectStore.java  |9 +-
 .../metastore/TestObjectStoreInitRetry.java |  132 +
 .../hadoop/hive/metastore/TestOldSchema.java|3 +-
 .../TestPartitionNameWhitelistValidation.java   |  122 +
 .../hive/metastore/TestRemoteHiveMetaStore.java |   62 +
 .../TestRemoteHiveMetaStoreIpAddress.java   |   65 +
 .../TestRemoteUGIHiveMetaStoreIpAddress.java|   28 +
 .../hive/metastore/TestRetryingHMSHandler.java  |   82 +
 .../metastore/TestSetUGIOnBothClientServer.java |   31 +
 .../hive/metastore/TestSetUGIOnOnlyClient.java  |   32 +
 .../hive/metastore/TestSetUGIOnOnlyServer.java  |   32 +
 .../hive/metastore/cache/TestCachedStore.java   |4 +-
 57 files changed, 6506 insertions(+), 6527 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/83cfbaf0/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java
deleted file mode 100644
index 51be504..0000000
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java
+++ /dev/null
@@ -1,424 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * 

hive git commit: HIVE-17333 Oracle does not allow change from VARCHAR2 to CLOB for upgrade (Naveen Gangam, reviewed by Aihua Xu)

2017-12-07 Thread ngangam
Repository: hive
Updated Branches:
  refs/heads/branch-2 1f7e3be05 -> 9ad4c8959


HIVE-17333 Oracle does not allow change from VARCHAR2 to CLOB for upgrade (Naveen Gangam, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9ad4c895
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9ad4c895
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9ad4c895

Branch: refs/heads/branch-2
Commit: 9ad4c8959886d1926cfaa3bbd9f0729ef2729f46
Parents: 1f7e3be
Author: Naveen Gangam 
Authored: Thu Dec 7 10:15:38 2017 -0500
Committer: Naveen Gangam 
Committed: Thu Dec 7 10:34:28 2017 -0500

--
 .../upgrade/oracle/039-HIVE-12274.oracle.sql| 27 +++-
 1 file changed, 21 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9ad4c895/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
--
diff --git a/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql b/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
index 4080685..af35684 100644
--- a/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
+++ b/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
@@ -1,8 +1,23 @@
 -- change PARAM_VALUE to CLOBs
-ALTER TABLE COLUMNS_V2 MODIFY (TYPE_NAME CLOB);;
-ALTER TABLE TABLE_PARAMS MODIFY (PARAM_VALUE CLOB);
-ALTER TABLE SERDE_PARAMS MODIFY (PARAM_VALUE CLOB);
-ALTER TABLE SD_PARAMS MODIFY (PARAM_VALUE CLOB);
+ALTER TABLE COLUMNS_V2 ADD (TEMP CLOB);
+UPDATE COLUMNS_V2 SET TEMP=TYPE_NAME;
+ALTER TABLE COLUMNS_V2 DROP COLUMN TYPE_NAME;
+ALTER TABLE COLUMNS_V2 RENAME COLUMN TEMP TO TYPE_NAME;
+
+ALTER TABLE TABLE_PARAMS ADD (TEMP CLOB);
+UPDATE TABLE_PARAMS SET TEMP=PARAM_VALUE, PARAM_VALUE=NULL;
+ALTER TABLE TABLE_PARAMS DROP COLUMN PARAM_VALUE;
+ALTER TABLE TABLE_PARAMS RENAME COLUMN TEMP TO PARAM_VALUE;
+
+ALTER TABLE SERDE_PARAMS ADD (TEMP CLOB);
+UPDATE SERDE_PARAMS SET TEMP=PARAM_VALUE, PARAM_VALUE=NULL;
+ALTER TABLE SERDE_PARAMS DROP COLUMN PARAM_VALUE;
+ALTER TABLE SERDE_PARAMS RENAME COLUMN TEMP TO PARAM_VALUE;
+
+ALTER TABLE SD_PARAMS ADD (TEMP CLOB);
+UPDATE SD_PARAMS SET TEMP=PARAM_VALUE, PARAM_VALUE=NULL;
+ALTER TABLE SD_PARAMS DROP COLUMN PARAM_VALUE;
+ALTER TABLE SD_PARAMS RENAME COLUMN TEMP TO PARAM_VALUE;
 
 -- Expand the hive table name length to 256
 ALTER TABLE TBLS MODIFY (TBL_NAME VARCHAR2(256));
@@ -13,9 +28,9 @@ ALTER TABLE PART_COL_STATS MODIFY (TABLE_NAME VARCHAR2(256));
 ALTER TABLE COMPLETED_TXN_COMPONENTS MODIFY (CTC_TABLE VARCHAR2(256));
 
 -- Expand the hive column name length to 767
-ALTER TABLE COLUMNS_V2 MODIFY (COLUMN_NAME VARCHAR(767) NOT NULL);
+ALTER TABLE COLUMNS_V2 MODIFY (COLUMN_NAME VARCHAR(767));
 ALTER TABLE PART_COL_PRIVS MODIFY (COLUMN_NAME VARCHAR2(767));
 ALTER TABLE TBL_COL_PRIVS MODIFY (COLUMN_NAME VARCHAR2(767));
 ALTER TABLE SORT_COLS MODIFY (COLUMN_NAME VARCHAR2(767));
 ALTER TABLE TAB_COL_STATS MODIFY (COLUMN_NAME VARCHAR2(767));
-ALTER TABLE PART_COL_STATS MODIFY (COLUMN_NAME VARCHAR2(767) NOT NULL);
+ALTER TABLE PART_COL_STATS MODIFY (COLUMN_NAME VARCHAR2(767));
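
The rewritten script works around an Oracle restriction: ALTER TABLE ... MODIFY cannot convert a VARCHAR2 column to CLOB in place, so each conversion adds a temporary CLOB column, copies the data across, drops the original column, and renames the temporary column back. A minimal sketch of the pattern, using a hypothetical table MY_TABLE and column MY_COL (only the identifiers are invented; the statements mirror the patch above):

-- 1. Add a scratch CLOB column alongside the VARCHAR2 column.
ALTER TABLE MY_TABLE ADD (TEMP_COL CLOB);
-- 2. Copy every value into the CLOB column, clearing the original as we go.
UPDATE MY_TABLE SET TEMP_COL = MY_COL, MY_COL = NULL;
-- 3. Drop the VARCHAR2 column and give the CLOB column its name.
ALTER TABLE MY_TABLE DROP COLUMN MY_COL;
ALTER TABLE MY_TABLE RENAME COLUMN TEMP_COL TO MY_COL;

Note that the UPDATE rewrites every row, so on a large metastore these four steps are considerably more expensive than the single MODIFY they replace.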



hive git commit: HIVE-17333 Oracle does not allow change from VARCHAR2 to CLOB for upgrade (Naveen Gangam, reviewed by Aihua Xu)

2017-12-07 Thread ngangam
Repository: hive
Updated Branches:
  refs/heads/master 7ddd915bf -> 36f0d89f0


HIVE-17333 Oracle does not allow change from VARCHAR2 to CLOB for upgrade (Naveen Gangam, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/36f0d89f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/36f0d89f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/36f0d89f

Branch: refs/heads/master
Commit: 36f0d89f0a175e65c68b2b55458a81c6cf9da197
Parents: 7ddd915
Author: Naveen Gangam 
Authored: Thu Dec 7 10:15:38 2017 -0500
Committer: Naveen Gangam 
Committed: Thu Dec 7 10:25:25 2017 -0500

--
 .../upgrade/oracle/039-HIVE-12274.oracle.sql| 27 +++-
 1 file changed, 21 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/36f0d89f/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
--
diff --git a/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql b/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
index 4080685..af35684 100644
--- a/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
+++ b/metastore/scripts/upgrade/oracle/039-HIVE-12274.oracle.sql
@@ -1,8 +1,23 @@
 -- change PARAM_VALUE to CLOBs
-ALTER TABLE COLUMNS_V2 MODIFY (TYPE_NAME CLOB);;
-ALTER TABLE TABLE_PARAMS MODIFY (PARAM_VALUE CLOB);
-ALTER TABLE SERDE_PARAMS MODIFY (PARAM_VALUE CLOB);
-ALTER TABLE SD_PARAMS MODIFY (PARAM_VALUE CLOB);
+ALTER TABLE COLUMNS_V2 ADD (TEMP CLOB);
+UPDATE COLUMNS_V2 SET TEMP=TYPE_NAME;
+ALTER TABLE COLUMNS_V2 DROP COLUMN TYPE_NAME;
+ALTER TABLE COLUMNS_V2 RENAME COLUMN TEMP TO TYPE_NAME;
+
+ALTER TABLE TABLE_PARAMS ADD (TEMP CLOB);
+UPDATE TABLE_PARAMS SET TEMP=PARAM_VALUE, PARAM_VALUE=NULL;
+ALTER TABLE TABLE_PARAMS DROP COLUMN PARAM_VALUE;
+ALTER TABLE TABLE_PARAMS RENAME COLUMN TEMP TO PARAM_VALUE;
+
+ALTER TABLE SERDE_PARAMS ADD (TEMP CLOB);
+UPDATE SERDE_PARAMS SET TEMP=PARAM_VALUE, PARAM_VALUE=NULL;
+ALTER TABLE SERDE_PARAMS DROP COLUMN PARAM_VALUE;
+ALTER TABLE SERDE_PARAMS RENAME COLUMN TEMP TO PARAM_VALUE;
+
+ALTER TABLE SD_PARAMS ADD (TEMP CLOB);
+UPDATE SD_PARAMS SET TEMP=PARAM_VALUE, PARAM_VALUE=NULL;
+ALTER TABLE SD_PARAMS DROP COLUMN PARAM_VALUE;
+ALTER TABLE SD_PARAMS RENAME COLUMN TEMP TO PARAM_VALUE;
 
 -- Expand the hive table name length to 256
 ALTER TABLE TBLS MODIFY (TBL_NAME VARCHAR2(256));
@@ -13,9 +28,9 @@ ALTER TABLE PART_COL_STATS MODIFY (TABLE_NAME VARCHAR2(256));
 ALTER TABLE COMPLETED_TXN_COMPONENTS MODIFY (CTC_TABLE VARCHAR2(256));
 
 -- Expand the hive column name length to 767
-ALTER TABLE COLUMNS_V2 MODIFY (COLUMN_NAME VARCHAR(767) NOT NULL);
+ALTER TABLE COLUMNS_V2 MODIFY (COLUMN_NAME VARCHAR(767));
 ALTER TABLE PART_COL_PRIVS MODIFY (COLUMN_NAME VARCHAR2(767));
 ALTER TABLE TBL_COL_PRIVS MODIFY (COLUMN_NAME VARCHAR2(767));
 ALTER TABLE SORT_COLS MODIFY (COLUMN_NAME VARCHAR2(767));
 ALTER TABLE TAB_COL_STATS MODIFY (COLUMN_NAME VARCHAR2(767));
-ALTER TABLE PART_COL_STATS MODIFY (COLUMN_NAME VARCHAR2(767) NOT NULL);
+ALTER TABLE PART_COL_STATS MODIFY (COLUMN_NAME VARCHAR2(767));
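
Beyond the CLOB conversions, the patch also drops the trailing NOT NULL from the two COLUMN_NAME statements. The commit message does not say why, but a plausible reading is that Oracle raises ORA-01442 ("column to be modified to NOT NULL is already NOT NULL") when MODIFY re-declares a constraint the column already carries, so widening the column without repeating the constraint leaves the existing NOT NULL intact and avoids the error. A hypothetical illustration (MY_TABLE and MY_COL are invented names):

-- Assuming MY_COL was created NOT NULL:
ALTER TABLE MY_TABLE MODIFY (MY_COL VARCHAR2(767) NOT NULL);  -- fails with ORA-01442
ALTER TABLE MY_TABLE MODIFY (MY_COL VARCHAR2(767));           -- widens the column, keeps NOT NULL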