hive git commit: HIVE-20672 : Logging thread in LlapTaskSchedulerService should report every fixed interval (Jaume M, reviewed by Sergey Shelukhin)
Repository: hive Updated Branches: refs/heads/master fe3a457d7 -> f0434c5b5 HIVE-20672 : Logging thread in LlapTaskSchedulerService should report every fixed interval (Jaume M, reviewed by Sergey Shelukhin) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f0434c5b Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f0434c5b Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f0434c5b Branch: refs/heads/master Commit: f0434c5b5c591c7c1af0840c4dd3b08e53614869 Parents: fe3a457 Author: sergey Authored: Thu Oct 4 16:46:41 2018 -0700 Committer: sergey Committed: Thu Oct 4 16:46:41 2018 -0700 -- .../hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java | 7 +++ 1 file changed, 3 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/f0434c5b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java -- diff --git a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java index b748c7e..7e8299d 100644 --- a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java +++ b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java @@ -732,9 +732,9 @@ public class LlapTaskSchedulerService extends TaskScheduler { } writeLock.lock(); try { - scheduledLoggingExecutor.schedule(new Callable() { + scheduledLoggingExecutor.scheduleAtFixedRate(new Runnable() { @Override -public Void call() throws Exception { +public void run() { readLock.lock(); try { if (dagRunning) { @@ -743,9 +743,8 @@ public class LlapTaskSchedulerService extends TaskScheduler { } finally { readLock.unlock(); } - return null; } - }, 1L, TimeUnit.MILLISECONDS); + }, 0, 1L, TimeUnit.MILLISECONDS); nodeEnablerFuture = nodeEnabledExecutor.submit(nodeEnablerCallable); Futures.addCallback(nodeEnablerFuture, new 
LoggingFutureCallback("NodeEnablerThread", LOG));
[1/2] hive git commit: HIVE-20556: Expose an API to retrieve the TBL_ID from TBLS in the metastore tables (Jaume Marhuenda via Eugene Koifman)
Repository: hive Updated Branches: refs/heads/master 97f0513c4 -> fe3a457d7 http://git-wip-us.apache.org/repos/asf/hive/blob/fe3a457d/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java -- diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 4937d9d..d6f0d8c 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java @@ -222,6 +222,9 @@ public abstract class TestHiveMetaStore { tbl = client.getTable(dbName, tblName); } + Assert.assertTrue(tbl.isSetId()); + tbl.unsetId(); + Partition part = makePartitionObject(dbName, tblName, vals, tbl, "/part1"); Partition part2 = makePartitionObject(dbName, tblName, vals2, tbl, "/part2"); Partition part3 = makePartitionObject(dbName, tblName, vals3, tbl, "/part3"); @@ -1272,6 +1275,7 @@ public abstract class TestHiveMetaStore { Table tbl2 = client.getTable(dbName, tblName); assertNotNull(tbl2); + Assert.assertTrue(tbl2.isSetId()); assertEquals(tbl2.getDbName(), dbName); assertEquals(tbl2.getTableName(), tblName); assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size()); @@ -1305,6 +1309,7 @@ public abstract class TestHiveMetaStore { assertTrue(fieldSchemasFull.contains(fs)); } + tbl2.unsetId(); client.createTable(tbl2); if (isThriftClient) { tbl2 = client.getTable(tbl2.getDbName(), tbl2.getTableName()); @@ -1664,6 +1669,56 @@ public abstract class TestHiveMetaStore { } @Test + public void testCreateAndGetTableWithDriver() throws Exception { +String dbName = "createDb"; +String tblName = "createTbl"; + +client.dropTable(dbName, tblName); +silentDropDatabase(dbName); +new DatabaseBuilder() +.setName(dbName) 
+.create(client, conf); + +createTable(dbName, tblName); +Table tblRead = client.getTable(dbName, tblName); +Assert.assertTrue(tblRead.isSetId()); +long firstTableId = tblRead.getId(); + +createTable(dbName, tblName + "_2"); +Table tblRead2 = client.getTable(dbName, tblName + "_2"); +Assert.assertTrue(tblRead2.isSetId()); +Assert.assertNotEquals(firstTableId, tblRead2.getId()); + } + + @Test + public void testCreateTableSettingId() throws Exception { +String dbName = "createDb"; +String tblName = "createTbl"; + +client.dropTable(dbName, tblName); +silentDropDatabase(dbName); +new DatabaseBuilder() +.setName(dbName) +.create(client, conf); + +Table table = new TableBuilder() +.setDbName(dbName) +.setTableName(tblName) +.addCol("foo", "string") +.addCol("bar", "string") +.build(conf); +table.setId(1); +try { + client.createTable(table); + Assert.fail("An error should happen when setting the id" + + " to create a table"); +} catch (InvalidObjectException e) { + Assert.assertTrue(e.getMessage().contains("Id shouldn't be set")); + Assert.assertTrue(e.getMessage().contains(tblName)); +} + } + + @Test public void testAlterTable() throws Exception { String dbName = "alterdb"; String invTblName = "alter-tbl"; http://git-wip-us.apache.org/repos/asf/hive/blob/fe3a457d/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java -- diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java index df83171..ebbd1c7 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java @@ -222,6 +222,8 @@ public class 
TestHiveMetaStorePartitionSpecs { clearAndRecreateDB(hmsc); createTable(hmsc, true); Table table = hmsc.getTable(dbName, tableName); + Assert.assertTrue(table.isSetId()); + table.unsetId(); populatePartitions(hmsc, table, Arrays.asList("isLocatedInTablePath", "isLocatedOutsideTablePath")); // Clone the table,
[2/2] hive git commit: HIVE-20556: Expose an API to retrieve the TBL_ID from TBLS in the metastore tables (Jaume Marhuenda via Eugene Koifman)
HIVE-20556: Expose an API to retrieve the TBL_ID from TBLS in the metastore tables (Jaume Marhuenda via Eugene Koifman) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fe3a457d Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fe3a457d Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fe3a457d Branch: refs/heads/master Commit: fe3a457d7e6b8b9387b8fc0165d2e9efd9c6b364 Parents: 97f0513 Author: Jaume Marhuenda Authored: Thu Oct 4 14:39:04 2018 -0700 Committer: Eugene Koifman Committed: Thu Oct 4 14:39:04 2018 -0700 -- data/files/exported_table/_metadata | 2 +- .../TestAuthorizationPreEventListener.java | 3 + .../TestMetastoreAuthorizationProvider.java | 3 + .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 1 + .../hadoop/hive/ql/metadata/TestHive.java | 5 + .../ql/metadata/TestHiveMetaStoreChecker.java | 5 + .../apache/hadoop/hive/metastore/api/Table.java | 357 --- .../src/gen/thrift/gen-php/metastore/Types.php | 139 +--- .../gen/thrift/gen-py/hive_metastore/ttypes.py | 133 +++ .../gen/thrift/gen-rb/hive_metastore_types.rb | 46 +-- .../src/main/thrift/hive_metastore.thrift | 42 +-- .../hadoop/hive/metastore/HiveMetaStore.java| 5 + .../hadoop/hive/metastore/ObjectStore.java | 2 + .../hadoop/hive/metastore/model/MTable.java | 10 +- .../src/main/resources/package.jdo | 8 +- .../hive/metastore/TestHiveMetaStore.java | 55 +++ .../TestHiveMetaStorePartitionSpecs.java| 2 + .../TestTablesCreateDropAlterTruncate.java | 11 + 18 files changed, 534 insertions(+), 295 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/fe3a457d/data/files/exported_table/_metadata -- diff --git a/data/files/exported_table/_metadata b/data/files/exported_table/_metadata index 81fbf63..8d8f8bb 100644 --- a/data/files/exported_table/_metadata +++ b/data/files/exported_table/_metadata @@ -1 +1 @@ 
-{"partitions":[],"table":"{\"1\":{\"str\":\"j1_41\"},\"2\":{\"str\":\"default\"},\"3\":{\"str\":\"johndee\"},\"4\":{\"i32\":1371900915},\"5\":{\"i32\":0},\"6\":{\"i32\":0},\"7\":{\"rec\":{\"1\":{\"lst\":[\"rec\",2,{\"1\":{\"str\":\"a\"},\"2\":{\"str\":\"string\"}},{\"1\":{\"str\":\"b\"},\"2\":{\"str\":\"int\"}}]},\"2\":{\"str\":\"hdfs://hivebase01:8020/user/hive/warehouse/j1_41\"},\"3\":{\"str\":\"org.apache.hadoop.mapred.TextInputFormat\"},\"4\":{\"str\":\"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat\"},\"5\":{\"tf\":0},\"6\":{\"i32\":-1},\"7\":{\"rec\":{\"2\":{\"str\":\"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\"},\"3\":{\"map\":[\"str\",\"str\",2,{\"serialization.format\":\",\",\"field.delim\":\",\"}]}}},\"8\":{\"lst\":[\"str\",0]},\"9\":{\"lst\":[\"rec\",0]},\"10\":{\"map\":[\"str\",\"str\",0,{}]}}},\"8\":{\"lst\":[\"rec\",0]},\"9\":{\"map\":[\"str\",\"str\",1,{\"transient_lastDdlTime\":\"1371900931\"}]},\"12\":{\"str\":\"MANAGED_TABLE\"}}","version":"0.1" } \ No newline at end of file 
+{"partitions":[],"table":"{\"2\":{\"str\":\"j1_41\"},\"3\":{\"str\":\"default\"},\"4\":{\"str\":\"johndee\"},\"5\":{\"i32\":1371900915},\"6\":{\"i32\":0},\"7\":{\"i32\":0},\"8\":{\"rec\":{\"1\":{\"lst\":[\"rec\",2,{\"1\":{\"str\":\"a\"},\"2\":{\"str\":\"string\"}},{\"1\":{\"str\":\"b\"},\"2\":{\"str\":\"int\"}}]},\"2\":{\"str\":\"hdfs://hivebase01:8020/user/hive/warehouse/j1_41\"},\"3\":{\"str\":\"org.apache.hadoop.mapred.TextInputFormat\"},\"4\":{\"str\":\"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat\"},\"5\":{\"tf\":0},\"6\":{\"i32\":-1},\"7\":{\"rec\":{\"2\":{\"str\":\"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\"},\"3\":{\"map\":[\"str\",\"str\",2,{\"serialization.format\":\",\",\"field.delim\":\",\"}]}}},\"8\":{\"lst\":[\"str\",0]},\"9\":{\"lst\":[\"rec\",0]},\"10\":{\"map\":[\"str\",\"str\",0,{}]}}},\"9\":{\"lst\":[\"rec\",0]},\"10\":{\"map\":[\"str\",\"str\",1,{\"transient_lastDdlTime\":\"1371900931\"}]},\"13\":{\"str\":\"MANAGED_TABLE\"}}","version":"0.1 "} http://git-wip-us.apache.org/repos/asf/hive/blob/fe3a457d/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java index 05c0009..1f6ec27 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
[2/3] hive git commit: HIVE-20563: Vectorization: CASE WHEN expression fails when THEN/ELSE type and result type are different (Matt McCline, reviewed by Teddy Choi)
http://git-wip-us.apache.org/repos/asf/hive/blob/97f0513c/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out -- diff --git a/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out index c64adbf..b11ad87 100644 --- a/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out +++ b/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out @@ -140,23 +140,46 @@ STAGE PLANS: TableScan alias: timestamps Statistics: Num rows: 51 Data size: 12597 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct] Select Operator expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.9') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.9')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.9')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.9') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.9')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.9')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.9') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.9')) THEN (null) ELSE (null) END (type: string), 
if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 +Select Vectorization: +className: VectorSelectOperator +native: true +projectedOutputColumnNums: [1, 3, 9, 10, 11, 8, 12, 7, 6, 17, 2] +selectExpressions: VectorUDFAdaptor(CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.9') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.9')) THEN ('Early 2010s') ELSE ('Unknown') END)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00) -> 5:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00) -> 6:boolean, TimestampColumnBetween(col 3:timestamp, left 2005-12-31 16:00:00.0, right 2010-12-31 15:59:59.9) -> 7:boolean, TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.9) -> 8:boolean) -> 9:string, VectorUDFAdaptor(CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.9')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.9') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31
23:59:59.9')) THEN ('Early 2010s') ELSE (null) END)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.9) -> 5:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 6:boolean, TimestampColumnBetween(col 3:timestamp, left 2005-12-31 16:00:00.0, right 2010-12-31 15:59:59.9) -> 7:boolean, TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.9) -> 8:boolean) ->
[1/3] hive git commit: HIVE-20563: Vectorization: CASE WHEN expression fails when THEN/ELSE type and result type are different (Matt McCline, reviewed by Teddy Choi)
Repository: hive Updated Branches: refs/heads/master 857259ed0 -> 97f0513c4 http://git-wip-us.apache.org/repos/asf/hive/blob/97f0513c/ql/src/test/results/clientpositive/vector_case_when_1.q.out -- diff --git a/ql/src/test/results/clientpositive/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/vector_case_when_1.q.out index 270f5eb..9949de7 100644 --- a/ql/src/test/results/clientpositive/vector_case_when_1.q.out +++ b/ql/src/test/results/clientpositive/vector_case_when_1.q.out @@ -202,23 +202,44 @@ STAGE PLANS: TableScan alias: lineitem_test Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE +TableScan Vectorization: +native: true +vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct] Select Operator expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - 
l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 1), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 1), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [4, 21, 22, 23, 20, 24, 25, 27, 28, 29, 30, 31, 32, 35, 37, 38, 19] + selectExpressions: VectorUDFAdaptor(CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string, VectorUDFAdaptor(CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END)(children: LongColEqualLongScalar(col 4:int, val 1) ->
17:boolean, LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 22:string, VectorUDFAdaptor(CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 23:string,
[3/3] hive git commit: HIVE-20563: Vectorization: CASE WHEN expression fails when THEN/ELSE type and result type are different (Matt McCline, reviewed by Teddy Choi)
HIVE-20563: Vectorization: CASE WHEN expression fails when THEN/ELSE type and result type are different (Matt McCline, reviewed by Teddy Choi) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/97f0513c Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/97f0513c Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/97f0513c Branch: refs/heads/master Commit: 97f0513c4c8ff1c251b1bdd1b84bd238557f03b0 Parents: 857259e Author: Matt McCline Authored: Thu Oct 4 14:37:21 2018 -0500 Committer: Matt McCline Committed: Thu Oct 4 14:37:21 2018 -0500 -- .../test/resources/testconfiguration.properties | 1 + .../ql/exec/vector/VectorizationContext.java| 92 ++- .../expressions/CastTimestampToString.java | 10 +- .../hive/ql/optimizer/physical/Vectorizer.java | 15 +- .../exec/vector/TestVectorizationContext.java | 31 +- .../vector_case_when_conversion.q | 136 .../llap/vector_case_when_1.q.out | 36 +- .../llap/vector_case_when_2.q.out | 45 +- .../llap/vector_case_when_conversion.q.out | 616 +++ .../llap/vector_decimal_expressions.q.out | 2 +- .../llap/vector_udf_adaptor_1.q.out | 52 +- .../clientpositive/llap/vectorized_case.q.out | 12 +- .../clientpositive/spark/vectorized_case.q.out | 12 +- .../clientpositive/vector_case_when_1.q.out | 35 +- .../clientpositive/vector_case_when_2.q.out | 39 +- .../vector_decimal_expressions.q.out| 2 +- .../clientpositive/vectorized_case.q.out| 12 +- 17 files changed, 1061 insertions(+), 87 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/97f0513c/itests/src/test/resources/testconfiguration.properties -- diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index fdd8ecc..d444c99 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -763,6 +763,7 @@ minillaplocal.query.files=\ vector_acid4.q,\ 
vector_annotate_stats_select.q,\ vector_auto_smb_mapjoin_14.q,\ + vector_case_when_conversion.q,\ vector_char_varchar_1.q,\ vector_complex_all.q,\ vector_complex_join.q,\ http://git-wip-us.apache.org/repos/asf/hive/blob/97f0513c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 6ca1248..488f277 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -546,6 +546,7 @@ public class VectorizationContext { private final int initialOutputCol; private int outputColCount = 0; private boolean reuseScratchColumns = true; +private boolean dontReuseTrackedScratchColumns = false; protected OutputColumnManager(int initialOutputCol) { this.initialOutputCol = initialOutputCol; @@ -558,6 +559,7 @@ public class VectorizationContext { private String[] scratchVectorTypeNames = new String[100]; private DataTypePhysicalVariation[] scratchDataTypePhysicalVariations = new DataTypePhysicalVariation[100]; +private boolean[] scratchColumnTrackWasUsed = new boolean[100]; private final Set usedOutputColumns = new HashSet(); @@ -589,6 +591,9 @@ public class VectorizationContext { scratchDataTypePhysicalVariations[i] == dataTypePhysicalVariation)) { continue; } +if (dontReuseTrackedScratchColumns && scratchColumnTrackWasUsed[i]) { + continue; +} //Use i usedOutputColumns.add(i); return i; @@ -597,16 +602,19 @@ public class VectorizationContext { if (outputColCount < scratchVectorTypeNames.length) { int newIndex = outputColCount; scratchVectorTypeNames[outputColCount] = columnType; -scratchDataTypePhysicalVariations[outputColCount++] = dataTypePhysicalVariation; +scratchDataTypePhysicalVariations[outputColCount] = dataTypePhysicalVariation; 
+scratchColumnTrackWasUsed[outputColCount++] = true; usedOutputColumns.add(newIndex); return newIndex; } else { //Expand the array scratchVectorTypeNames = Arrays.copyOf(scratchVectorTypeNames, 2*outputColCount); scratchDataTypePhysicalVariations = Arrays.copyOf(scratchDataTypePhysicalVariations, 2*outputColCount); +
hive git commit: HIVE-20545: Exclude large-sized parameters from serialization of Table and Partition thrift objects in HMS notifications (Bharath Krishna, reviewed by Andrew Sherman)
Repository: hive Updated Branches: refs/heads/master d0ed25e3b -> 857259ed0 HIVE-20545: Exclude large-sized parameters from serialization of Table and Partition thrift objects in HMS notifications (Bharath Krishna, reviewed by Andrew Sherman) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/857259ed Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/857259ed Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/857259ed Branch: refs/heads/master Commit: 857259ed08aaf31e198bdeb25540ec16ef6cc3e6 Parents: d0ed25e Author: Bharath Krishna Authored: Thu Oct 4 09:36:01 2018 -0700 Committer: Andrew Sherman Committed: Thu Oct 4 10:01:05 2018 -0700 -- .../hive/metastore/conf/MetastoreConf.java | 28 ++ .../hive/metastore/utils/MetaStoreUtils.java| 42 + .../metastore/messaging/MessageFactory.java | 13 +-- .../messaging/json/JSONMessageFactory.java | 31 +++ .../utils/TestMetaStoreServerUtils.java | 89 +++- 5 files changed, 194 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/857259ed/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java -- diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 946f644..7b01678 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -516,6 +516,12 @@ public class MetastoreConf { "hive.metastore.event.message.factory", "org.apache.hadoop.hive.metastore.messaging.json.JSONMessageFactory", "Factory class for making encoding and decoding messages in the events generated."), + 
EVENT_NOTIFICATION_PARAMETERS_EXCLUDE_PATTERNS("metastore.notification.parameters.exclude.patterns", +"hive.metastore.notification.parameters.exclude.patterns", "", +"List of comma-separated regexes that are used to reduced the size of HMS Notification messages." ++ " The regexes are matched against each key of parameters map in Table or Partition object" ++ "present in HMS Notification. Any key-value pair whose key is matched with any regex will" ++" be removed from Parameters map during Serialization of Table/Partition object."), EVENT_DB_LISTENER_TTL("metastore.event.db.listener.timetolive", "hive.metastore.event.db.listener.timetolive", 86400, TimeUnit.SECONDS, "time after which events will be removed from the database listener queue"), @@ -1410,6 +1416,28 @@ public class MetastoreConf { } /** + * Get values from comma-separated config, to an array after extracting individual values. + * @param conf Configuration to retrieve it from + * @param var variable to retrieve + * @return Array of String, containing each value from the comma-separated config, + * or default value if value not in config file + */ + public static String[] getTrimmedStringsVar(Configuration conf, ConfVars var) { +assert var.defaultVal.getClass() == String.class; +String[] result = conf.getTrimmedStrings(var.varname, (String[]) null); +if (result != null) { + return result; +} +if (var.hiveName != null) { + result = conf.getTrimmedStrings(var.hiveName, (String[]) null); + if (result != null) { +return result; + } +} +return org.apache.hadoop.util.StringUtils.getTrimmedStrings((String) var.getDefaultVal()); + } + + /** * Set the variable as a boolean * @param conf configuration file to set it in * @param var variable to set http://git-wip-us.apache.org/repos/asf/hive/blob/857259ed/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java -- diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java index a92f34b..720ec71 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java @@ -56,8 +56,13 @@ import java.util.List; import java.util.Map; import
hive git commit: HIVE-20544: TOpenSessionReq logs password and username (Karen Coppage, reviewed by Andrew Sherman and Peter Vary)
Repository: hive Updated Branches: refs/heads/master 1cfe4f913 -> d0ed25e3b HIVE-20544: TOpenSessionReq logs password and username (Karen Coppage, reviewed by Andrew Sherman and Peter Vary) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d0ed25e3 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d0ed25e3 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d0ed25e3 Branch: refs/heads/master Commit: d0ed25e3b6ba357f36ef7ee1d86fcc82974f13b2 Parents: 1cfe4f9 Author: Peter Vary Authored: Thu Oct 4 18:52:42 2018 +0200 Committer: Peter Vary Committed: Thu Oct 4 18:52:42 2018 +0200 -- .../thrift/TestThriftCLIServiceSecurity.java| 44 ++ service-rpc/pom.xml | 60 +--- .../service/rpc/thrift/TOpenSessionReq.java | 7 +-- 3 files changed, 85 insertions(+), 26 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/d0ed25e3/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceSecurity.java -- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceSecurity.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceSecurity.java new file mode 100644 index 000..040c694 --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceSecurity.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.service.cli.thrift; + +import static org.junit.Assert.assertFalse; + +import org.apache.hive.service.rpc.thrift.TOpenSessionReq; +import org.junit.Test; + +/** + * Test security in classes generated by Thrift. + */ +public class TestThriftCLIServiceSecurity { + + /** + * Ensures password isn't printed to logs from TOpenSessionReq.toString(). + * See maven-replacer-plugin code in service-rpc/pom.xml. + * + * @throws Exception + */ + @Test + public void testPasswordNotInLogs() throws Exception { +String PASSWORD = "testpassword"; +TOpenSessionReq tOpenSessionReq = new TOpenSessionReq(); +tOpenSessionReq.setPassword(PASSWORD); +assertFalse(tOpenSessionReq.toString().contains(PASSWORD)); + } + +} http://git-wip-us.apache.org/repos/asf/hive/blob/d0ed25e3/service-rpc/pom.xml -- diff --git a/service-rpc/pom.xml b/service-rpc/pom.xml index d6a07a5..2f9ef45 100644 --- a/service-rpc/pom.xml +++ b/service-rpc/pom.xml @@ -121,29 +121,47 @@ replace + + ${basedir}/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/ + *.java + + + public class + @org.apache.hadoop.hive.common.classification.InterfaceAudience.Public @org.apache.hadoop.hive.common.classification.InterfaceStability.Stable public class + true + + + public static class + @org.apache.hadoop.hive.common.classification.InterfaceAudience.Public @org.apache.hadoop.hive.common.classification.InterfaceStability.Stable public static class + true + + + public interface + @org.apache.hadoop.hive.common.classification.InterfaceAudience.Public 
@org.apache.hadoop.hive.common.classification.InterfaceStability.Stable public interface + true + + + + + +mask-password +process-sources + + replace + + +
[1/2] hive git commit: HIVE-17300: WebUI query plan graphs (Karen Coppage, reviewed by Szehon Ho and Peter Vary)
Repository: hive Updated Branches: refs/heads/master 6c34a3742 -> 1cfe4f913 http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/service/src/resources/hive-webapps/static/js/vis.min.js -- diff --git a/service/src/resources/hive-webapps/static/js/vis.min.js b/service/src/resources/hive-webapps/static/js/vis.min.js new file mode 100644 index 000..09730da --- /dev/null +++ b/service/src/resources/hive-webapps/static/js/vis.min.js @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * vis.js + * https://github.com/almende/vis + * + * A dynamic, browser-based visualization library. + * + * @version 4.20.0 + * @date2017-05-21 + * + * @license + * Copyright (C) 2011-2017 Almende B.V, http://almende.com + * + * Vis.js is dual licensed under both + * + * * The Apache 2.0 License + * http://www.apache.org/licenses/LICENSE-2.0 + * + * and + * + * * The MIT License + * http://opensource.org/licenses/MIT + * + * Vis.js may be distributed under either license. 
+ */ +"use strict";!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&?define([],e):"object"==typeof exports?exports.vis=e():t.vis=e()}(this,function(){return function(t){function e(o){if(i[o])return i[o].exports;var n=i[o]={exports:{},id:o,loaded:!1};return t[o].call(n.exports,n,n.exports,e),n.loaded=!0,n.exports}var i={};return e.m=t,e.c=i,e.p="",e(0)}([function(t,e,i){var o=i(1);o.extend(e,i(87)),o.extend(e,i(116)),o.extend(e,i(158))},function(t,e,i){function o(t){return t&__esModule?t:{default:t}}var n=i(2),s=o(n),r=i(55),a=o(r),h=i(58),d=o(h),l=i(62),u=o(l),c=i(82),p=i(86);e.isNumber=function(t){return t instanceof Number||"number"==typeof t},e.recursiveDOMDelete=function(t){if(t)for(;!0===t.hasChildNodes();)e.recursiveDOMDelete(t.firstChild),t.removeChild(t.firstChild)},e.giveRange=function(t,e,i,o){if(e==t)return.5;var n=1/(e-t);return Math.max(0,(o-t)*n)},e.isString=function(t){return t instanceof String||"st ring"==typeof t},e.isDate=function(t){if(t instanceof Date)return!0;if(e.isString(t)){if(f.exec(t))return!0;if(!isNaN(Date.parse(t)))return!0}return!1},e.randomUUID=function(){return p.v4()},e.assignAllKeys=function(t,e){for(var i in t)t.hasOwnProperty(i)&&"object"!==(0,u.default)(t[i])&&(t[i]=e)},e.fillIfDefined=function(t,i){var o=arguments.length>2& 0!==arguments[2]&[2];for(var n in t)void 0!==i[n]&&("object"!==(0,u.default)(i[n])?void 0!==i[n]&!==i[n]||void 0===t[n]||!0!==o?t[n]=i[n]:delete t[n]:"object"===(0,u.default)(t[n])&(t[n],i[n],o))},e.protoExtend=function(t,e){for(var i=1;i3& 0!==arguments[3]&[3];if(Array.isArray(o))throw new TypeError("Arrays are not supported by deepExtend");for(var s=2;s3& 0!==arguments[3]&[3];if(Array.isArray(o))throw new TypeError("Arrays are not supported by deepExtend");for(var s in o)if(o.hasOwnProperty(s)&&-1==t.indexOf(s))if(o[s]&[s].constructor ===Object)void 0===i[s]&&(i[s]={}),i[s].constructor===Object?e.deepExtend(i[s],o[s]):null===o[s]& 
0!==i[s]&&!0===n?delete i[s]:i[s]=o[s];else if(Array.isArray(o[s])){i[s]=[];for(var r=0;r=0&&(e="DOMMouseScroll"),t.addEventListener(e,i,o)):t.attachEvent("on"+e,i)},e.removeEventListener=function(t,e,i,o){t.removeEventListener?(void 0===o&&(o=!1),"mousewheel"===e&("Firefox")>=0&&(e="DOMMouseScroll"),t.removeEventListener(e,i,o)):t.detachEvent("on"+e,i)},e.preventDefault=function(t){t||(t=window.event),t.preventDefault?t.preventDefault():t.returnValue=!1},e.getTarget=function(t){t||(t=window.event);var e;return t.target?e=t.target:t.srcElement&&(e=t.srcElement),void 0!=e.nodeType&&3==e.nodeType&&(e=e.parentNode),e},e.hasParent=function(t,e){for(var i=t;i;){if(i===e)return!0;i=i.parentNode}return!1},e.option={},e.option.asBoolean=function(t,e){return"function"==typeof t&&(t=t()),null!=t?0!=t:e||null},e.option.asNumber=function(t,e){return"function"==typeof t&&(t=t()),null!=t?Number(t)||e||null:e||null},e.option.asString=function(t,e){return"function"==typeof t&&(t=t()),null!=t?String(t):e||null},e.option.asSize=function(t ,i){return"function"==typeof
[2/2] hive git commit: HIVE-17300: WebUI query plan graphs (Karen Coppage, reviewed by Szehon Ho and Peter Vary)
HIVE-17300: WebUI query plan graphs (Karen Coppage, reviewed by Szehon Ho and Peter Vary) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1cfe4f91 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1cfe4f91 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1cfe4f91 Branch: refs/heads/master Commit: 1cfe4f913b7806707259c0894ddb991d2e3ddae4 Parents: 6c34a37 Author: Karen Coppage Authored: Thu Oct 4 18:44:24 2018 +0200 Committer: Peter Vary Committed: Thu Oct 4 18:44:24 2018 +0200 -- .../org/apache/hadoop/hive/common/LogUtils.java | 25 + .../org/apache/hadoop/hive/conf/HiveConf.java | 11 + .../service/cli/session/TestQueryDisplay.java | 83 +++ .../java/org/apache/hadoop/hive/ql/Driver.java | 20 +- .../org/apache/hadoop/hive/ql/MapRedStats.java | 12 +- .../org/apache/hadoop/hive/ql/QueryDisplay.java | 82 +++ .../org/apache/hadoop/hive/ql/QueryInfo.java| 10 + .../apache/hadoop/hive/ql/exec/ExplainTask.java | 5 +- .../hive/ql/exec/mr/HadoopJobExecHelper.java| 34 +- .../hadoop/hive/ql/exec/mr/MapRedTask.java | 16 + .../org/apache/hive/tmpl/QueryProfileTmpl.jamon | 91 +++- .../service/cli/operation/SQLOperation.java | 3 + .../static/css/query-plan-graph.css | 22 + .../hive-webapps/static/js/query-plan-graph.js | 533 +++ .../resources/hive-webapps/static/js/vis.min.js | 63 +++ 15 files changed, 975 insertions(+), 35 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/common/src/java/org/apache/hadoop/hive/common/LogUtils.java -- diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 5068eb5..874a3e1 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -29,7 +29,10 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.appender.FileAppender; +import org.apache.logging.log4j.core.appender.RollingFileAppender; import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender; import org.apache.logging.log4j.core.appender.routing.RoutingAppender; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.LoggerConfig; @@ -231,6 +234,28 @@ public class LogUtils { } /** + * Get path of the log file for user to see on the WebUI. + */ + public static String getLogFilePath() { +String logFilePath = null; +org.apache.logging.log4j.Logger rootLogger = LogManager.getRootLogger(); +if (rootLogger instanceof org.apache.logging.log4j.core.Logger) { + org.apache.logging.log4j.core.Logger coreLogger = + (org.apache.logging.log4j.core.Logger)rootLogger; + for (Appender appender : coreLogger.getAppenders().values()) { +if (appender instanceof FileAppender) { + logFilePath = ((FileAppender) appender).getFileName(); +} else if (appender instanceof RollingFileAppender) { + logFilePath = ((RollingFileAppender) appender).getFileName(); +} else if (appender instanceof RollingRandomAccessFileAppender) { + logFilePath = ((RollingRandomAccessFileAppender) appender).getFileName(); +} + } +} +return logFilePath; + } + + /** * Stop the subordinate appender for the operation log so it will not leak a file descriptor. 
* @param routingAppenderName the name of the RoutingAppender * @param queryId the id of the query that is closing http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java -- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 799fc05..58951ef 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3140,6 +3140,17 @@ public class HiveConf extends Configuration { HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT("hive.server2.webui.explain.output", false, "When set to true, the EXPLAIN output for every query is displayed" + " in the HS2 WebUI / Drilldown / Query Plan tab.\n"), +HIVE_SERVER2_WEBUI_SHOW_GRAPH("hive.server2.webui.show.graph", false, +"Set this to true to to display query plan as a graph instead of text in the WebUI. " + +
hive git commit: HIVE-20535: Add new configuration to set the size of the global compile lock (Denys Kuzmenko, reviewed by Zoltan Haindrich and Peter Vary)
Repository: hive Updated Branches: refs/heads/master ec4a28bde -> 6c34a3742 HIVE-20535: Add new configuration to set the size of the global compile lock (Denys Kuzmenko, reviewed by Zoltan Haindrich and Peter Vary) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6c34a374 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6c34a374 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6c34a374 Branch: refs/heads/master Commit: 6c34a3742035d963a2690f0249a21da92f2196a0 Parents: ec4a28b Author: denys kuzmenko Authored: Thu Oct 4 18:22:31 2018 +0200 Committer: Peter Vary Committed: Thu Oct 4 18:23:35 2018 +0200 -- .../org/apache/hadoop/hive/conf/HiveConf.java | 5 +- .../java/org/apache/hadoop/hive/ql/Driver.java | 107 ++ .../apache/hadoop/hive/ql/lock/CompileLock.java | 129 .../hadoop/hive/ql/lock/CompileLockFactory.java | 128 .../apache/hadoop/hive/ql/CompileLockTest.java | 329 +++ 5 files changed, 613 insertions(+), 85 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/6c34a374/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java -- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 531fabd..799fc05 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3100,6 +3100,8 @@ public class HiveConf extends Configuration { "Bind host on which to run the HiveServer2 Thrift service."), HIVE_SERVER2_PARALLEL_COMPILATION("hive.driver.parallel.compilation", false, "Whether to\n" + "enable parallel compilation of the queries between sessions and within the same session on HiveServer2. The default is false."), + HIVE_SERVER2_PARALLEL_COMPILATION_LIMIT("hive.driver.parallel.compilation.global.limit", -1, "Determines the " + +"degree of parallelism for compilation queries between sessions on HiveServer2. 
The default is -1."), HIVE_SERVER2_COMPILE_LOCK_TIMEOUT("hive.server2.compile.lock.timeout", "0s", new TimeValidator(TimeUnit.SECONDS), "Number of seconds a request will wait to acquire the compile lock before giving up. " + @@ -4447,7 +4449,8 @@ public class HiveConf extends Configuration { + ",fs.s3a.secret.key" + ",fs.s3a.proxy.password" + ",dfs.adls.oauth2.credential" -+ ",fs.adl.oauth2.credential", ++ ",fs.adl.oauth2.credential" ++ ",hive.driver.parallel.compilation.global.limit", "Comma separated list of configuration options which should not be read by normal user like passwords"), HIVE_CONF_INTERNAL_VARIABLE_LIST("hive.conf.internal.variable.list", "hive.added.files.path,hive.added.jars.path,hive.added.archives.path", http://git-wip-us.apache.org/repos/asf/hive/blob/6c34a374/ql/src/java/org/apache/hadoop/hive/ql/Driver.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index dad2035..95619a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -36,7 +36,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; @@ -94,6 +93,8 @@ import org.apache.hadoop.hive.ql.hooks.PrivateHookContext; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.AcidUtils; +import org.apache.hadoop.hive.ql.lock.CompileLock; +import org.apache.hadoop.hive.ql.lock.CompileLockFactory; import org.apache.hadoop.hive.ql.lockmgr.HiveLock; import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; @@ -170,7 +171,6 @@ public class Driver implements IDriver { ByteStream.Output bos = new 
ByteStream.Output(); private final HiveConf conf; - private final boolean isParallelEnabled; private DataInput resStream; private Context ctx; private DriverContext driverCxt; @@ -451,8 +451,6 @@ public class Driver implements IDriver { public Driver(QueryState queryState, String userName, QueryInfo queryInfo, HiveTxnManager txnMgr) { this.queryState = queryState; this.conf = queryState.getConf(); -isParallelEnabled = (conf != null) -&& HiveConf.getBoolVar(conf,
hive git commit: HIVE-20691: Fix org.apache.hadoop.hive.cli.TestMiniLlapCliDriver.testCliDriver[cttl] (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)
Repository: hive Updated Branches: refs/heads/master e96ea6631 -> ec4a28bde HIVE-20691: Fix org.apache.hadoop.hive.cli.TestMiniLlapCliDriver.testCliDriver[cttl] (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ec4a28bd Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ec4a28bd Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ec4a28bd Branch: refs/heads/master Commit: ec4a28bde840fc474b301c68d85c3bed88129180 Parents: e96ea66 Author: Jesus Camacho Rodriguez Authored: Thu Oct 4 09:18:05 2018 -0700 Committer: Jesus Camacho Rodriguez Committed: Thu Oct 4 09:20:49 2018 -0700 -- ql/src/test/queries/clientpositive/cttl.q | 2 +- ql/src/test/results/clientpositive/llap/cttl.q.out | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/ec4a28bd/ql/src/test/queries/clientpositive/cttl.q -- diff --git a/ql/src/test/queries/clientpositive/cttl.q b/ql/src/test/queries/clientpositive/cttl.q index b3da8ec..86cd98f 100644 --- a/ql/src/test/queries/clientpositive/cttl.q +++ b/ql/src/test/queries/clientpositive/cttl.q @@ -21,7 +21,7 @@ create external table students( create temporary table temp1 like students; insert into table temp1 select * from students; -select * from temp1 order by name limit 10; +select * from temp1 order by name, age limit 10; drop table students; dfs -ls hdfs:///tmp/hive19577_cttl/; http://git-wip-us.apache.org/repos/asf/hive/blob/ec4a28bd/ql/src/test/results/clientpositive/llap/cttl.q.out -- diff --git a/ql/src/test/results/clientpositive/llap/cttl.q.out b/ql/src/test/results/clientpositive/llap/cttl.q.out index 8203b80..fe8fed5 100644 --- a/ql/src/test/results/clientpositive/llap/cttl.q.out +++ b/ql/src/test/results/clientpositive/llap/cttl.q.out @@ -47,11 +47,11 @@ POSTHOOK: Output: default@temp1 POSTHOOK: Lineage: temp1.age SIMPLE 
[(students)students.FieldSchema(name:age, type:int, comment:null), ] POSTHOOK: Lineage: temp1.gpa SIMPLE [(students)students.FieldSchema(name:gpa, type:double, comment:null), ] POSTHOOK: Lineage: temp1.name SIMPLE [(students)students.FieldSchema(name:name, type:string, comment:null), ] -PREHOOK: query: select * from temp1 order by name limit 10 +PREHOOK: query: select * from temp1 order by name, age limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@temp1 PREHOOK: Output: hdfs://### HDFS PATH ### -POSTHOOK: query: select * from temp1 order by name limit 10 +POSTHOOK: query: select * from temp1 order by name, age limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@temp1 POSTHOOK: Output: hdfs://### HDFS PATH ###
hive git commit: HIVE-20552: Get Schema from LogicalPlan faster (Teddy Choi, reviewed by Jesus Camacho Rodriguez)
Repository: hive Updated Branches: refs/heads/branch-3 50cc61154 -> 3936f8227 HIVE-20552: Get Schema from LogicalPlan faster (Teddy Choi, reviewed by Jesus Camacho Rodriguez) Signed-off-by: Teddy Choi Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3936f822 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3936f822 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3936f822 Branch: refs/heads/branch-3 Commit: 3936f8227ed733a50e547ed6ac3d566b1d28f78a Parents: 50cc611 Author: Teddy Choi Authored: Thu Oct 4 15:45:25 2018 +0900 Committer: Teddy Choi Committed: Thu Oct 4 15:45:25 2018 +0900 -- .../metadata/HiveMaterializedViewsRegistry.java | 31 .../apache/hadoop/hive/ql/parse/ParseUtils.java | 28 ++ .../ql/udf/generic/GenericUDTFGetSplits.java| 29 -- 3 files changed, 53 insertions(+), 35 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hive/blob/3936f822/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java index 696227b..a8856a9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java @@ -52,8 +52,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.ql.Context; -import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveTypeSystemImpl; @@ -61,11 +59,8 @@ import 
org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; -import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.CalcitePlanner; -import org.apache.hadoop.hive.ql.parse.ColumnStatsList; import org.apache.hadoop.hive.ql.parse.ParseUtils; -import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; import org.apache.hadoop.hive.ql.parse.RowResolver; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.SerDeException; @@ -224,10 +219,12 @@ public final class HiveMaterializedViewsRegistry { " ignored; error creating view replacement"); return null; } -final RelNode queryRel = parseQuery(conf, viewQuery); -if (queryRel == null) { +final RelNode queryRel; +try { + queryRel = ParseUtils.parseQuery(conf, viewQuery); +} catch (Exception e) { LOG.warn("Materialized view " + materializedViewTable.getCompleteName() + - " ignored; error parsing original query"); + " ignored; error parsing original query; " + e); return null; } @@ -400,24 +397,6 @@ public final class HiveMaterializedViewsRegistry { return tableRel; } - private static RelNode parseQuery(HiveConf conf, String viewQuery) { -try { - final ASTNode node = ParseUtils.parse(viewQuery); - final QueryState qs = - new QueryState.Builder().withHiveConf(conf).build(); - CalcitePlanner analyzer = new CalcitePlanner(qs); - Context ctx = new Context(conf); - ctx.setIsLoadingMaterializedView(true); - analyzer.initCtx(ctx); - analyzer.init(false); - return analyzer.genLogicalPlan(node); -} catch (Exception e) { - // We could not parse the view - LOG.error("Error parsing original query for materialized view", e); - return null; -} - } - private static TableType obtainTableType(Table tabMetaData) { if 
(tabMetaData.getStorageHandler() != null) { final String storageHandlerStr = tabMetaData.getStorageHandler().toString(); http://git-wip-us.apache.org/repos/asf/hive/blob/3936f822/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java -- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java index 89e8412..be1c59f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java +++