This is an automated email from the ASF dual-hosted git repository.
krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 169d5d029d HIVE-25728: ParseException while gathering Column Stats
(Soumyakanti Das, reviewed by Krisztian Kasa)
169d5d029d is described below
commit 169d5d029df8ee6789b8ae45e4d921191b56eb7b
Author: Soumyakanti Das <[email protected]>
AuthorDate: Tue Apr 5 21:07:46 2022 -0700
HIVE-25728: ParseException while gathering Column Stats (Soumyakanti Das,
reviewed by Krisztian Kasa)
---
.../hive/ql/parse/ColumnStatsSemanticAnalyzer.java | 2 +-
.../clientpositive/columnstats_columnname_parse.q | 20 +++++++
.../llap/columnstats_columnname_parse.q.out | 68 ++++++++++++++++++++++
3 files changed, 89 insertions(+), 1 deletion(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index 6ee38998a4..6a00131b25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -259,7 +259,7 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
final TypeInfo typeInfo =
TypeInfoUtils.getTypeInfoFromTypeString(colTypes.get(i));
genComputeStats(rewrittenQueryBuilder, conf, i, columnName, typeInfo);
- columnNamesBuilder.append(unparseIdentifier(columnName, conf));
+ columnNamesBuilder.append(columnName);
columnDummyValuesBuilder.append(
"cast(null as " + typeInfo.toString() + ")");
diff --git a/ql/src/test/queries/clientpositive/columnstats_columnname_parse.q b/ql/src/test/queries/clientpositive/columnstats_columnname_parse.q
new file mode 100644
index 0000000000..a8be87b79e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/columnstats_columnname_parse.q
@@ -0,0 +1,20 @@
+
+CREATE TABLE table1(
+ t1_col1 bigint);
+
+ CREATE TABLE table2(
+ t2_col1 bigint,
+ t2_col2 int)
+ PARTITIONED BY (
+ t2_col3 date);
+
+insert into table1 values(1);
+insert into table2 values("1","1","1");
+
+set hive.support.quoted.identifiers=none;
+
+create external table ext_table STORED AS ORC tblproperties('compression'='snappy','external.table.purge'='true') as
+SELECT a.* ,d.`(t2_col1|t2_col3)?+.+`
+FROM table1 a
+LEFT JOIN (SELECT * FROM table2 where t2_col3 like '2021-01-%') d
+on a.t1_col1 = d.t2_col1;
diff --git a/ql/src/test/results/clientpositive/llap/columnstats_columnname_parse.q.out b/ql/src/test/results/clientpositive/llap/columnstats_columnname_parse.q.out
new file mode 100644
index 0000000000..4bedc8137f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/columnstats_columnname_parse.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: CREATE TABLE table1(
+ t1_col1 bigint)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: CREATE TABLE table1(
+ t1_col1 bigint)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: CREATE TABLE table2(
+ t2_col1 bigint,
+ t2_col2 int)
+ PARTITIONED BY (
+ t2_col3 date)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: CREATE TABLE table2(
+ t2_col1 bigint,
+ t2_col2 int)
+ PARTITIONED BY (
+ t2_col3 date)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: insert into table1 values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table1
+POSTHOOK: query: insert into table1 values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.t1_col1 SCRIPT []
+PREHOOK: query: insert into table2 values("1","1","1")
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table2
+POSTHOOK: query: insert into table2 values("1","1","1")
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table2
+POSTHOOK: Output: default@table2@t2_col3=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: table2 PARTITION(t2_col3=__HIVE_DEFAULT_PARTITION__).t2_col1 SCRIPT []
+POSTHOOK: Lineage: table2 PARTITION(t2_col3=__HIVE_DEFAULT_PARTITION__).t2_col2 SCRIPT []
+PREHOOK: query: create external table ext_table STORED AS ORC tblproperties('compression'='snappy','external.table.purge'='true') as
+SELECT a.* ,d.`(t2_col1|t2_col3)?+.+`
+FROM table1 a
+LEFT JOIN (SELECT * FROM table2 where t2_col3 like '2021-01-%') d
+on a.t1_col1 = d.t2_col1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@table1
+PREHOOK: Input: default@table2
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ext_table
+POSTHOOK: query: create external table ext_table STORED AS ORC tblproperties('compression'='snappy','external.table.purge'='true') as
+SELECT a.* ,d.`(t2_col1|t2_col3)?+.+`
+FROM table1 a
+LEFT JOIN (SELECT * FROM table2 where t2_col3 like '2021-01-%') d
+on a.t1_col1 = d.t2_col1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@table1
+POSTHOOK: Input: default@table2
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ext_table
+POSTHOOK: Lineage: ext_table.t1_col1 SIMPLE [(table1)a.FieldSchema(name:t1_col1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: ext_table.t2_col2 SIMPLE [(table2)table2.FieldSchema(name:t2_col2, type:int, comment:null), ]