This is an automated email from the ASF dual-hosted git repository.
dengzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 4e7b6176409 HIVE-25813: Create Table x Like commands based storage
handlers fail over is fixed (#3627)
4e7b6176409 is described below
commit 4e7b617640912d66b95de29a276577c3a34ea81a
Author: Sai Hemanth Gantasala
<[email protected]>
AuthorDate: Mon Oct 31 18:08:15 2022 -0700
HIVE-25813: Create Table x Like commands based storage handlers fail over
is fixed (#3627)
---
.../java/org/apache/hadoop/hive/conf/HiveConf.java | 5 ++
.../src/test/results/positive/hbase_queries.q.out | 19 +++--
.../create/like/CreateTableLikeOperation.java | 42 +++------
.../queries/clientpositive/avro_extschema_insert.q | 5 +-
ql/src/test/queries/clientpositive/create_like2.q | 24 ++++++
.../llap/avro_extschema_insert.q.out | 47 +++++++++-
.../results/clientpositive/llap/create_like.q.out | 28 ------
.../results/clientpositive/llap/create_like2.q.out | 99 +++++++++++++++++++++-
.../llap/create_like_tbl_props.q.out | 2 -
.../clientpositive/llap/create_like_view.q.out | 6 --
.../clientpositive/llap/partition_discovery.q.out | 1 -
11 files changed, 198 insertions(+), 80 deletions(-)
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 1e06e0a956b..9528f22b707 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1510,6 +1510,11 @@ public class HiveConf extends Configuration {
NEWTABLEDEFAULTPARA("hive.table.parameters.default", "",
"Default property values for newly created tables"),
+ /**
+ * @deprecated With HIVE-25813 table properties of source tables will not
be copied over to dest table.
+ * see HIVE-25813 jira for details..
+ */
+ @Deprecated
DDL_CTL_PARAMETERS_WHITELIST("hive.ddl.createtablelike.properties.whitelist",
"",
"Table Properties to copy over when executing a Create Table Like."),
/**
diff --git a/hbase-handler/src/test/results/positive/hbase_queries.q.out
b/hbase-handler/src/test/results/positive/hbase_queries.q.out
index 48fed665fd9..5be5360de24 100644
--- a/hbase-handler/src/test/results/positive/hbase_queries.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_queries.q.out
@@ -925,6 +925,9 @@ POSTHOOK: query: INSERT OVERWRITE TABLE hbase_table_3_like
SELECT * FROM hbase_t
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table_3
POSTHOOK: Output: default@hbase_table_3_like
+POSTHOOK: Lineage: hbase_table_3_like.count SIMPLE
[(hbase_table_3)hbase_table_3.FieldSchema(name:count, type:int, comment:), ]
+POSTHOOK: Lineage: hbase_table_3_like.key SIMPLE
[(hbase_table_3)hbase_table_3.FieldSchema(name:key, type:int, comment:), ]
+POSTHOOK: Lineage: hbase_table_3_like.value SIMPLE
[(hbase_table_3)hbase_table_3.FieldSchema(name:value, type:string, comment:), ]
PREHOOK: query: SELECT * FROM hbase_table_3_like ORDER BY key, value LIMIT 5
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table_3_like
@@ -968,6 +971,8 @@ POSTHOOK: query: INSERT OVERWRITE TABLE hbase_table_1_like
SELECT * FROM hbase_t
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table_1
POSTHOOK: Output: default@hbase_table_1_like
+POSTHOOK: Lineage: hbase_table_1_like.key SIMPLE
[(hbase_table_1)hbase_table_1.FieldSchema(name:key, type:int, comment:It is a
column key), ]
+POSTHOOK: Lineage: hbase_table_1_like.value SIMPLE
[(hbase_table_1)hbase_table_1.FieldSchema(name:value, type:string, comment:It
is the column string value), ]
PREHOOK: query: SELECT COUNT(*) FROM hbase_table_1_like
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table_1_like
@@ -987,15 +992,15 @@ CREATE EXTERNAL TABLE `hbase_table_1_like`(
`key` int COMMENT 'It is a column key',
`value` string COMMENT 'It is the column string value')
ROW FORMAT SERDE
- 'org.apache.hadoop.hive.hbase.HBaseSerDe'
-STORED BY
- 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
- 'hbase.columns.mapping'='cf:string',
- 'serialization.format'='1')
+ 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
TBLPROPERTIES (
'bucketing_version'='2',
- 'hbase.table.name'='hbase_table_0',
#### A masked pattern was here ####
PREHOOK: query: DROP TABLE IF EXISTS hbase_table_9
PREHOOK: type: DROPTABLE
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java
index 09b3ed73c9c..e4f24c251cd 100644
---
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java
+++
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java
@@ -22,6 +22,7 @@ import static
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_
import java.util.Arrays;
import java.util.HashSet;
+import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@@ -121,7 +122,7 @@ public class CreateTableLikeOperation extends
DDLOperation<CreateTableLikeDesc>
setTableParameters(table);
- if (desc.isUserStorageFormat()) {
+ if (desc.isUserStorageFormat() || (table.getInputFormatClass() == null) ||
(table.getOutputFormatClass() == null)) {
setStorage(table);
}
@@ -147,37 +148,13 @@ public class CreateTableLikeOperation extends
DDLOperation<CreateTableLikeDesc>
}
private void setTableParameters(Table tbl) throws HiveException {
- Set<String> retainer = new HashSet<String>();
-
- Class<? extends Deserializer> serdeClass;
- try {
- serdeClass = tbl.getDeserializerClass();
- } catch (Exception e) {
- throw new HiveException(e);
- }
- // We should copy only those table parameters that are specified in the
config.
- SerDeSpec spec = AnnotationUtils.getAnnotation(serdeClass,
SerDeSpec.class);
-
- // for non-native table, property storage_handler should be retained
- retainer.add(META_TABLE_STORAGE);
- if (spec != null && spec.schemaProps() != null) {
- retainer.addAll(Arrays.asList(spec.schemaProps()));
- }
-
- String paramsStr = HiveConf.getVar(context.getConf(),
HiveConf.ConfVars.DDL_CTL_PARAMETERS_WHITELIST);
- if (paramsStr != null) {
- retainer.addAll(Arrays.asList(paramsStr.split(",")));
- }
-
- Map<String, String> params = tbl.getParameters();
- if (!retainer.isEmpty()) {
- params.keySet().retainAll(retainer);
- } else {
- params.clear();
- }
+ // With Hive-25813, we'll not copy over table properties from the source.
+ // CTLT should copy column schema but not table properties. It is
also consistent
+ // with other query engines like mysql, redshift.
+ tbl.getParameters().clear();
if (desc.getTblProps() != null) {
- params.putAll(desc.getTblProps());
+ tbl.setParameters(desc.getTblProps());
}
}
@@ -186,7 +163,10 @@ public class CreateTableLikeOperation extends
DDLOperation<CreateTableLikeDesc>
table.setOutputFormatClass(desc.getDefaultOutputFormat());
table.getTTable().getSd().setInputFormat(table.getInputFormatClass().getName());
table.getTTable().getSd().setOutputFormat(table.getOutputFormatClass().getName());
-
+ if (table.getTTable().getSd().getSerdeInfo() != null &&
+ table.getTTable().getSd().getSerdeInfo().getParameters() != null) {
+ table.getTTable().getSd().getSerdeInfo().getParameters().clear();
+ }
if (desc.getDefaultSerName() == null) {
LOG.info("Default to LazySimpleSerDe for table {}", desc.getTableName());
table.setSerializationLib(LazySimpleSerDe.class.getName());
diff --git a/ql/src/test/queries/clientpositive/avro_extschema_insert.q
b/ql/src/test/queries/clientpositive/avro_extschema_insert.q
index ef79d229df4..94b1c056836 100644
--- a/ql/src/test/queries/clientpositive/avro_extschema_insert.q
+++ b/ql/src/test/queries/clientpositive/avro_extschema_insert.q
@@ -7,7 +7,10 @@ create external table avro_extschema_insert1 (name string)
partitioned by (p1 st
describe avro_extschema_insert1;
-create external table avro_extschema_insert2 like avro_extschema_insert1;
+create external table avro_extschema_insert2 like avro_extschema_insert1
stored as avro
+ tblproperties
('avro.schema.url'='${system:test.tmp.dir}/avro_extschema_insert.avsc');
+
+desc formatted avro_extschema_insert2;
insert overwrite table avro_extschema_insert1 partition (p1='part1') values
('col1_value', 1, 'col3_value');
diff --git a/ql/src/test/queries/clientpositive/create_like2.q
b/ql/src/test/queries/clientpositive/create_like2.q
index 9294894d527..073834fdfe0 100644
--- a/ql/src/test/queries/clientpositive/create_like2.q
+++ b/ql/src/test/queries/clientpositive/create_like2.q
@@ -39,3 +39,27 @@ drop table test_external;
drop table test_mm1;
drop table test_external1;
drop table test_mm2;
+
+-- Create JDBC based CTLT table, HIVE-25813
+CREATE EXTERNAL TABLE default.dbs (
+ DB_ID bigint,
+ DB_LOCATION_URI string,
+ NAME string,
+ OWNER_NAME string,
+ OWNER_TYPE string )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ 'hive.sql.database.type' = 'MYSQL',
+ 'hive.sql.jdbc.driver' = 'com.mysql.jdbc.Driver',
+ 'hive.sql.jdbc.url' = 'jdbc:mysql://localhost:3306/hive1',
+ 'hive.sql.dbcp.username' = 'hive1',
+ 'hive.sql.dbcp.password' = 'cloudera',
+ 'hive.sql.query' = 'SELECT DB_ID, DB_LOCATION_URI, NAME, OWNER_NAME,
OWNER_TYPE FROM DBS'
+);
+
+CREATE TABLE default.dbscopy LIKE default.dbs;
+
+desc formatted default.dbscopy;
+
+drop table default.dbs;
+drop table default.dbscopy;
\ No newline at end of file
diff --git
a/ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out
b/ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out
index 9b406d249c6..a6fc29248b1 100644
--- a/ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out
+++ b/ql/src/test/results/clientpositive/llap/avro_extschema_insert.q.out
@@ -22,14 +22,57 @@ p1 string
# Partition Information
# col_name data_type comment
p1 string
-PREHOOK: query: create external table avro_extschema_insert2 like
avro_extschema_insert1
+PREHOOK: query: create external table avro_extschema_insert2 like
avro_extschema_insert1 stored as avro
+#### A masked pattern was here ####
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@avro_extschema_insert2
-POSTHOOK: query: create external table avro_extschema_insert2 like
avro_extschema_insert1
+POSTHOOK: query: create external table avro_extschema_insert2 like
avro_extschema_insert1 stored as avro
+#### A masked pattern was here ####
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@avro_extschema_insert2
+PREHOOK: query: desc formatted avro_extschema_insert2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@avro_extschema_insert2
+POSTHOOK: query: desc formatted avro_extschema_insert2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@avro_extschema_insert2
+# col_name data_type comment
+col1 string
+col2 bigint
+col3 string
+
+# Partition Information
+# col_name data_type comment
+p1 string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: EXTERNAL_TABLE
+Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+ EXTERNAL TRUE
+#### A masked pattern was here ####
+ bucketing_version 2
+ numFiles 0
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.avro.AvroSerDe
+InputFormat:
org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat
+OutputFormat:
org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
PREHOOK: query: insert overwrite table avro_extschema_insert1 partition
(p1='part1') values ('col1_value', 1, 'col3_value')
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
diff --git a/ql/src/test/results/clientpositive/llap/create_like.q.out
b/ql/src/test/results/clientpositive/llap/create_like.q.out
index 8867dc19d50..ac3df060931 100644
--- a/ql/src/test/results/clientpositive/llap/create_like.q.out
+++ b/ql/src/test/results/clientpositive/llap/create_like.q.out
@@ -423,28 +423,6 @@ Retention: 0
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE
{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"first_name\":\"true\",\"last_name\":\"true\",\"number\":\"true\"}}
- avro.schema.literal {
- \"namespace\": \"testing.hive.avro.serde\",
- \"name\": \"doctors_n2\",
- \"type\": \"record\",
- \"fields\": [
- {
- \"name\":\"number\",
- \"type\":\"int\",
- \"doc\":\"Order of playing the role\"
- },
- {
- \"name\":\"first_name\",
- \"type\":\"string\",
- \"doc\":\"first name of actor playing
role\"
- },
- {
- \"name\":\"last_name\",
- \"type\":\"string\",
- \"doc\":\"last name of actor playing
role\"
- }
- ]
- }
bucketing_version 2
numFiles 0
numRows 0
@@ -499,7 +477,6 @@ Table Parameters:
bucketing_version 2
numFiles 0
numRows 0
- parquet.compression LZO
rawDataSize 0
totalSize 0
#### A masked pattern was here ####
@@ -598,8 +575,6 @@ Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
-Storage Desc Params:
- serialization.format 1
PREHOOK: query: drop table table6_n4
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@table6_n4
@@ -646,8 +621,6 @@ Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
-Storage Desc Params:
- serialization.format 1
PREHOOK: query: drop table table5_n5
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@table5_n5
@@ -696,7 +669,6 @@ Table Parameters:
bucketing_version 2
numFiles 0
numRows 0
- orc.compress SNAPPY
rawDataSize 0
totalSize 0
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/create_like2.q.out
b/ql/src/test/results/clientpositive/llap/create_like2.q.out
index 36aaac060f8..2ca82c23185 100644
--- a/ql/src/test/results/clientpositive/llap/create_like2.q.out
+++ b/ql/src/test/results/clientpositive/llap/create_like2.q.out
@@ -40,9 +40,7 @@ Retention: 0
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE
{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
- a 1
bucketing_version 2
- c 3
numFiles 0
numRows 0
rawDataSize 0
@@ -339,3 +337,100 @@ POSTHOOK: query: drop table test_mm2
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@test_mm2
POSTHOOK: Output: default@test_mm2
+PREHOOK: query: CREATE EXTERNAL TABLE default.dbs (
+ DB_ID bigint,
+ DB_LOCATION_URI string,
+ NAME string,
+ OWNER_NAME string,
+ OWNER_TYPE string )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ 'hive.sql.database.type' = 'MYSQL',
+ 'hive.sql.jdbc.driver' = 'com.mysql.jdbc.Driver',
+ 'hive.sql.jdbc.url' = 'jdbc:mysql://localhost:3306/hive1',
+ 'hive.sql.dbcp.username' = 'hive1',
+ 'hive.sql.dbcp.password' = 'cloudera',
+ 'hive.sql.query' = 'SELECT DB_ID, DB_LOCATION_URI, NAME, OWNER_NAME,
OWNER_TYPE FROM DBS'
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dbs
+POSTHOOK: query: CREATE EXTERNAL TABLE default.dbs (
+ DB_ID bigint,
+ DB_LOCATION_URI string,
+ NAME string,
+ OWNER_NAME string,
+ OWNER_TYPE string )
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ 'hive.sql.database.type' = 'MYSQL',
+ 'hive.sql.jdbc.driver' = 'com.mysql.jdbc.Driver',
+ 'hive.sql.jdbc.url' = 'jdbc:mysql://localhost:3306/hive1',
+ 'hive.sql.dbcp.username' = 'hive1',
+ 'hive.sql.dbcp.password' = 'cloudera',
+ 'hive.sql.query' = 'SELECT DB_ID, DB_LOCATION_URI, NAME, OWNER_NAME,
OWNER_TYPE FROM DBS'
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dbs
+PREHOOK: query: CREATE TABLE default.dbscopy LIKE default.dbs
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dbscopy
+POSTHOOK: query: CREATE TABLE default.dbscopy LIKE default.dbs
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dbscopy
+PREHOOK: query: desc formatted default.dbscopy
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@dbscopy
+POSTHOOK: query: desc formatted default.dbscopy
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@dbscopy
+# col_name data_type comment
+db_id bigint
+db_location_uri string
+name string
+#### A masked pattern was here ####
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+#### A masked pattern was here ####
+ bucketing_version 2
+ numFiles 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
+ transactional true
+ transactional_properties default
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+PREHOOK: query: drop table default.dbs
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@dbs
+PREHOOK: Output: default@dbs
+POSTHOOK: query: drop table default.dbs
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@dbs
+POSTHOOK: Output: default@dbs
+PREHOOK: query: drop table default.dbscopy
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@dbscopy
+PREHOOK: Output: default@dbscopy
+POSTHOOK: query: drop table default.dbscopy
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@dbscopy
+POSTHOOK: Output: default@dbscopy
diff --git
a/ql/src/test/results/clientpositive/llap/create_like_tbl_props.q.out
b/ql/src/test/results/clientpositive/llap/create_like_tbl_props.q.out
index 80cb4c82c88..6de55c0e2ad 100644
--- a/ql/src/test/results/clientpositive/llap/create_like_tbl_props.q.out
+++ b/ql/src/test/results/clientpositive/llap/create_like_tbl_props.q.out
@@ -231,5 +231,3 @@ Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
-Storage Desc Params:
- serialization.format 1
diff --git a/ql/src/test/results/clientpositive/llap/create_like_view.q.out
b/ql/src/test/results/clientpositive/llap/create_like_view.q.out
index 870f2800cfb..f2f6c0b71af 100644
--- a/ql/src/test/results/clientpositive/llap/create_like_view.q.out
+++ b/ql/src/test/results/clientpositive/llap/create_like_view.q.out
@@ -126,8 +126,6 @@ Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
-Storage Desc Params:
- serialization.format 1
PREHOOK: query: CREATE TABLE IF NOT EXISTS table2_n9 LIKE view1_n1
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE IF NOT EXISTS table2_n9 LIKE view1_n1
@@ -186,8 +184,6 @@ Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
-Storage Desc Params:
- serialization.format 1
PREHOOK: query: INSERT OVERWRITE TABLE table1_n14 SELECT key, value FROM src
WHERE key = 86
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -310,8 +306,6 @@ Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
-Storage Desc Params:
- serialization.format 1
PREHOOK: query: DROP TABLE table1_n14
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@table1_n14
diff --git a/ql/src/test/results/clientpositive/llap/partition_discovery.q.out
b/ql/src/test/results/clientpositive/llap/partition_discovery.q.out
index e273c904c1c..14ad24d24d5 100644
--- a/ql/src/test/results/clientpositive/llap/partition_discovery.q.out
+++ b/ql/src/test/results/clientpositive/llap/partition_discovery.q.out
@@ -502,7 +502,6 @@ Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
EXTERNAL TRUE
bucketing_version 2
- discover.partitions false
numFiles 0
numPartitions 0
numRows 0