This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-1.2-lts
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-1.2-lts by this push:
new 97302dea69 [Fix](multi-catalog) Fix some hive partition issues. (#20401)
97302dea69 is described below
commit 97302dea69d1f98a823e5ea1f0ee120aeb946701
Author: Qi Chen <[email protected]>
AuthorDate: Wed Jun 7 22:20:53 2023 +0800
[Fix](multi-catalog) Fix some hive partition issues. (#20401)
merge #19513 to branch-1.2-lts.
---
be/src/exec/text_converter.cpp | 65 ++++++++++++++++++-
be/src/exec/text_converter.hpp | 37 ++++++++++-
.../hive/scripts/create_preinstalled_table.hql | 24 +++++++
.../decimal_par2=1.1/decimal_par3=1.11/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.12/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.21/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.22/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.11/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.12/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.21/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.22/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.11/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.12/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.21/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.22/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.11/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.12/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.21/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.22/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.11/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.12/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.21/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.22/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.11/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.12/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.21/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.22/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.11/data.txt | 2 +
.../decimal_par2=1.1/decimal_par3=1.12/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.21/data.txt | 2 +
.../decimal_par2=1.2/decimal_par3=1.22/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.11/data.txt | 2 +
.../decimal_par2=2.1/decimal_par3=2.12/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.21/data.txt | 2 +
.../decimal_par2=2.2/decimal_par3=2.22/data.txt | 2 +
.../java/org/apache/doris/backup/HdfsStorage.java | 4 +-
.../java/org/apache/doris/backup/S3Storage.java | 2 +-
.../doris/datasource/hive/HiveMetaStoreCache.java | 9 +++
.../org/apache/doris/load/loadv2/dpp/SparkDpp.java | 12 ++--
.../hive/test_hive_partitions.out | 73 ++++++++++++++++++++++
.../hive/test_hive_partitions.groovy | 61 ++++++++++++++++++
41 files changed, 337 insertions(+), 14 deletions(-)
diff --git a/be/src/exec/text_converter.cpp b/be/src/exec/text_converter.cpp
index 5888eefc43..6b6d79566e 100644
--- a/be/src/exec/text_converter.cpp
+++ b/be/src/exec/text_converter.cpp
@@ -143,6 +143,18 @@ bool TextConverter::write_vec_column(const SlotDescriptor* slot_desc,
break;
}
+ case TYPE_DATEV2: {
+ vectorized::DateV2Value<vectorized::DateV2ValueType> ts_slot;
+ if (!ts_slot.from_date_str(data, len)) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ uint32_t int_val = ts_slot.to_date_int_val();
+ reinterpret_cast<vectorized::ColumnVector<vectorized::UInt32>*>(col_ptr)
+ ->get_data()
+ .resize_fill(origin_size + rows, int_val);
+ break;
+ }
case TYPE_DATETIME: {
vectorized::VecDateTimeValue ts_slot;
if (!ts_slot.from_date_str(data, len)) {
@@ -155,7 +167,18 @@ bool TextConverter::write_vec_column(const SlotDescriptor* slot_desc,
.resize_fill(origin_size + rows,
*reinterpret_cast<int64_t*>(&ts_slot));
break;
}
-
+ case TYPE_DATETIMEV2: {
+ vectorized::DateV2Value<vectorized::DateTimeV2ValueType> ts_slot;
+ if (!ts_slot.from_date_str(data, len)) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ uint64_t int_val = ts_slot.to_date_int_val();
+ reinterpret_cast<vectorized::ColumnVector<vectorized::UInt64>*>(col_ptr)
+ ->get_data()
+ .resize_fill(origin_size + rows, int_val);
+ break;
+ }
case TYPE_DECIMALV2: {
DecimalV2Value decimal_slot;
if (decimal_slot.parse_from_str(data, len)) {
@@ -167,7 +190,45 @@ bool TextConverter::write_vec_column(const SlotDescriptor* slot_desc,
.resize_fill(origin_size + rows, decimal_slot.value());
break;
}
-
+ case TYPE_DECIMAL32: {
+ StringParser::ParseResult result = StringParser::PARSE_SUCCESS;
+ int32_t value = StringParser::string_to_decimal<int32_t>(
+ data, len, slot_desc->type().precision, slot_desc->type().scale, &result);
+ if (result != StringParser::PARSE_SUCCESS) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ reinterpret_cast<vectorized::ColumnVector<vectorized::Int32>*>(col_ptr)
+ ->get_data()
+ .resize_fill(origin_size + rows, value);
+ break;
+ }
+ case TYPE_DECIMAL64: {
+ StringParser::ParseResult result = StringParser::PARSE_SUCCESS;
+ int64_t value = StringParser::string_to_decimal<int64_t>(
+ data, len, slot_desc->type().precision, slot_desc->type().scale, &result);
+ if (result != StringParser::PARSE_SUCCESS) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ reinterpret_cast<vectorized::ColumnVector<vectorized::Int64>*>(col_ptr)
+ ->get_data()
+ .resize_fill(origin_size + rows, value);
+ break;
+ }
+ case TYPE_DECIMAL128I: {
+ StringParser::ParseResult result = StringParser::PARSE_SUCCESS;
+ vectorized::Int128 value = StringParser::string_to_decimal<vectorized::Int128>(
+ data, len, slot_desc->type().precision, slot_desc->type().scale, &result);
+ if (result != StringParser::PARSE_SUCCESS) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ reinterpret_cast<vectorized::ColumnVector<vectorized::Int128>*>(col_ptr)
+ ->get_data()
+ .resize_fill(origin_size + rows, value);
+ break;
+ }
default:
DCHECK(false) << "bad slot type: " << slot_desc->type();
break;
diff --git a/be/src/exec/text_converter.hpp b/be/src/exec/text_converter.hpp
index 3918d54a90..b1df974aca 100644
--- a/be/src/exec/text_converter.hpp
+++ b/be/src/exec/text_converter.hpp
@@ -328,7 +328,42 @@ inline bool TextConverter::write_vec_column(const SlotDescriptor* slot_desc,
decimal_slot.value());
break;
}
-
+ case TYPE_DECIMAL32: {
+ StringParser::ParseResult result = StringParser::PARSE_SUCCESS;
+ int32_t value = StringParser::string_to_decimal<int32_t>(
+ data, len, slot_desc->type().precision, slot_desc->type().scale, &result);
+ if (result != StringParser::PARSE_SUCCESS) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ reinterpret_cast<vectorized::ColumnVector<vectorized::Int32>*>(col_ptr)->insert_value(
+ value);
+ break;
+ }
+ case TYPE_DECIMAL64: {
+ StringParser::ParseResult result = StringParser::PARSE_SUCCESS;
+ int64_t value = StringParser::string_to_decimal<int64_t>(
+ data, len, slot_desc->type().precision, slot_desc->type().scale, &result);
+ if (result != StringParser::PARSE_SUCCESS) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ reinterpret_cast<vectorized::ColumnVector<vectorized::Int64>*>(col_ptr)->insert_value(
+ value);
+ break;
+ }
+ case TYPE_DECIMAL128I: {
+ StringParser::ParseResult result = StringParser::PARSE_SUCCESS;
+ vectorized::Int128 value = StringParser::string_to_decimal<vectorized::Int128>(
+ data, len, slot_desc->type().precision, slot_desc->type().scale, &result);
+ if (result != StringParser::PARSE_SUCCESS) {
+ parse_result = StringParser::PARSE_FAILURE;
+ break;
+ }
+ reinterpret_cast<vectorized::ColumnVector<vectorized::Int128>*>(col_ptr)->insert_value(
+ value);
+ break;
+ }
default:
DCHECK(false) << "bad slot type: " << slot_desc->type();
break;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql b/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql
index 8bb9f456b8..c5b3d5dc46 100644
--- a/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql
+++ b/docker/thirdparties/docker-compose/hive/scripts/create_preinstalled_table.hql
@@ -243,6 +243,30 @@ TBLPROPERTIES (
msck repair table table_with_vertical_line;
+CREATE external TABLE `table_with_pars`(
+ `id` int COMMENT 'id',
+ `data` string COMMENT 'data')
+PARTITIONED BY (
+ `dt_par` date,
+ `time_par` timestamp,
+ `decimal_par1` decimal(8, 4),
+ `decimal_par2` decimal(18, 6),
+ `decimal_par3` decimal(38, 12))
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+ 'field.delim'='|',
+ 'serialization.format'='|')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+ '/user/doris/preinstalled_data/csv_partition_table/table_with_pars/';
+
+set hive.msck.path.validation=ignore;
+msck repair table table_with_pars;
+
CREATE TABLE `table_with_x01`(
`k1` string COMMENT 'k1',
`k2` string COMMENT 'k2',
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
new file mode 100644
index 0000000000..234c0edb2f
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
@@ -0,0 +1,2 @@
+1|1.11abc
+2|1.11ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
new file mode 100644
index 0000000000..755b236537
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
@@ -0,0 +1,2 @@
+3|1.12abc
+4|1.12ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
new file mode 100644
index 0000000000..ca5889a36f
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
@@ -0,0 +1,2 @@
+5|1.21abc
+6|1.21ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
new file mode 100644
index 0000000000..150f03bb75
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
@@ -0,0 +1,2 @@
+7|1.22abc
+8|1.22ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
new file mode 100644
index 0000000000..1b04386283
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
@@ -0,0 +1,2 @@
+9|2.11abc
+10|2.11ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
new file mode 100644
index 0000000000..b2d3299c3e
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
@@ -0,0 +1,2 @@
+11|2.12abc
+12|2.12ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
new file mode 100644
index 0000000000..669e6b82f8
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
@@ -0,0 +1,2 @@
+13|2.21abc
+14|2.21ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
new file mode 100644
index 0000000000..3a7abaa59c
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
@@ -0,0 +1,2 @@
+15|2.22abc
+16|2.22ABC
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
new file mode 100644
index 0000000000..3d4c56e08c
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
@@ -0,0 +1,2 @@
+17|1.11cba
+18|1.11CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
new file mode 100644
index 0000000000..826b4223e4
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
@@ -0,0 +1,2 @@
+19|1.12cba
+20|1.12CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
new file mode 100644
index 0000000000..d02032940c
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
@@ -0,0 +1,2 @@
+21|1.21cba
+22|1.21CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
new file mode 100644
index 0000000000..2ae7e1e56f
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
@@ -0,0 +1,2 @@
+23|1.22cba
+24|1.22CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
new file mode 100644
index 0000000000..5b4e60d3c7
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
@@ -0,0 +1,2 @@
+25|2.11cba
+26|2.11CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
new file mode 100644
index 0000000000..6794f2b22e
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
@@ -0,0 +1,2 @@
+27|2.12cba
+28|2.12CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
new file mode 100644
index 0000000000..a2d6ae84cb
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
@@ -0,0 +1,2 @@
+29|2.21cba
+30|2.21CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
new file mode 100644
index 0000000000..c4369f26f4
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-01-01/time_par=2023-01-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
@@ -0,0 +1,2 @@
+31|2.22cba
+32|2.22CBA
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
new file mode 100644
index 0000000000..6c8bab34dc
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
@@ -0,0 +1,2 @@
+33|1.11xyz
+34|1.11XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
new file mode 100644
index 0000000000..6e8a9c0a56
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
@@ -0,0 +1,2 @@
+35|1.12xyz
+36|1.12XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
new file mode 100644
index 0000000000..370aff5f53
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
@@ -0,0 +1,2 @@
+37|1.21xyz
+38|1.21XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
new file mode 100644
index 0000000000..65b0efff6f
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
@@ -0,0 +1,2 @@
+39|1.22xyz
+40|1.22XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
new file mode 100644
index 0000000000..d292316bcc
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
@@ -0,0 +1,2 @@
+41|2.11xyz
+42|2.11XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
new file mode 100644
index 0000000000..f19d084e19
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
@@ -0,0 +1,2 @@
+43|2.12xyz
+44|2.12XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
new file mode 100644
index 0000000000..2dd58a86a3
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
@@ -0,0 +1,2 @@
+45|2.21xyz
+46|2.21XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
new file mode 100644
index 0000000000..beca231346
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
00%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
@@ -0,0 +1,2 @@
+47|2.22xyz
+48|2.22XYZ
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
new file mode 100644
index 0000000000..bf5ee5a31e
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.11/data.txt
@@ -0,0 +1,2 @@
+49|1.11zxy
+50|1.11ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
new file mode 100644
index 0000000000..47171ff8ff
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.1/decimal_par3=1.12/data.txt
@@ -0,0 +1,2 @@
+51|1.12zxy
+52|1.12ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
new file mode 100644
index 0000000000..bb7e8c0131
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.21/data.txt
@@ -0,0 +1,2 @@
+53|1.21zxy
+54|1.21ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
new file mode 100644
index 0000000000..6f06419331
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=1/decimal_par2=1.2/decimal_par3=1.22/data.txt
@@ -0,0 +1,2 @@
+55|1.22zxy
+56|1.22ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
new file mode 100644
index 0000000000..1ed314a1fd
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.11/data.txt
@@ -0,0 +1,2 @@
+57|2.11zxy
+58|2.11ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
new file mode 100644
index 0000000000..d0132c9c92
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.1/decimal_par3=2.12/data.txt
@@ -0,0 +1,2 @@
+59|2.12zxy
+60|2.12ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
new file mode 100644
index 0000000000..879d0bf877
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.21/data.txt
@@ -0,0 +1,2 @@
+61|2.21zxy
+62|2.21ZXY
\ No newline at end of file
diff --git
a/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
new file mode 100644
index 0000000000..c11ba6adba
--- /dev/null
+++
b/docker/thirdparties/docker-compose/hive/scripts/preinstalled_data/csv_partition_table/table_with_pars/dt_par=2023-02-01/time_par=2023-02-01
01%3A30%3A00/decimal_par1=2/decimal_par2=2.2/decimal_par3=2.22/data.txt
@@ -0,0 +1,2 @@
+63|2.22zxy
+64|2.22ZXY
\ No newline at end of file
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
b/fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
index be858c6648..a778b6d870 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
@@ -106,9 +106,9 @@ public class HdfsStorage extends BlobStorage {
hdfsProperties.get(HdfsResource.HADOOP_KERBEROS_KEYTAB));
}
if (username == null) {
- dfsFileSystem =
FileSystem.get(java.net.URI.create(remotePath), conf);
+ dfsFileSystem = FileSystem.get(new
Path(remotePath).toUri(), conf);
} else {
- dfsFileSystem =
FileSystem.get(java.net.URI.create(remotePath), conf, username);
+ dfsFileSystem = FileSystem.get(new
Path(remotePath).toUri(), conf, username);
}
} catch (Exception e) {
LOG.error("errors while connect to " + remotePath, e);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
b/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
index b6f0e0752f..6db15c3461 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
@@ -144,7 +144,7 @@ public class S3Storage extends BlobStorage {
System.setProperty("com.amazonaws.services.s3.enableV4", "true");
S3Resource.getS3HadoopProperties(caseInsensitiveProperties).forEach(conf::set);
try {
- dfsFileSystem = FileSystem.get(new URI(remotePath), conf);
+ dfsFileSystem = FileSystem.get(new
org.apache.hadoop.fs.Path(remotePath).toUri(), conf);
} catch (Exception e) {
throw new UserException("Failed to get S3 FileSystem for " +
e.getMessage(), e);
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
index fee8394078..b82653016e 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
@@ -64,6 +64,9 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.parquet.Strings;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.List;
@@ -177,6 +180,12 @@ public class HiveMetaStoreCache {
Map<Long, List<UniqueId>> idToUniqueIdsMap =
Maps.newHashMapWithExpectedSize(partitionNames.size());
long idx = 0;
for (String partitionName : partitionNames) {
+ try {
+ partitionName = URLDecoder.decode(partitionName,
StandardCharsets.UTF_8.name());
+ } catch (UnsupportedEncodingException e) {
+ // Unreachable: UTF-8 is a standard charset guaranteed to be supported on every JVM
+ throw new RuntimeException(e);
+ }
long partitionId = idx++;
ListPartitionItem listPartitionItem =
toListPartitionItem(partitionName, key.types);
idToPartitionItem.put(partitionId, listPartitionItem);
diff --git
a/fe/spark-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/SparkDpp.java
b/fe/spark-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/SparkDpp.java
index a829018725..e6c9bf5528 100644
--- a/fe/spark-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/SparkDpp.java
+++ b/fe/spark-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/SparkDpp.java
@@ -64,8 +64,6 @@ import scala.Tuple2;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
-import java.net.URI;
-import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
@@ -198,7 +196,7 @@ public final class SparkDpp implements java.io.Serializable
{
.foreachPartition((VoidFunction<Iterator<Tuple2<List<Object>,
Object[]>>>) t -> {
// write the data to dst file
Configuration conf = new
Configuration(serializableHadoopConf.value());
- FileSystem fs =
FileSystem.get(URI.create(etlJobConfig.outputPath), conf);
+ FileSystem fs = FileSystem.get(new
Path(etlJobConfig.outputPath).toUri(), conf);
String lastBucketKey = null;
ParquetWriter<InternalRow> parquetWriter = null;
TaskContext taskContext = TaskContext.get();
@@ -859,12 +857,11 @@ public final class SparkDpp implements
java.io.Serializable {
List<String> filePaths,
EtlJobConfig.EtlFileGroup
fileGroup,
StructType dstTableSchema)
- throws SparkDppException, IOException, URISyntaxException {
+ throws SparkDppException, IOException {
Dataset<Row> fileGroupDataframe = null;
for (String filePath : filePaths) {
try {
- URI uri = new URI(filePath);
- FileSystem fs = FileSystem.get(uri,
serializableHadoopConf.value());
+ FileSystem fs = FileSystem.get(new Path(filePath).toUri(),
serializableHadoopConf.value());
FileStatus[] fileStatuses = fs.globStatus(new Path(filePath));
if (fileStatuses == null) {
throw new SparkDppException("fs list status failed: " +
filePath);
@@ -1130,8 +1127,7 @@ public final class SparkDpp implements
java.io.Serializable {
private void writeDppResult(DppResult dppResult) throws Exception {
String outputPath = etlJobConfig.getOutputPath();
String resultFilePath = outputPath + "/" + DPP_RESULT_FILE;
- URI uri = new URI(outputPath);
- FileSystem fs = FileSystem.get(uri, serializableHadoopConf.value());
+ FileSystem fs = FileSystem.get(new Path(outputPath).toUri(),
serializableHadoopConf.value());
Path filePath = new Path(resultFilePath);
FSDataOutputStream outputStream = fs.create(filePath);
Gson gson = new Gson();
diff --git
a/regression-test/data/external_catalog_p0/hive/test_hive_partitions.out
b/regression-test/data/external_catalog_p0/hive/test_hive_partitions.out
new file mode 100644
index 0000000000..f5f44b2368
--- /dev/null
+++ b/regression-test/data/external_catalog_p0/hive/test_hive_partitions.out
@@ -0,0 +1,73 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !q01 --
+33 1.11xyz
+34 1.11XYZ
+35 1.12xyz
+36 1.12XYZ
+37 1.21xyz
+38 1.21XYZ
+39 1.22xyz
+40 1.22XYZ
+41 2.11xyz
+42 2.11XYZ
+43 2.12xyz
+44 2.12XYZ
+45 2.21xyz
+46 2.21XYZ
+47 2.22xyz
+48 2.22XYZ
+49 1.11zxy
+50 1.11ZXY
+51 1.12zxy
+52 1.12ZXY
+53 1.21zxy
+54 1.21ZXY
+55 1.22zxy
+56 1.22ZXY
+57 2.11zxy
+58 2.11ZXY
+59 2.12zxy
+60 2.12ZXY
+61 2.21zxy
+62 2.21ZXY
+63 2.22zxy
+64 2.22ZXY
+
+-- !q02 --
+49 1.11zxy
+50 1.11ZXY
+51 1.12zxy
+52 1.12ZXY
+53 1.21zxy
+54 1.21ZXY
+55 1.22zxy
+56 1.22ZXY
+57 2.11zxy
+58 2.11ZXY
+59 2.12zxy
+60 2.12ZXY
+61 2.21zxy
+62 2.21ZXY
+63 2.22zxy
+64 2.22ZXY
+
+-- !q03 --
+49 1.11zxy
+50 1.11ZXY
+51 1.12zxy
+52 1.12ZXY
+53 1.21zxy
+54 1.21ZXY
+55 1.22zxy
+56 1.22ZXY
+
+-- !q04 --
+53 1.21zxy
+54 1.21ZXY
+55 1.22zxy
+56 1.22ZXY
+
+-- !q05 --
+55 1.22zxy
+56 1.22ZXY
+
diff --git
a/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy
b/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy
new file mode 100644
index 0000000000..39c575f7c1
--- /dev/null
+++
b/regression-test/suites/external_catalog_p0/hive/test_hive_partitions.groovy
@@ -0,0 +1,61 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hive_partitions", "p0") {
+ def q01 = {
+ qt_q01 """
+ select id, data from table_with_pars where dt_par = '2023-02-01' order
by id;
+ """
+ qt_q02 """
+ select id, data from table_with_pars where dt_par = '2023-02-01' and
time_par = '2023-02-01 01:30:00' order by id;
+ """
+ qt_q03 """
+ select id, data from table_with_pars where dt_par = '2023-02-01' and
time_par = '2023-02-01 01:30:00'
+ and decimal_par1 = '1' order by id;
+ """
+ qt_q04 """
+ select id, data from table_with_pars where dt_par = '2023-02-01' and
time_par = '2023-02-01 01:30:00'
+ and decimal_par1 = '1' and decimal_par2 = '1.2' order by id;
+ """
+ qt_q05 """
+ select id, data from table_with_pars where dt_par = '2023-02-01' and
time_par = '2023-02-01 01:30:00'
+ and decimal_par1 = '1' and decimal_par2 = '1.2' and decimal_par3 =
'1.22' order by id;
+ """
+ }
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ String hms_port = context.config.otherConfigs.get("hms_port")
+ String catalog_name = "hive_test_partitions"
+ sql """drop catalog if exists ${catalog_name}"""
+ sql """create resource if not exists hms_resource_hive_partitions
properties (
+ "type"="hms",
+ 'hive.metastore.uris' = 'thrift://127.0.0.1:${hms_port}'
+ );"""
+ sql """create catalog if not exists ${catalog_name} with resource
hms_resource_hive_partitions;"""
+ sql """use `${catalog_name}`.`default`"""
+
+ q01()
+
+ sql """drop catalog if exists ${catalog_name}"""
+ sql """drop resource if exists hms_resource_hive_partitions"""
+ } finally {
+ }
+ }
+}
+
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]