This is an automated email from the ASF dual-hosted git repository.
hellostephen pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.1 by this push:
new 8f39143c144 [test](fix) replace hardcode s3BucketName (#37750)
8f39143c144 is described below
commit 8f39143c14465e9b64bfc9ee4ef07d5f830c29c4
Author: Dongyang Li <[email protected]>
AuthorDate: Sun Jul 14 18:38:52 2024 +0800
[test](fix) replace hardcode s3BucketName (#37750)
## Proposed changes
pick from master #37739
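The change is mechanical: every hardcoded bucket ("doris-build-1308700295"), COS endpoint, and region literal in the regression suites is replaced with the framework's config helpers, so the suites can run against whichever S3-compatible store the regression environment is configured for. A minimal sketch of the pattern (the path is illustrative; getS3BucketName(), getS3Endpoint(), and getS3Region() are the suite helpers used throughout the diff below):

    // before: tied to one specific COS bucket and region
    def path = "s3://doris-build-1308700295/regression/load/data/basic_data.csv"

    // after: bucket, endpoint and region are resolved from the
    // regression configuration at run time
    def s3BucketName = getS3BucketName()
    def s3Endpoint = getS3Endpoint()
    def s3Region = getS3Region()
    def path = "s3://${s3BucketName}/regression/load/data/basic_data.csv"
    // LOAD statements then pass "${s3Endpoint}" and "${s3Region}" instead of
    // "cos.ap-beijing.myqcloud.com" and "ap-beijing"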
---------
Co-authored-by: stephen <[email protected]>
---
.../paimon/paimon_base_filesystem.groovy | 5 +-
.../hive/test_hive_write_insert_s3.groovy | 5 +-
.../suites/github_events_p2/load.groovy | 2 +-
.../load_p0/broker_load/test_compress_type.groovy | 41 +++---
.../test_csv_with_enclose_and_escapeS3_load.groovy | 15 ++-
.../load_p0/broker_load/test_etl_failed.groovy | 8 +-
.../broker_load/test_multi_table_load.groovy | 13 +-
.../suites/load_p0/broker_load/test_s3_load.groovy | 44 +++---
.../test_s3_load_with_load_parallelism.groovy | 7 +-
.../load_p0/broker_load/test_seq_load.groovy | 7 +-
.../load_p2/broker_load/test_broker_load.groovy | 140 +++++++++----------
.../test_parquet_large_metadata_load.groovy | 15 ++-
.../broker_load/test_s3_load_properties.groovy | 150 +++++++++++----------
.../tvf/test_tvf_based_broker_load.groovy | 74 +++++-----
.../diff_data/stress_test_diff_date_list.groovy | 2 +-
.../same_data/stress_test_same_date_range.groovy | 2 +-
.../stress_test_two_stream_load.groovy | 2 +-
...est_update_rows_and_partition_first_load.groovy | 16 ++-
regression-test/suites/tpcds_sf1000_p2/load.groovy | 5 +-
19 files changed, 287 insertions(+), 266 deletions(-)
diff --git a/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy b/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
index 7be15f94243..0e00cd8fb7a 100644
--- a/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
+++ b/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
@@ -29,6 +29,7 @@ suite("paimon_base_filesystem", "p0,external,doris,external_docker,external_dock
String s3ak = getS3AK()
String s3sk = getS3SK()
+ def s3Endpoint = getS3Endpoint()
def cos = """select
c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c18 from
${catalog_cos}.zd.all_table order by c18"""
def oss = """select * from ${catalog_oss}.paimonossdb1.test_tableoss
order by a"""
@@ -48,9 +49,9 @@ suite("paimon_base_filesystem", "p0,external,doris,external_docker,external_dock
create catalog if not exists ${catalog_oss} properties (
"type" = "paimon",
"warehouse" = "oss://paimon-zd/paimonoss",
- "oss.endpoint"="oss-cn-beijing.aliyuncs.com",
"oss.access_key"="${ak}",
- "oss.secret_key"="${sk}"
+ "oss.secret_key"="${sk}",
+ "oss.endpoint"="oss-cn-beijing.aliyuncs.com"
);
"""
logger.info("catalog " + catalog_cos + " created")
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy b/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy
index 87633ba1b09..cf9fea67cbd 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy
+++ b/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy
@@ -17,6 +17,7 @@
suite("test_hive_write_insert_s3",
"p2,external,hive,external_remote,external_remote_hive") {
def format_compressions = ["parquet_snappy"]
+ def s3BucketName = getS3BucketName()
def q01 = { String format_compression, String catalog_name ->
logger.info("hive sql: " + """ truncate table
all_types_${format_compression}_s3; """)
@@ -76,8 +77,8 @@ suite("test_hive_write_insert_s3", "p2,external,hive,external_re
hive_remote """ DROP TABLE IF EXISTS
all_types_par_${format_compression}_s3_${catalog_name}_q02; """
logger.info("hive sql: " + """ CREATE TABLE IF NOT EXISTS
all_types_par_${format_compression}_s3_${catalog_name}_q02 like
all_types_par_${format_compression}_s3; """)
hive_remote """ CREATE TABLE IF NOT EXISTS
all_types_par_${format_compression}_s3_${catalog_name}_q02 like
all_types_par_${format_compression}_s3; """
- logger.info("hive sql: " + """ ALTER TABLE
all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION
'cosn://doris-build-1308700295/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02';
""")
- hive_remote """ ALTER TABLE
all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION
'cosn://doris-build-1308700295/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02';
"""
+ logger.info("hive sql: " + """ ALTER TABLE
all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION
'cosn://${s3BucketName}/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02';
""")
+ hive_remote """ ALTER TABLE
all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION
'cosn://${s3BucketName}/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02';
"""
sql """refresh catalog ${catalog_name};"""
sql """
diff --git a/regression-test/suites/github_events_p2/load.groovy b/regression-test/suites/github_events_p2/load.groovy
index dc2e0dbb975..92a588a2214 100644
--- a/regression-test/suites/github_events_p2/load.groovy
+++ b/regression-test/suites/github_events_p2/load.groovy
@@ -31,7 +31,7 @@ suite("load") {
ak "${getS3AK()}"
sk "${getS3SK()}"
endpoint "http://${getS3Endpoint()}"
- region "ap-beijing"
+ region "${getS3Region()}"
repository "regression_test_github_events"
snapshot "github_events"
timestamp "2022-03-23-12-19-51"
diff --git a/regression-test/suites/load_p0/broker_load/test_compress_type.groovy b/regression-test/suites/load_p0/broker_load/test_compress_type.groovy
index 86406cef506..9464ca847ce 100644
--- a/regression-test/suites/load_p0/broker_load/test_compress_type.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_compress_type.groovy
@@ -17,6 +17,7 @@
suite("test_compress_type", "load_p0") {
def tableName = "basic_data"
+ def s3BucketName = getS3BucketName()
// GZ/LZO/BZ2/LZ4FRAME/DEFLATE/LZOP
def compressTypes = [
@@ -62,24 +63,24 @@ suite("test_compress_type", "load_p0") {
]
def paths = [
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
- "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
- "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.gz",
- "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.bz2",
- "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.lz4",
- "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.gz",
- "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.bz2",
- "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.lz4",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+ "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+ "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.gz",
+ "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.bz2",
+ "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.lz4",
+ "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.gz",
+ "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.bz2",
+ "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.lz4",
]
def labels = []
@@ -137,8 +138,8 @@ suite("test_compress_type", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${getS3Endpoint()}",
+ "AWS_REGION" = "${getS3Region()}"
)
"""
logger.info("submit sql: ${sql_str}");
diff --git a/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy b/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy
index a761ad1a211..d1294e40731 100644
--- a/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy
@@ -19,6 +19,7 @@
suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
def tableName = "test_csv_with_enclose_and_escape"
+ def s3BucketName = getS3BucketName()
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
@@ -48,24 +49,24 @@ suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
]
for (i in 0..<normalCases.size()) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/${normalCases[i]}.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/${normalCases[i]}.csv",
"${tableName}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")"))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_incomplete.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_incomplete.csv",
"${tableName}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")").addProperties("max_filter_ratio", "0.5"))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_without_escape.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_without_escape.csv",
"${tableName}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")"))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_multi_char_delimiter.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_multi_char_delimiter.csv",
"${tableName}", "LINES TERMINATED BY \"\$\$\$\"", "COLUMNS TERMINATED BY \"@@\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")"))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_not_trim_quotes.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_not_trim_quotes.csv",
"${tableName}", "", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\")").addProperties("trim_double_quotes", "false"))
@@ -92,8 +93,8 @@ suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${getS3Endpoint()}",
+ "AWS_REGION" = "${getS3Region()}"
)
${prop}
"""
diff --git a/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy b/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
index 928b4e38542..c418223db27 100644
--- a/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
@@ -16,6 +16,9 @@
// under the License.
suite("test_etl_failed", "load_p0") {
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
def tableName = "test_etl_failed"
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
@@ -33,7 +36,7 @@ suite("test_etl_failed", "load_p0") {
PROPERTIES ("replication_allocation" = "tag.location.default: 1");
"""
String label = "test_etl_failed"
- String path = "s3://doris-build-1308700295/regression/load/data/etl_failure/etl-failure.csv"
+ String path = "s3://${s3BucketName}/regression/load/data/etl_failure/etl-failure.csv"
String format = "CSV"
String ak = getS3AK()
String sk = getS3SK()
@@ -46,8 +49,8 @@ suite("test_etl_failed", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
)
PROPERTIES(
"use_new_load_scan_node" = "true",
diff --git a/regression-test/suites/load_p0/broker_load/test_multi_table_load.groovy b/regression-test/suites/load_p0/broker_load/test_multi_table_load.groovy
index e0541f09ce8..0d3a5e20551 100644
--- a/regression-test/suites/load_p0/broker_load/test_multi_table_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_multi_table_load.groovy
@@ -16,7 +16,10 @@
// under the License.
suite("test_multi_table_load", "load_p0") {
-
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
+
def tableName = "test_multi_table_load"
sql """ DROP TABLE IF EXISTS ${tableName} """
@@ -81,7 +84,7 @@ suite("test_multi_table_load", "load_p0") {
);
"""
- def path = "s3://doris-build-1308700295/regression/load/data/basic_data.csv"
+ def path = "s3://${s3BucketName}/regression/load/data/basic_data.csv"
def format_str = "CSV"
def ak = getS3AK()
def sk = getS3SK()
@@ -129,8 +132,8 @@ suite("test_multi_table_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
)
properties(
"use_new_load_scan_node" = "true",
@@ -163,4 +166,4 @@ suite("test_multi_table_load", "load_p0") {
qt_sql """ SELECT COUNT(*) FROM ${tableName} """
}
-}
\ No newline at end of file
+}
diff --git a/regression-test/suites/load_p0/broker_load/test_s3_load.groovy b/regression-test/suites/load_p0/broker_load/test_s3_load.groovy
index af9975473d1..b6abb2bd9b8 100644
--- a/regression-test/suites/load_p0/broker_load/test_s3_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_s3_load.groovy
@@ -16,7 +16,9 @@
// under the License.
suite("test_s3_load", "load_p0") {
-
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
sql "create workload group if not exists broker_load_test properties (
'cpu_share'='1024'); "
sql "set workload_group=broker_load_test;"
@@ -98,71 +100,71 @@ suite("test_s3_load", "load_p0") {
]
/* ========================================================== normal ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(K00,K01,K02,K03,K04,K05,K06,K07,K08,K09,K10,K11,K12,K13,K14,K15,K16,K17,K18)", "", "", "", "", ""))
/* ========================================================== error ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "", true))
/* ========================================================== wrong column sep ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"csv\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "", true))
/* ========================================================== wrong line delim ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\t\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "", true))
/* ========================================================== strict mode ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "","", true).addProperties("strict_mode", "true"))
/* ========================================================== timezone ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'))", "","").addProperties("timezone", "Asia/Shanghai"))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'))", "","").addProperties("timezone", "America/Chicago"))
/* ========================================================== compress type ========================================================== */
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
@@ -172,20 +174,20 @@ suite("test_s3_load", "load_p0") {
/*========================================================== json ==========================================================*/
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.json",
"${table}", "", "", "FORMAT AS \"json\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "PROPERTIES(\"strip_outer_array\" = \"true\", \"fuzzy_parse\" = \"true\")"))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_by_line.json",
"${table}", "", "", "FORMAT AS \"JSON\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "PROPERTIES(\"read_json_by_line\" = \"true\")"))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.parq",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.parq",
"${table}", "", "", "FORMAT AS \"parquet\"",
"(K00,K01,K02,K03,K04,K05,K06,K07,K08,K09,K10,K11,K12,K13,K14,K15,K16,K17,K18)", "", "", "", "", ""))
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.orc",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.orc",
"${table}", "", "", "FORMAT AS \"orc\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
@@ -217,8 +219,8 @@ suite("test_s3_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing",
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}",
"use_path_style" = "$attributes.usePathStyle"
)
${prop}
diff --git a/regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy b/regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy
index b1d1782b501..677cce88f06 100644
--- a/regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy
@@ -17,7 +17,8 @@
// under the License.
suite("test_s3_load_with_load_parallelism", "load_p0") {
-
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
def tableName = "test_load_parallelism"
sql """ DROP TABLE IF EXISTS ${tableName} """
@@ -74,8 +75,8 @@ suite("test_s3_load_with_load_parallelism", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
)
${prop}
"""
diff --git a/regression-test/suites/load_p0/broker_load/test_seq_load.groovy b/regression-test/suites/load_p0/broker_load/test_seq_load.groovy
index 0c94467c6a5..da91277b3fa 100644
--- a/regression-test/suites/load_p0/broker_load/test_seq_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_seq_load.groovy
@@ -16,7 +16,6 @@
// under the License.
suite("test_seq_load", "load_p0") {
-
def tableName = "uniq_tbl_basic_seq"
sql """ DROP TABLE IF EXISTS ${tableName} """
@@ -83,7 +82,7 @@ suite("test_seq_load", "load_p0") {
"""
def label = UUID.randomUUID().toString().replace("-", "0")
- def path = "s3://doris-build-1308700295/regression/load/data/basic_data.csv"
+ def path = "s3://${getS3BucketName()}/regression/load/data/basic_data.csv"
def format_str = "CSV"
def ak = getS3AK()
def sk = getS3SK()
@@ -101,8 +100,8 @@ suite("test_seq_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${getS3Endpoint()}",
+ "AWS_REGION" = "${getS3Region()}"
)
properties(
"use_new_load_scan_node" = "true"
diff --git a/regression-test/suites/load_p2/broker_load/test_broker_load.groovy b/regression-test/suites/load_p2/broker_load/test_broker_load.groovy
index 75aa0a9aa9b..f2b4af71d5d 100644
--- a/regression-test/suites/load_p2/broker_load/test_broker_load.groovy
+++ b/regression-test/suites/load_p2/broker_load/test_broker_load.groovy
@@ -16,7 +16,9 @@
// under the License.
suite("test_broker_load_p2", "p2") {
-
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
def tables = ["part",
"upper_case",
"reverse",
@@ -49,37 +51,37 @@ suite("test_broker_load_p2", "p2") {
"orc_s3_case9", // table column uppercase * load column
lowercase * orc file uppercase
"csv_s3_case_line_delimiter" // csv format table with
special line delimiter
]
- def paths = ["s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/path/*/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_100k_rows.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/line_delimiter/lineitem_0x7.csv.gz"
+ def paths = ["s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/path/*/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_100k_rows.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/line_delimiter/lineitem_0x7.csv.gz"
]
def columns_list = ["""p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment""",
"""p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment""",
@@ -194,37 +196,37 @@ suite("test_broker_load_p2", "p2") {
"\\N"
]
- def task_info = ["cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0",
- "cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0"
+ def task_info = ["cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0",
+ "cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0"
]
def error_msg = ["",
@@ -291,8 +293,8 @@ suite("test_broker_load_p2", "p2") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
)
properties(
"use_new_load_scan_node" = "true"
@@ -384,11 +386,11 @@ suite("test_broker_load_p2", "p2") {
def label_22666 = "part_" +
UUID.randomUUID().toString().replace("-", "0")
sql """
LOAD LABEL ${label_22666} (
- DATA INFILE("s3://doris-build-1308700295/regression/load/data/part0.parquet")
+ DATA INFILE("s3://${s3BucketName}/regression/load/data/part0.parquet")
INTO TABLE ${tbl_22666}
FORMAT AS "PARQUET"
(p_partkey, p_name, p_mfgr),
- DATA INFILE("s3://doris-build-1308700295/regression/load/data/part1.parquet")
+ DATA INFILE("s3://${s3BucketName}/regression/load/data/part1.parquet")
INTO TABLE ${tbl_22666}
FORMAT AS "PARQUET"
(p_partkey, p_brand, p_type)
@@ -396,8 +398,8 @@ suite("test_broker_load_p2", "p2") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
);
"""
diff --git a/regression-test/suites/load_p2/broker_load/test_parquet_large_metadata_load.groovy b/regression-test/suites/load_p2/broker_load/test_parquet_large_metadata_load.groovy
index 70e0681d0cb..6c1c7b68473 100644
--- a/regression-test/suites/load_p2/broker_load/test_parquet_large_metadata_load.groovy
+++ b/regression-test/suites/load_p2/broker_load/test_parquet_large_metadata_load.groovy
@@ -16,10 +16,11 @@
// under the License.
suite("test_parquet_large_metadata_load_p2", "p2") {
-
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
def tables = ["parquet_large_metadata_100mb" // metadata size more than
100MB
]
- def paths = ["s3://doris-build-1308700295/regression/load/metadata/parquet_large_metadata_100mb.parquet"
+ def paths = ["s3://${getS3BucketName()}/regression/load/metadata/parquet_large_metadata_100mb.parquet"
]
String ak = getS3AK()
String sk = getS3SK()
@@ -27,10 +28,10 @@ suite("test_parquet_large_metadata_load_p2", "p2") {
def expect_tvf_result = """[[2, 8], [2, 8], [2, 8], [2, 8], [2, 8]]"""
String[][] tvf_result = sql """select `1`,`2` from s3(
- "uri" =
"https://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/metadata/parquet_large_metadata_100mb.parquet",
+ "uri" =
"https://${getS3BucketName()}.${getS3Endpoint()}/regression/load/metadata/parquet_large_metadata_100mb.parquet",
"s3.access_key" = "$ak",
"s3.secret_key" = "$sk",
- "s3.region" = "ap-beijing",
+ "s3.region" = "${s3Region}",
"format" = "parquet"
) order by `1`,`2` limit 5;
"""
@@ -47,8 +48,8 @@ suite("test_parquet_large_metadata_load_p2", "p2") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
)
PROPERTIES
(
@@ -59,7 +60,7 @@ suite("test_parquet_large_metadata_load_p2", "p2") {
}
def etl_info = ["unselected.rows=0; dpp.abnorm.ALL=0; dpp.norm.ALL=45000"]
- def task_info = ["cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400;
max_filter_ratio:0.0"]
+ def task_info = ["cluster:${s3Endpoint}; timeout(s):14400;
max_filter_ratio:0.0"]
def error_msg = [""]
// test unified load
if (enabled != null && enabled.equalsIgnoreCase("true")) {
diff --git a/regression-test/suites/load_p2/broker_load/test_s3_load_properties.groovy b/regression-test/suites/load_p2/broker_load/test_s3_load_properties.groovy
index b837f28f171..fbb0df3b002 100644
--- a/regression-test/suites/load_p2/broker_load/test_s3_load_properties.groovy
+++ b/regression-test/suites/load_p2/broker_load/test_s3_load_properties.groovy
@@ -16,7 +16,9 @@
// under the License.
suite("test_s3_load_properties", "p2") {
-
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
sql "create workload group if not exists broker_load_test properties (
'cpu_share'='1024'); "
sql "set workload_group=broker_load_test;"
@@ -55,183 +57,183 @@ suite("test_s3_load_properties", "p2") {
/* ========================================================== normal ========================================================== */
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"csv\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)", "", "", "", "", ""))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(K00,K01,K02,K03,K04,K05,K06,K07,K08,K09,K10,K11,K12,K13,K14,K15,K16,K17,K18)", "", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"csv\"",
"(K00,K01,K02,K03,K04,K05,K06,k07,K08,K09,K10,K11,K12,K13,K14,K15,K16,K17,K18)", "", "", "SET (K19=to_bitmap(k04),K20=HLL_HASH(k04),K21=TO_QUANTILE_STATE(K04,1.0),Kd19=to_bitmap(K05),kd20=HLL_HASH(K05),KD21=TO_QUANTILE_STATE(K05,1.0))", "", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(K00,K01,K02,K03,K04,K05,K06,K07,K08,K09,K10,K11,K12,K13,K14,K15,K16,K17)", "", "", "", "", ""))
}
// TODO: should be success ?
// for (String table : basicTables) {
-// attributesList.add(new LoadAttributes("s3://cos.ap-beijing.myqcloud.com/doris-build-1308700295/regression/load/data/basic_data.csv",
+// attributesList.add(new LoadAttributes("s3://${s3Endpoint}/${s3BucketName}/regression/load/data/basic_data.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "", "", "").withPathStyle())
// }
//
-// attributesList.add(new LoadAttributes("s3://cos.ap-beijing.myqcloud.com/doris-build-1308700295/regression/load/data/basic_data.csv",
+// attributesList.add(new LoadAttributes("s3://${s3Endpoint}/${s3BucketName}/regression/load/data/basic_data.csv",
// "agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"csv\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "").withPathStyle())
//
// for (String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://cos.ap-beijing.myqcloud.com/doris-build-1308700295/regression/load/data/basic_array_data.csv",
+// attributesList.add(new LoadAttributes("s3://${s3Endpoint}/${s3BucketName}/regression/load/data/basic_array_data.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "", "").withPathStyle())
// }
//
// for (String table : basicTables) {
-// attributesList.add(new LoadAttributes("s3://cos.ap-beijing.myqcloud.com/doris-build-1308700295/regression/load/data/basic_data.csv",
+// attributesList.add(new LoadAttributes("s3://${s3Endpoint}/${s3BucketName}/regression/load/data/basic_data.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "", "", "").withPathStyle())
// }
//
-// attributesList.add(new LoadAttributes("s3://cos.ap-beijing.myqcloud.com/doris-build-1308700295/regression/load/data/basic_data.csv",
+// attributesList.add(new LoadAttributes("s3://${s3Endpoint}/${s3BucketName}/regression/load/data/basic_data.csv",
// "agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"csv\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "").withPathStyle())
//
// for (String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://cos.ap-beijing.myqcloud.com/doris-build-1308700295/regression/load/data/basic_array_data.csv",
+// attributesList.add(new LoadAttributes("s3://${s3Endpoint}/${s3BucketName}/regression/load/data/basic_array_data.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "", "").withPathStyle())
// }
/* ========================================================== error ========================================================== */
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "", true))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "", true))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data_with_errors.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data_with_errors.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"csv\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)", "", "", "", "", "", true))
}
// has problem, should be success
// for(String table: basicTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "", "","").addProperties("max_filter_ratio", "0.5"))
// }
//
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
// "agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "").addProperties("max_filter_ratio", "0.5"))
//
// for(String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data_with_errors.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "","").addProperties("max_filter_ratio", "0.5"))
// }
// for(String table: basicTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "", "","", true).addProperties("max_filter_ratio", "0.4"))
// }
//
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
// "agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "", true).addProperties("max_filter_ratio", "0.4"))
//
// for(String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data_with_errors.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "","", true).addProperties("max_filter_ratio", "0.4"))
// }
// skip lines
// for(String table: basicTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "", "","").addProperties("skip_lines", "10"))
// }
//
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
// "agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
// "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "").addProperties("skip_lines", "10"))
//
// for(String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data_with_errors.csv",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data_with_errors.csv",
// "${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "","").addProperties("skip_lines", "10"))
// }
/* ========================================================== wrong column sep ========================================================== */
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"csv\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "", true))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "", true))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)", "", "", "", "", "", true))
}
/* ========================================================== wrong line delim ========================================================== */
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\t\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "", "", "", true))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\t\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)", "", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))", "", "", true))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv",
"${table}", "LINES TERMINATED BY \"\t\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)", "", "", "", "", "", true))
}
/* ========================================================== strict mode
========================================================== */
for(String table: basicTables) {
- attributesList.add(new
LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+ attributesList.add(new
LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED
BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "","", true).addProperties("strict_mode", "true"))
}
- attributesList.add(new
LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_with_errors.csv",
+ attributesList.add(new
LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_with_errors.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED
BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET
(k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", "", true).addProperties("strict_mode","true"))
for(String table : arrayTables) {
- attributesList.add(new
LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data_with_errors.csv",
+ attributesList.add(new
LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data_with_errors.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED
BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "","", true).addProperties("strict_mode", "true"))
}
@@ -239,131 +241,131 @@ suite("test_s3_load_properties", "p2") {
/* ========================================================== timezone ========================================================== */
for(String table: basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'))", "","").addProperties("timezone", "Asia/Shanghai"))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'),k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", "").addProperties("timezone", "Asia/Shanghai"))
for(String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'))", "","").addProperties("timezone", "Asia/Shanghai"))
}
for(String table: basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'))", "","").addProperties("timezone", "America/Chicago"))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'),k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", "").addProperties("timezone", "America/Chicago"))
for(String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "SET (k00=unix_timestamp('2023-09-01 12:00:00'))", "","").addProperties("timezone", "America/Chicago"))
}
/* ========================================================== compress type ========================================================== */
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"csv\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv.gz",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", ""))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv.bz2",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", ""))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv.lz4",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", ""))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv.gz",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv.gz",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", ""))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv.bz2",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv.bz2",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", ""))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"agg_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\" ", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.csv.lz4",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.csv.lz4",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", ""))
}
@@ -371,7 +373,7 @@ suite("test_s3_load_properties", "p2") {
/*========================================================== order by ==========================================================*/
for (String table : uniqTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", "ORDER BY k01"))
}
@@ -379,71 +381,71 @@ suite("test_s3_load_properties", "p2") {
/*========================================================== json ==========================================================*/
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.json",
"${table}", "", "", "FORMAT AS \"json\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", "PROPERTIES(\"strip_outer_array\" = \"true\", \"fuzzy_parse\" = \"true\")"))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.json",
"agg_tbl_basic", "", "", "FORMAT AS \"json\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", "PROPERTIES(\"strip_outer_array\" = \"true\", \"fuzzy_parse\" = \"true\")"))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.json",
"${table}", "", "", "FORMAT AS \"json\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", "PROPERTIES(\"strip_outer_array\" = \"true\", \"fuzzy_parse\" = \"true\")"))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_by_line.json",
"${table}", "", "", "FORMAT AS \"JSON\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", "PROPERTIES(\"read_json_by_line\" = \"true\")"))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_by_line.json",
"agg_tbl_basic", "", "", "FORMAT AS \"JSON\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", "PROPERTIES(\"read_json_by_line\" = \"true\")"))
for (String table : arrayTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data_by_line.json",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data_by_line.json",
"${table}", "", "", "FORMAT AS \"JSON\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
"", "", "", "", "PROPERTIES(\"read_json_by_line\" = \"true\")"))
}
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.parq",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.parq",
"${table}", "", "", "FORMAT AS \"parquet\"",
"(K00,K01,K02,K03,K04,K05,K06,K07,K08,K09,K10,K11,K12,K13,K14,K15,K16,K17,K18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.parq",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.parq",
"agg_tbl_basic", "", "", "FORMAT AS \"PARQUET\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
// for (String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.parq",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.parq",
// "${table}", "", "", "FORMAT AS \"parquet\"",
// "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "", ""))
// }
for (String table : basicTables) {
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.orc",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.orc",
"${table}", "", "", "FORMAT AS \"orc\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "", "", ""))
}
- attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.orc",
+ attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data.orc",
"agg_tbl_basic", "", "", "FORMAT AS \"ORC\"",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
"", "", "SET (k19=to_bitmap(k04),k20=HLL_HASH(k04),k21=TO_QUANTILE_STATE(k04,1.0),kd19=to_bitmap(k05),kd20=HLL_HASH(k05),kd21=TO_QUANTILE_STATE(k05,1.0))",
"", ""))
// for (String table : arrayTables) {
-// attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_array_data.parq",
+// attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_array_data.parq",
// "${table}", "", "", "FORMAT AS \"parquet\"",
// "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17)",
// "", "", "", "", ""))
// }
for(String table : uniqTables) {
- def attributes = new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data_delete.csv",
+ def attributes = new LoadAttributes("s3://${s3BucketName}/regression/load/data/basic_data_delete.csv",
"${table}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "",
"(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,__DEL__)",
"", "", "", "", "DELETE ON __DEL__=true")
attributes.dataDesc.mergeType = "MERGE"
@@ -482,8 +484,8 @@ suite("test_s3_load_properties", "p2") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing",
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}",
"use_path_style" = "$attributes.usePathStyle",
"provider" = "${getS3Provider()}"
)
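Note on the pattern: every change in this suite is the same mechanical substitution, replacing the hardcoded s3://doris-build-1308700295 bucket and the fixed COS endpoint/region with values resolved from the regression config. A minimal Groovy sketch of the resulting load shape, assuming the helpers visible in this diff (getS3BucketName, getS3Endpoint, getS3Region, getS3AK, getS3SK); the target table some_table and the label prefix are illustrative, not taken from the suite:

    // Sketch only: mirrors the parameterized load pattern used throughout this suite.
    def s3BucketName = getS3BucketName()   // resolved from regression config, not hardcoded
    def s3Endpoint   = getS3Endpoint()
    def s3Region     = getS3Region()
    def ak = getS3AK()
    def sk = getS3SK()
    def label = "load_" + UUID.randomUUID().toString().replace("-", "0")
    sql """
        LOAD LABEL ${label} (
            DATA INFILE("s3://${s3BucketName}/regression/load/data/basic_data.csv")
            INTO TABLE some_table
            COLUMNS TERMINATED BY ","
        )
        WITH S3 (
            "AWS_ACCESS_KEY" = "${ak}",
            "AWS_SECRET_KEY" = "${sk}",
            "AWS_ENDPOINT" = "${s3Endpoint}",
            "AWS_REGION" = "${s3Region}"
        )
    """

With this shape, pointing the regression run at a different S3-compatible store only requires changing the config, not the test sources.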
diff --git a/regression-test/suites/load_p2/broker_load/tvf/test_tvf_based_broker_load.groovy b/regression-test/suites/load_p2/broker_load/tvf/test_tvf_based_broker_load.groovy
index dffc2acf49c..642487d093a 100644
--- a/regression-test/suites/load_p2/broker_load/tvf/test_tvf_based_broker_load.groovy
+++ b/regression-test/suites/load_p2/broker_load/tvf/test_tvf_based_broker_load.groovy
@@ -16,7 +16,9 @@
// under the License.
suite("test_tvf_based_broker_load", "p2") {
-
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
def tables = ["part",
"upper_case",
"reverse",
@@ -50,38 +52,38 @@ suite("test_tvf_based_broker_load", "p2") {
"orc_s3_case9", // table column uppercase * load column
lowercase * orc file uppercase
"csv_s3_case_line_delimiter" // csv format table with
special line delimiter
]
- def paths = ["s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/path/*/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- "s3://doris-build-1308700295/regression/load/data/part*",
- // "s3://doris-build-1308700295/regression/load/data/random_all_types/part*", // just ignore it, parquet_case9 can't support complex type
- "s3://doris-build-1308700295/regression/load/data/orc/hits_100k_rows.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_lowercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/load/data/orc/hits_10k_rows_uppercase.orc",
- "s3://doris-build-1308700295/regression/line_delimiter/lineitem_0x7.csv.gz"
+ def paths = ["s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/path/*/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ "s3://${s3BucketName}/regression/load/data/part*",
+ // "s3://${s3BucketName}/regression/load/data/random_all_types/part*", // just ignore it, parquet_case9 can't support complex type
+ "s3://${s3BucketName}/regression/load/data/orc/hits_100k_rows.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_lowercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/load/data/orc/hits_10k_rows_uppercase.orc",
+ "s3://${s3BucketName}/regression/line_delimiter/lineitem_0x7.csv.gz"
]
def columns_list = ["""p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment""",
"""p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment""",
@@ -230,8 +232,8 @@ suite("test_tvf_based_broker_load", "p2") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
)
"""
logger.info("Submit load with lable: $uuid, table: $table, path:
$path")
@@ -240,7 +242,7 @@ suite("test_tvf_based_broker_load", "p2") {
}
def etl_info = ["unselected.rows=0; dpp.abnorm.ALL=0; dpp.norm.ALL=200000"]
- def task_info = ["cluster:cos.ap-beijing.myqcloud.com; timeout(s):14400; max_filter_ratio:0.0"]
+ def task_info = ["cluster:${s3Endpoint}; timeout(s):14400; max_filter_ratio:0.0"]
def error_msg = [""]
// test load
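Deriving the expected task_info from the configured endpoint keeps this assertion valid on any S3-compatible backend instead of only the COS host it used to hardcode. A sketch of the check, assuming the suite compares SHOW LOAD output against these strings (the actual comparison helper sits outside this hunk; uuid is the label submitted above):

    // Sketch only: illustrates how the endpoint-derived expectation could be asserted.
    def rows = sql "SHOW LOAD WHERE LABEL = '${uuid}'"
    // The task_info column should mention the configured endpoint, whatever backend is in use.
    assert rows[0].toString().contains("cluster:${s3Endpoint}")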
diff --git a/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy b/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy
index fe536cd2469..c2dab803b7d 100644
--- a/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy
+++ b/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy
@@ -28,7 +28,7 @@ suite("stress_test_diff_date_list", "p2,nonConcurrent") {
// get doris-db from s3
def dirPath = context.file.parent
def fileName = "doris-dbgen"
- def fileUrl = "http://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
+ def fileUrl = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
def filePath = Paths.get(dirPath, fileName)
if (!Files.exists(filePath)) {
new URL(fileUrl).withInputStream { inputStream ->
diff --git a/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy b/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy
index af53c945f59..4e11f09fbf7 100644
--- a/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy
+++ b/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy
@@ -28,7 +28,7 @@ suite("stress_test_same_date_range", "p2,nonConcurrent") {
// get doris-db from s3
def dirPath = context.file.parent
def fileName = "doris-dbgen"
- def fileUrl = "http://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
+ def fileUrl = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
def filePath = Paths.get(dirPath, fileName)
if (!Files.exists(filePath)) {
new URL(fileUrl).withInputStream { inputStream ->
diff --git a/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy b/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy
index 49db0f3e9a8..009ed6fc02e 100644
--- a/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy
+++ b/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy
@@ -26,7 +26,7 @@ suite("stress_test_two_stream_load", "p2,nonConcurrent") {
// get doris-db from s3
def dirPath = context.file.parent
def fileName = "doris-dbgen"
- def fileUrl = "http://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
+ def fileUrl = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
def filePath = Paths.get(dirPath, fileName)
if (!Files.exists(filePath)) {
new URL(fileUrl).withInputStream { inputStream ->
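All three auto-partition stress suites share this download-if-missing prelude; the only change is that the doris-dbgen URL is now assembled from the configured bucket and endpoint in virtual-hosted style (bucket.endpoint). A self-contained Groovy sketch of the pattern; the closure body is an assumption, since each hunk ends before it:

    import java.nio.file.Files
    import java.nio.file.Paths

    def dirPath  = context.file.parent
    def fileName = "doris-dbgen"
    // Virtual-hosted style URL: <bucket>.<endpoint>/<key>, as in the replaced lines above.
    def fileUrl  = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
    def filePath = Paths.get(dirPath, fileName)
    if (!Files.exists(filePath)) {
        new URL(fileUrl).withInputStream { inputStream ->
            Files.copy(inputStream, filePath)   // assumption: the suite streams the download to filePath
        }
    }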
diff --git a/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy b/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy
index 5bfa58abd62..7f89c0acb39 100644
--- a/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy
+++ b/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy
@@ -16,7 +16,9 @@
// under the License.
suite("test_update_rows_and_partition_first_load", "p2") {
-
+ def s3BucketName = getS3BucketName()
+ def s3Endpoint = getS3Endpoint()
+ def s3Region = getS3Region()
String ak = getS3AK()
String sk = getS3SK()
String enabled = context.config.otherConfigs.get("enableBrokerLoad")
@@ -88,24 +90,24 @@ suite("test_update_rows_and_partition_first_load", "p2") {
def label = "part_" + UUID.randomUUID().toString().replace("-", "0")
sql """
LOAD LABEL ${label} (
- DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_1.csv")
+ DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_1.csv")
INTO TABLE update_rows_test1
COLUMNS TERMINATED BY ",",
- DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_2.csv")
+ DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_2.csv")
INTO TABLE update_rows_test2
COLUMNS TERMINATED BY ",",
- DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_1.csv")
+ DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_1.csv")
INTO TABLE partition_test1
COLUMNS TERMINATED BY ",",
- DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_2.csv")
+ DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_2.csv")
INTO TABLE partition_test2
COLUMNS TERMINATED BY ","
INTO TABLE partition_test2
COLUMNS TERMINATED BY ","
)
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
- "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
- "AWS_REGION" = "ap-beijing"
+ "AWS_ENDPOINT" = "${s3Endpoint}",
+ "AWS_REGION" = "${s3Region}"
);
"""
diff --git a/regression-test/suites/tpcds_sf1000_p2/load.groovy b/regression-test/suites/tpcds_sf1000_p2/load.groovy
index aaf4fd54d71..9bf888e93b0 100644
--- a/regression-test/suites/tpcds_sf1000_p2/load.groovy
+++ b/regression-test/suites/tpcds_sf1000_p2/load.groovy
@@ -21,12 +21,13 @@
*
*/
suite("load") {
+ def s3Region = getS3Region()
restore {
location "s3://${getS3BucketName()}/regression/tpcds/sf1000"
ak "${getS3AK()}"
sk "${getS3SK()}"
endpoint "http://${getS3Endpoint()}"
- region "ap-beijing"
+ region "${s3Region}"
repository "tpcds_backup"
snapshot "tpcds_customer"
timestamp "2022-03-31-10-16-46"
@@ -40,7 +41,7 @@ suite("load") {
ak "${getS3AK()}"
sk "${getS3SK()}"
endpoint "http://${getS3Endpoint()}"
- region "ap-beijing"
+ region "${s3Region}"
repository "tpcds_backup"
snapshot "tpcds"
timestamp "2022-03-30-12-22-31"