This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 473225e670b [chore](case) do not use global var (#55280)
473225e670b is described below
commit 473225e670bda139cb343ccf9d496cd697fda8cf
Author: Yongqiang YANG <[email protected]>
AuthorDate: Tue Aug 26 17:55:43 2025 +0800
[chore](case) do not use global var (#55280)
---
.../suites/export/test_array_export.groovy | 24 +++++++++++-----------
.../load_p0/broker_load/test_array_load.groovy | 6 +++---
.../broker_load/test_broker_load_seq.groovy | 6 +++---
.../broker_load/test_broker_load_with_merge.groovy | 6 +++---
.../test_broker_load_with_partition.groovy | 10 ++++-----
.../broker_load/test_broker_load_with_where.groovy | 8 ++++----
.../test_broker_load_without_filepath.groovy | 6 +++---
.../test_broker_load_without_seq.groovy | 6 +++---
8 files changed, 36 insertions(+), 36 deletions(-)
diff --git a/regression-test/suites/export/test_array_export.groovy
b/regression-test/suites/export/test_array_export.groovy
index 4ce9786e997..a6c834cefc6 100644
--- a/regression-test/suites/export/test_array_export.groovy
+++ b/regression-test/suites/export/test_array_export.groovy
@@ -210,20 +210,20 @@ suite("test_array_export", "export") {
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
- hdfsDataDir = getHdfsDataDir()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
+ def hdfsDataDir = getHdfsDataDir()
// case2: test "select ...into outfile 'hdfs_path'"
try {
create_test_table.call(tableName)
- resultCount = sql "select count(*) from ${tableName}"
- currentTotalRows = resultCount[0][0]
+ def resultCount = sql "select count(*) from ${tableName}"
+ def currentTotalRows = resultCount[0][0]
- label = UUID.randomUUID().toString().replaceAll("-", "")
- select_out_file(tableName, hdfsDataDir + "/" + label +
"/export-data", "csv", brokerName, hdfsUser, hdfsPasswd)
+ def label = UUID.randomUUID().toString().replaceAll("-", "")
+ def result = select_out_file(tableName, hdfsDataDir + "/" + label
+ "/export-data", "csv", brokerName, hdfsUser, hdfsPasswd)
result = downloadExportFromHdfs(label + "/export-data")
check_download_result(result, currentTotalRows)
} finally {
@@ -234,13 +234,13 @@ suite("test_array_export", "export") {
try {
create_test_table.call(tableName)
- resultCount = sql "select count(*) from ${tableName}"
- currentTotalRows = resultCount[0][0]
+ def resultCount = sql "select count(*) from ${tableName}"
+ def currentTotalRows = resultCount[0][0]
- label = UUID.randomUUID().toString().replaceAll("-", "")
+ def label = UUID.randomUUID().toString().replaceAll("-", "")
export_to_hdfs.call(tableName, label, hdfsDataDir + "/" + label,
'', brokerName, hdfsUser, hdfsPasswd)
check_export_result(label)
- result = downloadExportFromHdfs(label + "/export-data")
+ def result = downloadExportFromHdfs(label + "/export-data")
check_download_result(result, currentTotalRows)
} finally {
try_sql("DROP TABLE IF EXISTS ${tableName}")
diff --git a/regression-test/suites/load_p0/broker_load/test_array_load.groovy
b/regression-test/suites/load_p0/broker_load/test_array_load.groovy
index fe2985aff4a..6ec6a601d0f 100644
--- a/regression-test/suites/load_p0/broker_load/test_array_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_array_load.groovy
@@ -246,9 +246,9 @@ suite("test_array_load", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName =getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName =getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_json_file_path = uploadToHdfs
"load_p0/broker_load/simple_object_array.json"
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/simple_array.csv"
def hdfs_orc_file_path = uploadToHdfs
"load_p0/broker_load/simple_array.orc"
diff --git
a/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
b/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
index 2529226382c..b9851c11789 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
@@ -95,9 +95,9 @@ suite("test_broker_load_seq", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/broker_load.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git
a/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
b/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
index eb76e4d389a..24bdee97613 100644
---
a/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
+++
b/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
@@ -174,9 +174,9 @@ suite("test_broker_load_with_merge", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/broker_load_with_merge.csv"
// case1: has delete on condition and without merge
diff --git
a/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
b/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
index 45f0cc50be7..3b1620e1358 100644
---
a/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
+++
b/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
@@ -108,7 +108,7 @@ suite("test_broker_load_with_partition", "load_p0") {
}
def check_load_result = {checklabel, testTablex ->
- max_try_milli_secs = 10000
+ def max_try_milli_secs = 10000
while(max_try_milli_secs) {
result = sql "show load where label = '${checklabel}'"
log.info("result: ${result}")
@@ -127,7 +127,7 @@ suite("test_broker_load_with_partition", "load_p0") {
}
def check_load_tmp_partition_result = {checklabel, testTablex ->
- max_try_milli_secs = 10000
+ def max_try_milli_secs = 10000
while(max_try_milli_secs) {
result = sql "show load where label = '${checklabel}'"
log.info("result: ${result}")
@@ -148,9 +148,9 @@ suite("test_broker_load_with_partition", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/broker_load_with_partition.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git
a/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
b/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
index dd05905e4bb..a058f10349e 100644
---
a/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
+++
b/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
@@ -99,7 +99,7 @@ suite("test_broker_load_with_where", "load_p0") {
}
def check_load_result = {checklabel, testTablex ->
- max_try_milli_secs = 10000
+ def max_try_milli_secs = 10000
while(max_try_milli_secs) {
result = sql "show load where label = '${checklabel}'"
if(result[0][2] == "FINISHED") {
@@ -125,9 +125,9 @@ suite("test_broker_load_with_where", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/broker_load_with_where.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git
a/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
b/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
index 2dae37c615b..c89ba78fc32 100644
---
a/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
+++
b/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
@@ -78,9 +78,9 @@ suite("test_broker_load_without_filepath", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/broker_load_without_filepath.csv"
try {
diff --git
a/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
b/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
index 9d34c0c0978..b5a487ae562 100644
---
a/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
+++
b/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
@@ -79,9 +79,9 @@ suite("test_broker_load_without_seq", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these cases as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs
"load_p0/broker_load/broker_load_without_seq.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]