This is an automated email from the ASF dual-hosted git repository.
morrysnow pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new a68ab2bd252 [fix](case) fix ingestion load cases (#55361)
a68ab2bd252 is described below
commit a68ab2bd252ccfcc022bf8cc289a18ffe8d608d6
Author: shuke <[email protected]>
AuthorDate: Thu Aug 28 16:37:56 2025 +0800
[fix](case) fix ingestion load cases (#55361)
---
.../doris/regression/action/HttpCliAction.groovy | 7 +++-
.../ingestion_load/test_ingestion_load.groovy | 18 +++++-----
.../test_ingestion_load_alter_column.groovy | 22 ++++++-------
.../test_ingestion_load_alter_partition.groovy | 38 +++++++++++-----------
.../test_ingestion_load_drop_table.groovy | 18 +++++-----
.../test_ingestion_load_multi_table.groovy | 18 +++++-----
.../test_ingestion_load_with_inverted_index.groovy | 14 ++++----
.../test_ingestion_load_with_partition.groovy | 18 +++++-----
8 files changed, 79 insertions(+), 74 deletions(-)
diff --git
a/regression-test/framework/src/main/groovy/org/apache/doris/regression/action/HttpCliAction.groovy
b/regression-test/framework/src/main/groovy/org/apache/doris/regression/action/HttpCliAction.groovy
index 16fa452c6eb..b156a315151 100644
---
a/regression-test/framework/src/main/groovy/org/apache/doris/regression/action/HttpCliAction.groovy
+++
b/regression-test/framework/src/main/groovy/org/apache/doris/regression/action/HttpCliAction.groovy
@@ -26,7 +26,9 @@ import org.apache.http.client.methods.HttpDelete
import org.apache.http.client.methods.HttpGet
import org.apache.http.entity.StringEntity
import org.apache.http.entity.ContentType
+import org.apache.http.impl.client.DefaultHttpClient
import org.apache.http.impl.client.HttpClients
+import org.apache.http.impl.client.LaxRedirectStrategy
import org.apache.http.util.EntityUtils
import org.apache.http.client.methods.HttpPost
import org.apache.tools.ant.taskdefs.condition.Http;
@@ -102,7 +104,10 @@ class HttpCliAction implements SuiteAction {
@Override
void run() {
try {
- def result = HttpClients.createDefault().withCloseable { client ->
+ def result = HttpClients.custom()
+ .setRedirectStrategy(new LaxRedirectStrategy()) // allow redirect to fe master
+ .build()
+ .withCloseable { client ->
uri = "http://$endpoint" + uri
log.info("url : " + uri)
log.info("body: " + body)
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
index 18bbd913e91..91e20070c09 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load', 'p0') {
+suite('test_ingestion_load', 'p0,external') {
def testIngestLoadJob = { testTable, loadLabel, String dataFile ->
@@ -27,12 +27,12 @@ suite('test_ingestion_load', 'p0') {
sql "CLEAN LABEL FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String reqBody =
"""{
@@ -106,9 +106,9 @@ suite('test_ingestion_load', 'p0') {
}
}
- max_try_milli_secs = 120000
+ def max_try_milli_secs = 120000
while (max_try_milli_secs) {
- result = sql "show load where label = '${loadLabel}'"
+ def result = sql "show load where label = '${loadLabel}'"
if (result[0][2] == "FINISHED") {
sql "sync"
qt_select "select * from ${testTable} order by 1"
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
index 0d040492da2..89be972b5bf 100644
---
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
+++
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load_alter_column', 'p0') {
+suite('test_ingestion_load_alter_column', 'p0,external') {
def testIngestLoadJob = { testTable, loadLabel, dataFile, alterAction ->
@@ -27,12 +27,12 @@ suite('test_ingestion_load_alter_column', 'p0') {
sql "CLEAN LABEL FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String reqBody =
"""{
@@ -108,9 +108,9 @@ suite('test_ingestion_load_alter_column', 'p0') {
alterAction.call()
- max_try_milli_secs = 120000
+ def max_try_milli_secs = 120000
while (max_try_milli_secs) {
- result = sql "show load where label = '${loadLabel}'"
+ def result = sql "show load where label = '${loadLabel}'"
if (result[0][2] == "CANCELLED") {
msg = result[0][7]
logger.info("err msg: " + msg)
@@ -199,8 +199,8 @@ suite('test_ingestion_load_alter_column', 'p0') {
})
} finally {
- sql "DROP TABLE ${tableName1}"
- sql "DROP TABLE ${tableName2}"
+ //sql "DROP TABLE ${tableName1}"
+ //sql "DROP TABLE ${tableName2}"
}
}
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
index 61c1587ea53..83492d1bf1c 100644
---
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
+++
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load_alter_partition', 'p0') {
+suite('test_ingestion_load_alter_partition', 'p0,external') {
def testIngestLoadJob = { testTable, loadLabel, dataFiles, alterAction ->
@@ -27,12 +27,12 @@ suite('test_ingestion_load_alter_partition', 'p0') {
sql "CLEAN LABEL ${loadLabel} FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String reqBody =
"""{
@@ -43,7 +43,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
"properties": {}
}"""
- resultFileNames = []
+ def resultFileNames = []
httpTest {
endpoint context.config.feHttpAddress
@@ -63,7 +63,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
def index = tableMeta["${testTable}"].indexes[0]
indexId = index.indexId
schemaHash = index.schemaHash
- partitions = tableMeta["${testTable}"].partitionInfo.partitions
+ def partitions = tableMeta["${testTable}"].partitionInfo.partitions
for(partition in partitions) {
logger.info("partitionId: " + partition.partitionId)
resultFileNames.add("V1.${loadLabel}.${tableId}.${partition.partitionId}.${indexId}.${bucketId}.${schemaHash}.parquet")
@@ -71,7 +71,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
}
}
- etlResultFilePaths = []
+ def etlResultFilePaths = []
for(int i=0; i < dataFiles.size(); i++) {
Files.copy(Paths.get(dataFiles[i]),
Paths.get(context.config.dataPath +
"/load_p0/ingestion_load/${resultFileNames[i]}"),
StandardCopyOption.REPLACE_EXISTING)
@@ -115,9 +115,9 @@ suite('test_ingestion_load_alter_partition', 'p0') {
alterAction.call()
- max_try_milli_secs = 120000
+ def max_try_milli_secs = 120000
while (max_try_milli_secs) {
- result = sql "show load where label = '${loadLabel}'"
+ def result = sql "show load where label = '${loadLabel}'"
if (result[0][2] == "FINISHED") {
sql "sync"
qt_select "select c1, count(*) from ${testTable} group by c1
order by c1"
@@ -162,7 +162,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
)
"""
- def label = "test_ingestion_load_alter_partition_1"
+ def label = UUID.randomUUID().toString().replaceAll("-", "")
testIngestLoadJob.call(tableName1, label, [context.config.dataPath
+ '/load_p0/ingestion_load/data2-0.parquet', context.config.dataPath +
'/load_p0/ingestion_load/data2-1.parquet',context.config.dataPath +
'/load_p0/ingestion_load/data2-2.parquet',context.config.dataPath +
'/load_p0/ingestion_load/data2-3.parquet'], {
sql "alter table ${tableName1} drop partition p_20240901"
@@ -184,7 +184,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
)
"""
- label = "test_ingestion_load_alter_partition_2"
+ label = UUID.randomUUID().toString().replaceAll("-", "")
testIngestLoadJob.call(tableName2, label, [context.config.dataPath
+ '/load_p0/ingestion_load/data2-0.parquet', context.config.dataPath +
'/load_p0/ingestion_load/data2-1.parquet',context.config.dataPath +
'/load_p0/ingestion_load/data2-2.parquet',context.config.dataPath +
'/load_p0/ingestion_load/data2-3.parquet'], {
sql "alter table ${tableName2} add partition p_20240905 VALUES
[('2024-09-05'), ('2024-09-06'))"
@@ -206,7 +206,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
)
"""
- label = "test_ingestion_load_alter_partition_3"
+ label = UUID.randomUUID().toString().replaceAll("-", "")
testIngestLoadJob.call(tableName3, label, [context.config.dataPath
+ '/load_p0/ingestion_load/data2-0.parquet', context.config.dataPath +
'/load_p0/ingestion_load/data2-1.parquet',context.config.dataPath +
'/load_p0/ingestion_load/data2-2.parquet',context.config.dataPath +
'/load_p0/ingestion_load/data2-3.parquet'], {
sql "alter table ${tableName3} add temporary partition
tp_20240901 VALUES [('2024-09-01'), ('2024-09-02'))"
@@ -214,11 +214,11 @@ suite('test_ingestion_load_alter_partition', 'p0') {
})
} finally {
- sql "DROP TABLE ${tableName1}"
- sql "DROP TABLE ${tableName2}"
- sql "DROP TABLE ${tableName3}"
+// sql "DROP TABLE ${tableName1}"
+// sql "DROP TABLE ${tableName2}"
+// sql "DROP TABLE ${tableName3}"
}
}
-}
\ No newline at end of file
+}
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
index e348b9db9ff..1f0adb8c1c0 100644
---
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
+++
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load_drop_table', 'p0') {
+suite('test_ingestion_load_drop_table', 'p0,external') {
def testIngestLoadJob = { testTable, loadLabel, dataFile, alterAction ->
@@ -27,12 +27,12 @@ suite('test_ingestion_load_drop_table', 'p0') {
sql "CLEAN LABEL FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String reqBody =
"""{
@@ -108,9 +108,9 @@ suite('test_ingestion_load_drop_table', 'p0') {
alterAction.call()
- max_try_milli_secs = 120000
+ def max_try_milli_secs = 120000
while (max_try_milli_secs) {
- result = sql "show load where label = '${loadLabel}'"
+ def result = sql "show load where label = '${loadLabel}'"
if (result.size() == 0) {
break
} else {
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
index 4a4199a9b81..e536b57c204 100644
---
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
+++
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load_multi_table', 'p0') {
+suite('test_ingestion_load_multi_table', 'p0,external') {
def testIngestLoadJob = { loadLabel, testTable1, testTable2, dataFile1, dataFile2 ->
@@ -28,12 +28,12 @@ suite('test_ingestion_load_multi_table', 'p0') {
sql "CLEAN LABEL FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String resultFileName1 = ""
String resultFileName2 = ""
@@ -124,9 +124,9 @@ suite('test_ingestion_load_multi_table', 'p0') {
}
}
- max_try_milli_secs = 60000
+ def max_try_milli_secs = 60000
while (max_try_milli_secs) {
- result = sql "show load where label = '${loadLabel}'"
+ def result = sql "show load where label = '${loadLabel}'"
if (result[0][2] == "FINISHED") {
sql "sync"
qt_select "select * from ${testTable1} order by c_int"
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_inverted_index.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_inverted_index.groovy
index 7eed4bfdc58..15db777ddee 100644
---
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_inverted_index.groovy
+++
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_inverted_index.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load_with_inverted_index', 'p0') {
+suite('test_ingestion_load_with_inverted_index', 'p0,external') {
def testIngestLoadJob = { testTable, loadLabel, String dataFile ->
@@ -27,12 +27,12 @@ suite('test_ingestion_load_with_inverted_index', 'p0') {
sql "CLEAN LABEL FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String reqBody =
"""{
diff --git
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
index d6d00659b65..12a904f15d8 100644
---
a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
+++
b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
@@ -19,7 +19,7 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
-suite('test_ingestion_load_with_partition', 'p0') {
+suite('test_ingestion_load_with_partition', 'p0,external') {
def testIngestLoadJob = { testTable, loadLabel, dataFiles ->
@@ -27,12 +27,12 @@ suite('test_ingestion_load_with_partition', 'p0') {
sql "CLEAN LABEL FROM ${context.dbName}"
- Integer loadId = -1
- Integer tableId = -1
- Integer partitionId = -1
- Integer indexId = -1
- Integer bucketId = 0
- Integer schemaHash = -1
+ long loadId = -1
+ long tableId = -1
+ long partitionId = -1
+ long indexId = -1
+ long bucketId = 0
+ long schemaHash = -1
String reqBody =
"""{
@@ -63,7 +63,7 @@ suite('test_ingestion_load_with_partition', 'p0') {
def index = tableMeta["${testTable}"].indexes[0]
indexId = index.indexId
schemaHash = index.schemaHash
- partitions = tableMeta["${testTable}"].partitionInfo.partitions
+ def partitions = tableMeta["${testTable}"].partitionInfo.partitions
for(partition in partitions) {
logger.info("partitionId: " + partition.partitionId)
resultFileNames.add("V1.${loadLabel}.${tableId}.${partition.partitionId}.${indexId}.${bucketId}.${schemaHash}.parquet")
@@ -113,7 +113,7 @@ suite('test_ingestion_load_with_partition', 'p0') {
}
}
- max_try_milli_secs = 120000
+ def max_try_milli_secs = 120000
while (max_try_milli_secs) {
result = sql "show load where label = '${loadLabel}'"
if (result[0][2] == "FINISHED") {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]