This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new ec956e7859b [fix](regression test) fix case (#56729)
ec956e7859b is described below

commit ec956e7859bed4e20904d13b93854710df1d3b51
Author: Refrain <[email protected]>
AuthorDate: Sat Oct 11 09:53:57 2025 +0800

    [fix](regression test) fix case (#56729)
---
 regression-test/suites/csv_header_p0/test_csv_with_header.groovy | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/regression-test/suites/csv_header_p0/test_csv_with_header.groovy b/regression-test/suites/csv_header_p0/test_csv_with_header.groovy
index 965b4c06858..f58f973e556 100644
--- a/regression-test/suites/csv_header_p0/test_csv_with_header.groovy
+++ b/regression-test/suites/csv_header_p0/test_csv_with_header.groovy
@@ -132,19 +132,19 @@ suite("test_csv_with_header", "p0,external") {
         def hdfsFs = getHdfsFs()
         //[broker load] test normal
         label = UUID.randomUUID().toString().replaceAll("-", "")
-        def remote_csv_file = uploadToHdfs("csv_header_p0/"+format_csv_file)
+        def remote_csv_file = uploadToHdfs("csv_header_p0/csv.txt")
         def export_result = import_from_hdfs.call(testTable, label, remote_csv_file, format_csv, brokerName, hdfsUser, hdfsPasswd)
         check_import_result.call(label, testTable, expect_rows * 4)
 
         //[broker load] csv_with_names
         label = UUID.randomUUID().toString().replaceAll("-", "")
-        remote_csv_file = uploadToHdfs("csv_header_p0/"+format_csv_with_names_file)
+        remote_csv_file = uploadToHdfs("csv_header_p0/csv_with_names.txt")
         export_result = import_from_hdfs.call(testTable, label, remote_csv_file, format_csv_with_names, brokerName, hdfsUser, hdfsPasswd)
         check_import_result.call(label, testTable, expect_rows * 5)
 
          //[broker load] csv_with_names_and_types
         label = UUID.randomUUID().toString().replaceAll("-", "")
-        remote_csv_file = uploadToHdfs("csv_header_p0/"+format_csv_with_names_and_types_file)
+        remote_csv_file = uploadToHdfs("csv_header_p0/csv_with_names_and_types.txt")
         export_result = import_from_hdfs.call(testTable, label, remote_csv_file, format_csv_with_names_and_types, brokerName, hdfsUser, hdfsPasswd)
         check_import_result.call(label, testTable, expect_rows * 6)
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
