This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0-beta
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 704934c733564db63bf2c86fb867d9c333942a8c
Author: zgxme <[email protected]>
AuthorDate: Tue Jun 6 20:07:31 2023 +0800

    [fix](regression) fix export file test cases (#20463)
---
 .../org/apache/doris/regression/suite/Suite.groovy | 24 +++++++++++
 .../suites/export/test_array_export.groovy         | 48 +++++++++++++---------
 .../suites/export/test_map_export.groovy           | 15 ++++++-
 .../suites/export/test_struct_export.groovy        | 15 ++++++-
 .../suites/export_p2/test_export_with_s3.groovy    |  2 +-
 5 files changed, 80 insertions(+), 24 deletions(-)

diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 934a18b8dd..7106965929 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -381,6 +381,30 @@ class Suite implements GroovyInterceptable {
         String s3Url = "http://${s3BucketName}.${s3Endpoint}"
         return s3Url
     }
+
+    void scpFiles(String username, String host, String files, String filePath, boolean fromDst=true) {
+        String cmd = "scp -r ${username}@${host}:${files} ${filePath}"
+        if (!fromDst) {
+            cmd = "scp -r ${files} ${username}@${host}:${filePath}"
+        }
+        logger.info("Execute: ${cmd}".toString())
+        Process process = cmd.execute()
+        def code = process.waitFor()
+        Assert.assertEquals(0, code)
+    }
+
+    void sshExec(String username, String host, String cmd) {
+        String command = "ssh ${username}@${host} '${cmd}'"
+        def cmds = ["/bin/bash", "-c", command]
+        logger.info("Execute: ${cmds}".toString())
+        Process p = cmds.execute()
+        def errMsg = new StringBuilder()
+        def msg = new StringBuilder()
+        p.waitForProcessOutput(msg, errMsg)
+        assert errMsg.length() == 0: "error occurred!" + errMsg
+        assert p.exitValue() == 0
+    }
+
     void getBackendIpHttpPort(Map<String, String> backendId_to_backendIP, Map<String, String> backendId_to_backendHttpPort) {
         List<List<Object>> backends = sql("show backends");

diff --git a/regression-test/suites/export/test_array_export.groovy b/regression-test/suites/export/test_array_export.groovy
index e59d53f1cb..af17ab32fb 100644
--- a/regression-test/suites/export/test_array_export.groovy
+++ b/regression-test/suites/export/test_array_export.groovy
@@ -26,7 +26,6 @@ suite("test_array_export", "export") {
     StringBuilder strBuilder = new StringBuilder()
     strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
     strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")
-
     String command = strBuilder.toString()
     def process = command.toString().execute()
     def code = process.waitFor()
@@ -49,12 +48,15 @@ suite("test_array_export", "export") {
         logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
         return
     }
-    
+
     // define the table and out file path
     def tableName = "array_outfile_test"
     def outFilePath = """${context.file.parent}/test_array_export"""
+    def outFile = "/tmp"
+    def urlHost = ""
+    def csvFiles = ""
     logger.warn("test_array_export the outFilePath=" + outFilePath)
-    
+
     def create_test_table = {testTablex ->
         sql """ DROP TABLE IF EXISTS ${tableName} """
@@ -66,7 +68,7 @@ suite("test_array_export", "export") {
               `k4` ARRAY<BIGINT> NOT NULL COMMENT "",
               `k5` ARRAY<CHAR> NOT NULL COMMENT "",
              `k6` ARRAY<VARCHAR(20)> NULL COMMENT "",
-              `k7` ARRAY<DATE> NOT NULL COMMENT "", 
+              `k7` ARRAY<DATE> NOT NULL COMMENT "",
              `k8` ARRAY<DATETIME> NOT NULL COMMENT "",
              `k9` ARRAY<FLOAT> NOT NULL COMMENT "",
              `k10` ARRAY<DOUBLE> NOT NULL COMMENT "",
@@ -84,26 +86,25 @@ suite("test_array_export", "export") {
         assertTrue(result1.size() == 1)
         assertTrue(result1[0].size() == 1)
         assertTrue(result1[0][0] == 0, "Create table should update 0 rows")
-
         sql """ INSERT INTO ${tableName} VALUES
-                        (1, [1, 2, 3], [32767, 32768, 32769], [65534, 65535, 65536], ['a', 'b', 'c'], ["hello", "world"], 
+                        (1, [1, 2, 3], [32767, 32768, 32769], [65534, 65535, 65536], ['a', 'b', 'c'], ["hello", "world"],
                         ['2022-07-13'], ['2022-08-15 12:30:00'], [0.331111, 0.672222], [3.141592, 0.878787], [4.2222, 5.5555, 6.67])
                         """
-    
+
         sql """ INSERT INTO ${tableName} VALUES
-                        (2, [4, 5, 6], [32767, 32768, 32769], [65534, 65535, 65536], ['d', 'e', 'f'], ["good", "luck"], 
+                        (2, [4, 5, 6], [32767, 32768, 32769], [65534, 65535, 65536], ['d', 'e', 'f'], ["good", "luck"],
                         ['2022-07-13'], ['2022-08-15 15:59:59'], [0.333336, 0.666677], [3.141592, 0.878787], [4.22222, 5.5555555, 6.6666777])
                        """
     }
-    
+
     def export_to_hdfs = {exportTable, exportLable, hdfsPath, exportFormat, BrokerName, HdfsUserName, HdfsPasswd->
-        sql """ EXPORT TABLE ${exportTable} 
-                TO "${hdfsPath}" 
+        sql """ EXPORT TABLE ${exportTable}
+                TO "${hdfsPath}"
                 PROPERTIES (
                     "label" = "${exportLable}",
                     "column_separator"=",",
                     "format"="${exportFormat}"
-                ) 
+                )
                 WITH BROKER "${BrokerName}" (
                     "username"="${HdfsUserName}",
                     "password"="${HdfsPasswd}"
@@ -126,7 +127,7 @@ suite("test_array_export", "export") {
             )
         """
     }
-    
+
     def check_export_result = {checklabel->
         max_try_milli_secs = 15000
         while(max_try_milli_secs) {
@@ -155,7 +156,7 @@ suite("test_array_export", "export") {

     // case1: test "select ...into outfile ...."
     try {
         create_test_table.call(tableName)
-        
+
         qt_select_default """ SELECT * FROM ${tableName} t ORDER BY k1; """
         // check outfile
@@ -165,9 +166,15 @@ suite("test_array_export", "export") {
         } else {
             throw new IllegalStateException("""${outFilePath} already exists! """)
         }
-        sql """
-            SELECT * FROM ${tableName} t ORDER BY k1 INTO OUTFILE "file://${outFilePath}/";
+        result = sql """
+            SELECT * FROM ${tableName} t ORDER BY k1 INTO OUTFILE "file://${outFile}/";
         """
+        url = result[0][3]
+        urlHost = url.substring(8, url.indexOf("${outFile}"))
+        def filePrifix = url.split("${outFile}")[1]
+        csvFiles = "${outFile}${filePrifix}*.csv"
+        scpFiles ("root", urlHost, csvFiles, outFilePath);
+
         File[] files = path.listFiles()
         assert files.length == 1
         List<String> outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8);
@@ -186,14 +193,17 @@ suite("test_array_export", "export") {
             }
             path.delete();
         }
+        cmd = "rm -rf ${csvFiles}"
+        sshExec ("root", urlHost, cmd)
     }
-    
+
+
     if (enableHdfs()) {
         brokerName = getBrokerName()
         hdfsUser = getHdfsUser()
         hdfsPasswd = getHdfsPasswd()
         hdfsDataDir = getHdfsDataDir()
-        
+
         // case2: test "select ...into outfile 'hdfs_path'"
         try {
             create_test_table.call(tableName)
@@ -225,4 +235,4 @@ suite("test_array_export", "export") {
             try_sql("DROP TABLE IF EXISTS ${tableName}")
         }
     }
-}
+}
\ No newline at end of file

diff --git a/regression-test/suites/export/test_map_export.groovy b/regression-test/suites/export/test_map_export.groovy
index 362e512491..d6e3a2e197 100644
--- a/regression-test/suites/export/test_map_export.groovy
+++ b/regression-test/suites/export/test_map_export.groovy
@@ -78,6 +78,9 @@ suite("test_map_export", "export") {
     qt_select_count """SELECT COUNT(m) FROM ${testTable}"""

     def outFilePath = """${context.file.parent}/test_map_export"""
+    def outFile = "/tmp"
+    def urlHost = ""
+    def csvFiles = ""
     logger.info("test_map_export the outFilePath=" + outFilePath)
     // map select into outfile
     try {
@@ -87,9 +90,15 @@ suite("test_map_export", "export") {
         } else {
             throw new IllegalStateException("""${outFilePath} already exists! """)
         }
-        sql """
-            SELECT * FROM ${testTable} ORDER BY id INTO OUTFILE "file://${outFilePath}/";
+        result = sql """
+            SELECT * FROM ${testTable} ORDER BY id INTO OUTFILE "file://${outFile}/";
         """
+        url = result[0][3]
+        urlHost = url.substring(8, url.indexOf("${outFile}"))
+        def filePrifix = url.split("${outFile}")[1]
+        csvFiles = "${outFile}${filePrifix}*.csv"
+        scpFiles ("root", urlHost, csvFiles, outFilePath);
+
         File[] files = path.listFiles()
         assert files.length == 1
@@ -128,5 +137,7 @@ suite("test_map_export", "export") {
         }
         path.delete();
     }
+    cmd = "rm -rf ${csvFiles}"
+    sshExec ("root", urlHost, cmd)
 }
}

diff --git a/regression-test/suites/export/test_struct_export.groovy b/regression-test/suites/export/test_struct_export.groovy
index a1b6ccbe06..485270ccbf 100644
--- a/regression-test/suites/export/test_struct_export.groovy
+++ b/regression-test/suites/export/test_struct_export.groovy
@@ -80,6 +80,9 @@ suite("test_struct_export", "export") {
     qt_select_count """SELECT COUNT(k2), COUNT(k4) FROM ${testTable}"""

     def outFilePath = """${context.file.parent}/test_struct_export"""
+    def outFile = "/tmp"
+    def urlHost = ""
+    def csvFiles = ""
     logger.info("test_struct_export the outFilePath=" + outFilePath)
     // struct select into outfile
     try {
@@ -89,9 +92,15 @@ suite("test_struct_export", "export") {
         } else {
             throw new IllegalStateException("""${outFilePath} already exists! """)
         }
-        sql """
-            SELECT * FROM ${testTable} ORDER BY k1 INTO OUTFILE "file://${outFilePath}/";
+        result = sql """
+            SELECT * FROM ${testTable} ORDER BY k1 INTO OUTFILE "file://${outFile}/";
         """
+        url = result[0][3]
+        urlHost = url.substring(8, url.indexOf("${outFile}"))
+        def filePrifix = url.split("${outFile}")[1]
+        csvFiles = "${outFile}${filePrifix}*.csv"
+        scpFiles ("root", urlHost, csvFiles, outFilePath);
+
         File[] files = path.listFiles()
         assert files.length == 1
@@ -131,5 +140,7 @@ suite("test_struct_export", "export") {
         }
         path.delete();
     }
+    cmd = "rm -rf ${csvFiles}"
+    sshExec ("root", urlHost, cmd)
 }
}

diff --git a/regression-test/suites/export_p2/test_export_with_s3.groovy b/regression-test/suites/export_p2/test_export_with_s3.groovy
index 9dc5100054..a26dde3238 100644
--- a/regression-test/suites/export_p2/test_export_with_s3.groovy
+++ b/regression-test/suites/export_p2/test_export_with_s3.groovy
@@ -21,7 +21,7 @@ suite("test_export_with_s3", "p2") {
     String sk = getS3SK()
     String s3_endpoint = getS3Endpoint()
     String region = getS3Region()
-    String bucket = context.config.otherConfigs.get("s3ExportBucketName");
+    String bucket = context.config.otherConfigs.get("s3BucketName");
     def table_export_name = "test_export_with_s3"
     // create table and insert

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

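A minimal sketch of the pattern the updated suites now share (the suite name, table, and k1 column below are illustrative assumptions, not part of the commit): SELECT ... INTO OUTFILE writes CSV files to a backend-local directory, the returned URL is parsed for the backend host and file prefix, the new scpFiles helper copies the files back for local verification, and sshExec cleans them up on the backend afterwards.

    // Hypothetical suite illustrating the fetch-and-clean-up pattern; names and paths are assumptions.
    suite("example_outfile_fetch", "export") {
        def tableName   = "example_outfile_table"                        // assumed existing table with a k1 column
        def outFile     = "/tmp"                                          // backend-local directory written by OUTFILE
        def outFilePath = "${context.file.parent}/example_outfile_fetch"  // local directory used for verification

        def result = sql """
            SELECT * FROM ${tableName} ORDER BY k1 INTO OUTFILE "file://${outFile}/";
        """
        // Column 3 of the OUTFILE result carries the file URL; the backend host sits
        // between the scheme and the backend-local directory.
        def url        = result[0][3]
        def urlHost    = url.substring(8, url.indexOf("${outFile}"))
        def filePrefix = url.split("${outFile}")[1]
        def csvFiles   = "${outFile}${filePrefix}*.csv"

        scpFiles("root", urlHost, csvFiles, outFilePath)      // pull the CSVs from the backend host
        // ... the real suites create ${outFilePath} first and diff the downloaded rows here ...
        sshExec("root", urlHost, "rm -rf ${csvFiles}")        // remove the CSVs on the backend host
    }

Writing to a backend-local path and fetching the files over scp is presumably what lets the local-outfile cases still pass when the backend does not run on the same machine as the regression runner.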