eldenmoon commented on code in PR #58711: URL: https://github.com/apache/doris/pull/58711#discussion_r2621943316
########## regression-test/suites/export_p0/test_export_variant_10k_columns.groovy: ########## @@ -0,0 +1,238 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import java.io.File +import org.awaitility.Awaitility +import static java.util.concurrent.TimeUnit.SECONDS; + +suite("test_export_variant_10k_columns", "p0") { + // open nereids + sql """ set enable_nereids_planner=true """ + sql """ set enable_fallback_to_original_planner=false """ + + String ak = getS3AK() + String sk = getS3SK() + String s3_endpoint = getS3Endpoint() + String region = getS3Region() + // String bucket = context.config.otherConfigs.get("s3BucketName"); + String bucket = getS3BucketName() + + def table_export_name = "test_export_variant_10k" + def table_load_name = "test_load_variant_10k" + def outfile_path_prefix = """${bucket}/export/p0/variant_10k/exp""" + + def waiting_export = { export_label -> + while (true) { + def res = sql """ show export where label = "${export_label}" """ + logger.info("export state: " + res[0][2]) + if (res[0][2] == "FINISHED") { + def json = parseJson(res[0][11]) + assert json instanceof List + // assertEquals("1", json.fileNumber[0][0]) + log.info("outfile_path: ${json.url[0][0]}") + return json.url[0][0]; + 
} else if (res[0][2] == "CANCELLED") { + throw new IllegalStateException("""export failed: ${res[0][10]}""") + } else { + sleep(5000) + } + } + } + + // 1. Create table with variant column + sql """ DROP TABLE IF EXISTS ${table_export_name} """ + sql """ + CREATE TABLE IF NOT EXISTS ${table_export_name} ( + `id` INT NOT NULL, + `v` VARIANT<PROPERTIES ("variant_max_subcolumns_count" = "2048")> NULL + ) + DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1"); + """ + + // 2. Generate data with 10000 keys in variant + // Generate num_rows rows (currently 1,000). + // Total 10,000 columns, but each row only has 50 columns (sparse). + // This simulates a realistic sparse wide table scenario. + + File dataFile = File.createTempFile("variant_10k_data", ".json") + dataFile.deleteOnExit() + int num_rows = 1000 + try { + dataFile.withWriter { writer -> + StringBuilder sb = new StringBuilder() + for (int i = 1; i <= num_rows; i++) { + sb.setLength(0) + sb.append("{\"id\": ").append(i).append(", \"v\": {") + // Select 50 keys out of 10000 for each row + for (int k = 0; k < 50; k++) { + if (k > 0) sb.append(", ") + // Scatter the keys to ensure coverage of all 10000 columns across rows + int keyIdx = (i + k * 200) % 10000 + sb.append('"k').append(keyIdx).append('":').append(i) + } + sb.append("}}\n") + writer.write(sb.toString()) + } + } + + // 3. Stream Load + streamLoad { + table table_export_name + set 'format', 'json' + set 'read_json_by_line', 'true' + file dataFile.getAbsolutePath() + time 60000 // 60s + check { result, exception, startTime, endTime -> + if (exception != null) { + throw exception + } + log.info("Stream load result: ${result}".toString()) + def json = parseJson(result) + assertEquals("Success", json.Status) + assertEquals(num_rows, json.NumberTotalRows) + assertEquals(num_rows, json.NumberLoadedRows) + } + } + } finally { + dataFile.delete() + } + + // def format = "parquet" + def format = "native" + + // 4. 
Export to S3 (using the `format` selected above) + def uuid = UUID.randomUUID().toString() + // def outFilePath = """/tmp/variant_10k_export""" + def outFilePath = """${outfile_path_prefix}_${uuid}""" + def label = "label_${uuid}" + + try { + // for (int i = 0; i < 10; i++) { Review Comment: done ########## regression-test/suites/export_p0/outfile/native/test_outfile_native.groovy: ########## @@ -0,0 +1,100 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.codehaus.groovy.runtime.IOGroovyMethods + +suite("test_outfile_native", "p0") { + // open nereids + sql """ set enable_nereids_planner=true """ + sql """ set enable_fallback_to_original_planner=false """ + + String ak = getS3AK() + String sk = getS3SK() + String s3_endpoint = getS3Endpoint() + String region = getS3Region() + String bucket = context.config.otherConfigs.get("s3BucketName"); + + def tableName = "outfile_native_test" + def outFilePath = "${bucket}/outfile/native/exp_" + + // Export helper: writes to S3 and returns the URL reported by FE + def outfile_to_s3 = { + def res = sql """ + SELECT * FROM ${tableName} t ORDER BY id + INTO OUTFILE "s3://${outFilePath}" + FORMAT AS native + PROPERTIES ( + "s3.endpoint" = "${s3_endpoint}", + "s3.region" = "${region}", + "s3.secret_key"="${sk}", + "s3.access_key" = "${ak}" + ); + """ + return res[0][3] + } + + try { + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `id` INT NOT NULL, + `c_date` DATE NOT NULL, + `c_dt` DATETIME NOT NULL, + `c_str` VARCHAR(20), + `c_int` INT, + `c_tinyint` TINYINT, + `c_bool` boolean, + `c_double` double + ) + DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1"); + """ + + // Insert 10 rows of test data (last row all NULL) Review Comment: done -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
