This is an automated email from the ASF dual-hosted git repository.
eldenmoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
     new 8bb66066626 [fix](nestedType)fix nested data type to create table (#26506)
8bb66066626 is described below
commit 8bb66066626984c17f19171ea945aba099b1c508
Author: zfr95 <[email protected]>
AuthorDate: Thu Nov 9 14:38:41 2023 +0800
[fix](nestedType)fix nested data type to create table (#26506)
---
.../plugins_create_table_nested_type.groovy | 117 +++++++++++++++------
.../datatype_p0/nested_types/create_table.groovy | 3 +-
2 files changed, 87 insertions(+), 33 deletions(-)
diff --git a/regression-test/plugins/plugins_create_table_nested_type.groovy b/regression-test/plugins/plugins_create_table_nested_type.groovy
index 9c830795875..bb4bdde5818 100644
--- a/regression-test/plugins/plugins_create_table_nested_type.groovy
+++ b/regression-test/plugins/plugins_create_table_nested_type.groovy
@@ -18,75 +18,130 @@
import org.apache.doris.regression.suite.Suite
// create table with nested data type, now default complex data include array, map, struct
-Suite.metaClass.create_table_with_nested_type = { int maxDepth, String tbName /* param */ ->
+Suite.metaClass.create_table_with_nested_type = { int max_depth, def type_arr, String tb_name /* param */ ->
Suite suite = delegate as Suite
- maxDepth = maxDepth > 9 ? 9 : maxDepth
+ try {
+ if (type_arr.size() != max_depth) {
+ throw new Exception("level not equal type_arr size")
+ }
+ } catch (Exception e) {
+ logger.info(e.message)
+ return
+ }
+
+ def cur_depth = type_arr.size()
+ max_depth = max_depth > 9 ? 9 : max_depth
+ max_depth = max_depth < 1 ? 1 : max_depth
-    def dataTypeArr = ["boolean", "tinyint(4)", "smallint(6)", "int(11)", "bigint(20)", "largeint(40)", "float",
+//    def datatype_arr = ["boolean", "tinyint(4)", "smallint(6)", "int(11)", "bigint(20)", "largeint(40)", "float",
+//                        "double", "decimal(20, 3)", "decimalv3(20, 3)", "date", "datetime", "datev2", "datetimev2(0)",
+//                        "char(15)", "varchar(100)", "text", "hll","bitmap", "QUANTILE_STATE"]
+    def datatype_arr = ["boolean", "tinyint(4)", "smallint(6)", "int(11)", "bigint(20)", "largeint(40)", "float",
                        "double", "decimal(20, 3)", "decimalv3(20, 3)", "date", "datetime", "datev2", "datetimev2(0)",
"char(15)", "varchar(100)", "text"]
-    def colNameArr = ["c_bool", "c_tinyint", "c_smallint", "c_int", "c_bigint", "c_largeint", "c_float",
+    def col_name_arr = ["c_bool", "c_tinyint", "c_smallint", "c_int", "c_bigint", "c_largeint", "c_float",
                        "c_double", "c_decimal", "c_decimalv3", "c_date", "c_datetime", "c_datev2", "c_datetimev2",
"c_char", "c_varchar", "c_string"]
- def complexDataTypeArr = ["array", "map", "struct"]
-// def tbName = "test"
+ def complex_datatype_arr = ["array", "map", "struct"]
+    def base_struct_scala = "col1:int(11),col2:tinyint(4),col3:smallint(6),col4:boolean,col5:bigint(20),col6:largeint(40)," +
+            "col7:float,col8:double,col9:decimal(20, 3),col10:decimalv3(20, 3),col11:date,col12:datetime,col13:datev2,col14:datetimev2(0)," +
+            "col15:char(15),col16:varchar(100),col17:text"
+
def colCount = 1
- def memo = new String[20]
- def r = new Random()
def getDataType
- getDataType = { dataType, level ->
- if (memo[level] != null) {
- return memo[level];
- }
+ getDataType = { i, level ->
StringBuilder res = new StringBuilder();
- def data_r = r.nextInt(3);
+ def data_r = type_arr[cur_depth - level]
if (level == 1) {
if (data_r == 0) {
- res.append(complexDataTypeArr[data_r]+"<"+dataType+">");
+                res.append(complex_datatype_arr[data_r]+"<"+ datatype_arr[i] +">");
} else if (data_r == 1) {
-                res.append(complexDataTypeArr[data_r]+"<"+dataType+"," +dataType+">");
+ if (i == 16) {
+                    res.append(complex_datatype_arr[data_r]+"<"+ datatype_arr[i] +"," + datatype_arr[0] +">");
+ } else if (i == 17 || i == 18 || i == 19) {
+                    res.append(complex_datatype_arr[data_r]+"<int(11)," + datatype_arr[i] +">");
+ } else {
+                    res.append(complex_datatype_arr[data_r]+"<"+ datatype_arr[i] +"," + datatype_arr[i+1] +">");
+ }
} else if (data_r == 2) {
-                res.append(complexDataTypeArr[data_r]+"<col_"+colCount+":" + dataType +">");
- colCount++;
+                res.append(complex_datatype_arr[data_r]+"<"+ base_struct_scala +">");
}
} else {
level--;
if (data_r == 0) {
-                res.append(complexDataTypeArr[data_r]+"<"+getDataType(dataType, level)+">");
+                res.append(complex_datatype_arr[data_r]+"<"+getDataType(i, level)+">");
} else if (data_r == 1) {
-// String str = getDataType(dataType, level);
-                res.append(complexDataTypeArr[data_r]+"<"+getDataType(dataType, level)+"," +getDataType(dataType, level)+">")
+ if (i == 17 || i == 18 || i == 19) {
+                    res.append(complex_datatype_arr[data_r]+"<int(11)," + getDataType(i, level) +">");
+ } else {
+                    res.append(complex_datatype_arr[data_r]+"<"+datatype_arr[i]+"," +getDataType(i, level)+">")
+ }
+
} else if (data_r == 2) {
-                res.append(complexDataTypeArr[data_r]+"<col_"+colCount+":" + getDataType(dataType, level) +">");
+                res.append(complex_datatype_arr[data_r]+"<col_"+colCount+":" + getDataType(i, level) +">");
colCount++;
}
}
- memo[level] = res.toString()
- return memo[level];
+ return res.toString()
}
- def stmt = "CREATE TABLE IF NOT EXISTS " + tbName + "(\n" +
+ def stmt = "CREATE TABLE IF NOT EXISTS " + tb_name + "(\n" +
"`k1` bigint(11) NULL,\n"
-    String strTmp = "`" + colNameArr[0] + "` " + getDataType(dataTypeArr[0], maxDepth) + " NULL,\n";
- stmt += strTmp
- for (int i = 1; i < dataTypeArr.size(); i++) {
- String changeDataType = strTmp.replaceAll(colNameArr[0], colNameArr[i])
-        changeDataType = changeDataType.replaceAll(dataTypeArr[0], dataTypeArr[i])
- stmt += changeDataType
+
+ for (int i = 0; i < datatype_arr.size(); i++) {
+        String strTmp = "`" + col_name_arr[i] + "` " + getDataType(i, max_depth) + " NULL,\n";
+ stmt += strTmp
}
+
stmt = stmt.substring(0, stmt.length()-2)
stmt += ") ENGINE=OLAP\n" +
"DUPLICATE KEY(`k1`)\n" +
"COMMENT 'OLAP'\n" +
"DISTRIBUTED BY HASH(`k1`) BUCKETS 10\n" +
"PROPERTIES(\"replication_num\" = \"1\");"
- logger.info(stmt)
return stmt
}
logger.info("Added 'create_table_with_nested_type' function to Suite")
+Suite.metaClass.get_create_table_with_nested_type { int depth, String tb_name ->
+
+ List<List<Integer>> res = new ArrayList<>();
+ List<Integer> track = new ArrayList();
+
+// int depth = 1;
+
+ List<Integer> nums = new ArrayList<>([0, 1, 2]);
+
+ def backtrack
+ backtrack = {
+ if (track.size() == depth) {
+ List<Integer> copied = new ArrayList<>(track);
+ res.add(copied);
+ return;
+ }
+
+ for (int i = 0; i < nums.size(); i++) {
+ track.add(nums.get(i));
+ backtrack();
+ track.remove(track.size() - 1);
+ }
+ }
+
+ backtrack();
+ for (int i = 0; i < res.size; i++) {
+ def date_type_str = ""
+ for (int j = 0; j < res[i].size; j++) {
+ date_type_str += res[i][j] + " "
+ }
+ logger.info(date_type_str)
+ def result = create_table_with_nested_type(depth, res[i], tb_name)
+ logger.info(result)
+ }
+}
+
+logger.info("Added 'get_create_table_with_nested_type' function to Suite")
diff --git a/regression-test/suites/datatype_p0/nested_types/create_table.groovy b/regression-test/suites/datatype_p0/nested_types/create_table.groovy
index 8fe270cb33f..94a8d3b09a5 100644
--- a/regression-test/suites/datatype_p0/nested_types/create_table.groovy
+++ b/regression-test/suites/datatype_p0/nested_types/create_table.groovy
@@ -19,6 +19,5 @@ suite("create_table_with_nested_type") {
    // register testPlugin function in ${DORIS_HOME}/regression-test/plugins/plugins_create_table_nested_type.groovy
// input the nested depth and table name, the output is table schema
    // run "bash run-regression-test.sh --run -s create_table_with_nested_type" and watch the logger
- def result = create_table_with_nested_type(1, "test")
- logger.info(result)
+ get_create_table_with_nested_type(1, "test")
}
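
For anyone trying the reworked plugin from a regression suite, a minimal usage sketch (the suite and table names below are illustrative, not part of this commit):

    suite("create_table_with_nested_type_example") {
        // Enumerate every array(0)/map(1)/struct(2) combination for the given depth
        // and log the generated CREATE TABLE statement for each combination.
        get_create_table_with_nested_type(2, "test_nested_depth2")

        // Or build one schema directly: depth 2 with type_arr [1, 0] yields an outer
        // map whose value type is an array of the corresponding base column type.
        def ddl = create_table_with_nested_type(2, [1, 0], "test_map_of_array")
        logger.info(ddl)
    }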
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]