This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.0 by this push:
new 10e991c8bd0 [branch2.0] disable some external hive case for complex type (#25994)
10e991c8bd0 is described below
commit 10e991c8bd0674d2d1cb467935b618b7904a8f2e
Author: Mingyu Chen <[email protected]>
AuthorDate: Fri Oct 27 16:42:49 2023 +0800
[branch2.0] disable some external hive case for complex type (#25994)
branch 2.0 does not support complex types in Nereids, so disable these cases
---
.../orc/test_hive_read_orc_complex_type.groovy | 292 ++++++++++-----------
.../test_hive_read_parquet_comlex_type.groovy | 256 +++++++++---------
2 files changed, 274 insertions(+), 274 deletions(-)
diff --git a/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy b/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy
index 9edac8229a1..9dc12b968f3 100644
--- a/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy
+++ b/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy
@@ -108,173 +108,173 @@ suite("test_hive_read_orc_complex_type", "external,hive,external_docker") {
}
// 1. struct NULL type
- try {
+ // try {
+
+ // def doris_field_define = "`s_info` STRUCT<s_id:int(11), s_name:string, s_address:string> NULL"
+ //
+ // def hive_field_define = "`s_info` STRUCT<s_id:int, s_name:string, s_address:string>"
+ //
- def doris_field_define = "`s_info` STRUCT<s_id:int(11), s_name:string, s_address:string> NULL"
-
- def hive_field_define = "`s_info` STRUCT<s_id:int, s_name:string, s_address:string>"
-
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
+
+ // // insert data
+ // sql """ insert into ${export_table_name} values (1, 'doris1', {1,
'sn1', 'sa1'}); """
+ // sql """ insert into ${export_table_name} values (2, 'doris2',
struct(2, 'sn2', 'sa2')); """
+ // sql """ insert into ${export_table_name} values (3, 'doris3',
named_struct('s_id', 3, 's_name', 'sn3', 's_address', 'sa3')); """
+ // sql """ insert into ${export_table_name} values (4, 'doris4',
null); """
+ // sql """ insert into ${export_table_name} values (5, 'doris5',
struct(5, null, 'sa5')); """
+ // sql """ insert into ${export_table_name} values (6, 'doris6',
struct(null, null, null)); """
+ // sql """ insert into ${export_table_name} values (7, null,
struct(null, null, null)); """
+ // sql """ insert into ${export_table_name} values (8, null, null); """
+
+ // // test base data
+ // qt_select_base1 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
- // create table to export data
- create_table(export_table_name, doris_field_define)
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- // insert data
- sql """ insert into ${export_table_name} values (1, 'doris1', {1,
'sn1', 'sa1'}); """
- sql """ insert into ${export_table_name} values (2, 'doris2',
struct(2, 'sn2', 'sa2')); """
- sql """ insert into ${export_table_name} values (3, 'doris3',
named_struct('s_id', 3, 's_name', 'sn3', 's_address', 'sa3')); """
- sql """ insert into ${export_table_name} values (4, 'doris4', null);
"""
- sql """ insert into ${export_table_name} values (5, 'doris5',
struct(5, null, 'sa5')); """
- sql """ insert into ${export_table_name} values (6, 'doris6',
struct(null, null, null)); """
- sql """ insert into ${export_table_name} values (7, null, struct(null,
null, null)); """
- sql """ insert into ${export_table_name} values (8, null, null); """
+ // qt_select_tvf1 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.orc",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
- // test base data
- qt_select_base1 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+ // qt_hive_docker_01 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+ // } finally {
+ // }
+
+ // // 2. test Map
+ // try {
+ // def doris_field_define = "`m_info` Map<STRING, LARGEINT> NULL"
+ //
+ // def hive_field_define = "`m_info` Map<STRING, STRING>"
+ //
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
- // create hive table
- create_hive_table(hive_table, hive_field_define)
+ // // insert data
+ // sql """ insert into ${export_table_name} values (1, 'doris1', {'a':
100, 'b': 111}), (2, 'doris2', {'a': 200, 'b': 222}); """
+ // sql """ insert into ${export_table_name} values (3, 'doris3', {'a':
null, 'b': 333, 'c':399, 'd':399999999999999}); """
+ // sql """ insert into ${export_table_name} values (4, 'doris4',
{null: null, null:null}); """
+ // sql """ insert into ${export_table_name} values (5, 'doris5', {'b':
null}); """
+ // sql """ insert into ${export_table_name} values (6, null, null); """
+ // sql """ insert into ${export_table_name} values (7, 'doris7',
null); """
- qt_select_tvf1 """ select * from HDFS(
- "uri" = "${outfile_url}0.orc",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
+ // // test base data
+ // qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
+
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- qt_hive_docker_01 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+ // qt_select_tvf2 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.orc",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
+
+ // qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+ //
+ // } finally {
+ // }
+
+ // // 3. test ARRAY
+ // try {
+ // def doris_field_define = "`a_info` ARRAY<int> NULL"
+ //
+ // def hive_field_define = "`a_info` ARRAY<int>"
+
+
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
+
+
+ // // insert data
+ // sql """ insert into ${export_table_name} values (1, 'doris1', [9,
99, 999]), (2, 'doris2', [8, 88]); """
+ // sql """ insert into ${export_table_name} values (3, 'doris3', []);
"""
+ // sql """ insert into ${export_table_name} values (4, 'doris4',
null); """
+ // sql """ insert into ${export_table_name} values (5, 'doris5', [1,
null, 2]); """
+ // sql """ insert into ${export_table_name} values (6, 'doris6',
[null, null, null]); """
+ // sql """ insert into ${export_table_name} values (7, 'doris7',
[null, null, null, 1, 2, 999999, 111111]); """
+ // sql """ insert into ${export_table_name} values (8, 'doris8',
null); """
+
+ // // test base data
+ // qt_select_base3 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
+
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- } finally {
- }
-
- // 2. test Map
- try {
- def doris_field_define = "`m_info` Map<STRING, LARGEINT> NULL"
-
- def hive_field_define = "`m_info` Map<STRING, STRING>"
-
-
- // create table to export data
- create_table(export_table_name, doris_field_define)
-
- // insert data
- sql """ insert into ${export_table_name} values (1, 'doris1', {'a':
100, 'b': 111}), (2, 'doris2', {'a': 200, 'b': 222}); """
- sql """ insert into ${export_table_name} values (3, 'doris3', {'a':
null, 'b': 333, 'c':399, 'd':399999999999999}); """
- sql """ insert into ${export_table_name} values (4, 'doris4', {null:
null, null:null}); """
- sql """ insert into ${export_table_name} values (5, 'doris5', {'b':
null}); """
- sql """ insert into ${export_table_name} values (6, null, null); """
- sql """ insert into ${export_table_name} values (7, 'doris7', null);
"""
-
- // test base data
- qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY
user_id; """
-
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
-
- // create hive table
- create_hive_table(hive_table, hive_field_define)
-
- qt_select_tvf2 """ select * from HDFS(
- "uri" = "${outfile_url}0.orc",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
-
- qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
-
- } finally {
- }
-
- // 3. test ARRAY
- try {
- def doris_field_define = "`a_info` ARRAY<int> NULL"
-
- def hive_field_define = "`a_info` ARRAY<int>"
-
-
- // create table to export data
- create_table(export_table_name, doris_field_define)
-
-
- // insert data
- sql """ insert into ${export_table_name} values (1, 'doris1', [9, 99,
999]), (2, 'doris2', [8, 88]); """
- sql """ insert into ${export_table_name} values (3, 'doris3', []); """
- sql """ insert into ${export_table_name} values (4, 'doris4', null);
"""
- sql """ insert into ${export_table_name} values (5, 'doris5', [1,
null, 2]); """
- sql """ insert into ${export_table_name} values (6, 'doris6', [null,
null, null]); """
- sql """ insert into ${export_table_name} values (7, 'doris7', [null,
null, null, 1, 2, 999999, 111111]); """
- sql """ insert into ${export_table_name} values (8, 'doris8', null);
"""
+ // qt_select_tvf3 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.orc",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
+
+ // qt_hive_docker_03 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
- // test base data
- qt_select_base3 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+ // } finally {
+ // }
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
+ // // 4. test struct with all type
+ // try {
+ // def doris_field_define = "`s_info` STRUCT<user_id:INT, date:DATE, datetime:DATETIME, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:LARGEINT, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL> NULL"
+ //
+ // def hive_field_define = "`s_info` STRUCT<user_id:INT, `date`:DATE, `datetime`:TIMESTAMP, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:STRING, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL>"
- // create hive table
- create_hive_table(hive_table, hive_field_define)
- qt_select_tvf3 """ select * from HDFS(
- "uri" = "${outfile_url}0.orc",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
- qt_hive_docker_03 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
- } finally {
- }
-
- // 4. test struct with all type
- try {
- def doris_field_define = "`s_info` STRUCT<user_id:INT, date:DATE, datetime:DATETIME, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:LARGEINT, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL> NULL"
-
- def hive_field_define = "`s_info` STRUCT<user_id:INT, `date`:DATE, `datetime`:TIMESTAMP, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:STRING, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL>"
-
-
- // create table to export data
- create_table(export_table_name, doris_field_define)
+ // // insert data
+ // StringBuilder sb = new StringBuilder()
+ // int i = 1
+ // for (; i < 10; i ++) {
+ // sb.append("""
+ // (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}_1234', ${i}}),
+ // """)
+ // }
+ // sb.append("""
+ // (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL})
+ // """)
+
+ // sql """ INSERT INTO ${export_table_name} VALUES ${sb.toString()} """
+ // // test base data
+ // qt_select_base4 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
- // insert data
- StringBuilder sb = new StringBuilder()
- int i = 1
- for (; i < 10; i ++) {
- sb.append("""
- (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}_1234', ${i}}),
- """)
- }
- sb.append("""
- (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL})
- """)
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
- sql """ INSERT INTO ${export_table_name} VALUES ${sb.toString()} """
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- // test base data
- qt_select_base4 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+ // qt_select_tvf4 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.orc",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
+ // def tvf_res = sql """ select * from HDFS(
+ // "uri" = "${outfile_url}0.orc",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
- // create hive table
- create_hive_table(hive_table, hive_field_define)
+ // qt_hive_docker_04 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
- qt_select_tvf4 """ select * from HDFS(
- "uri" = "${outfile_url}0.orc",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
-
- def tvf_res = sql """ select * from HDFS(
- "uri" = "${outfile_url}0.orc",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
-
- qt_hive_docker_04 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
-
- } finally {
- }
+ // } finally {
+ // }
}
diff --git a/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy b/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy
index d6b43a2598f..eecd74fa40c 100644
--- a/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy
+++ b/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy
@@ -116,167 +116,167 @@ suite("test_hive_read_parquet_complex_type", "external,hive,external_docker") {
// 1. struct NULL type
- try {
-
- def doris_field_define = "`s_info` STRUCT<s_id:int(11), s_name:string, s_address:string> NULL"
-
- def hive_field_define = "`s_info` STRUCT<s_id:int, s_name:string, s_address:string>"
-
-
- // create table to export data
- create_table(export_table_name, doris_field_define)
-
- // insert data
- sql """ insert into ${export_table_name} values (1, 'doris1', {1,
'sn1', 'sa1'}); """
- sql """ insert into ${export_table_name} values (2, 'doris2',
struct(2, 'sn2', 'sa2')); """
- sql """ insert into ${export_table_name} values (3, 'doris3',
named_struct('s_id', 3, 's_name', 'sn3', 's_address', 'sa3')); """
- sql """ insert into ${export_table_name} values (4, 'doris4', null);
"""
- sql """ insert into ${export_table_name} values (5, 'doris5',
struct(5, null, 'sa5')); """
- sql """ insert into ${export_table_name} values (6, 'doris6',
struct(null, null, null)); """
- sql """ insert into ${export_table_name} values (7, null, struct(null,
null, null)); """
- sql """ insert into ${export_table_name} values (8, null, null); """
-
- // test base data
- qt_select_base1 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
-
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
-
- // create hive table
- create_hive_table(hive_table, hive_field_define)
-
- qt_select_tvf1 """ select * from HDFS(
- "uri" = "${outfile_url}0.parquet",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
-
- qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
-
- } finally {
- }
+ // try {
+
+ // def doris_field_define = "`s_info` STRUCT<s_id:int(11), s_name:string, s_address:string> NULL"
+ //
+ // def hive_field_define = "`s_info` STRUCT<s_id:int, s_name:string, s_address:string>"
+ //
+
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
+
+ // // insert data
+ // sql """ insert into ${export_table_name} values (1, 'doris1', {1,
'sn1', 'sa1'}); """
+ // sql """ insert into ${export_table_name} values (2, 'doris2',
struct(2, 'sn2', 'sa2')); """
+ // sql """ insert into ${export_table_name} values (3, 'doris3',
named_struct('s_id', 3, 's_name', 'sn3', 's_address', 'sa3')); """
+ // sql """ insert into ${export_table_name} values (4, 'doris4',
null); """
+ // sql """ insert into ${export_table_name} values (5, 'doris5',
struct(5, null, 'sa5')); """
+ // sql """ insert into ${export_table_name} values (6, 'doris6',
struct(null, null, null)); """
+ // sql """ insert into ${export_table_name} values (7, null,
struct(null, null, null)); """
+ // sql """ insert into ${export_table_name} values (8, null, null); """
+
+ // // test base data
+ // // qt_select_base1 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
+
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
+
+ // qt_select_tvf1 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.parquet",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
+
+ // qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+ //
+ // } finally {
+ // }
// 2. test Map
- try {
- def doris_field_define = "`m_info` Map<STRING, LARGEINT> NULL"
-
- def hive_field_define = "`m_info` Map<STRING, STRING>"
-
+ // try {
+ // def doris_field_define = "`m_info` Map<STRING, LARGEINT> NULL"
+ //
+ // def hive_field_define = "`m_info` Map<STRING, STRING>"
+ //
- // create table to export data
- create_table(export_table_name, doris_field_define)
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
- // insert data
- sql """ insert into ${export_table_name} values (1, 'doris1', {'a':
100, 'b': 111}), (2, 'doris2', {'a': 200, 'b': 222}); """
- sql """ insert into ${export_table_name} values (3, 'doris3', {'a':
null, 'b': 333, 'c':399, 'd':399999999999999}); """
- sql """ insert into ${export_table_name} values (4, 'doris4', {}); """
- sql """ insert into ${export_table_name} values (5, 'doris5', {'b':
null}); """
- sql """ insert into ${export_table_name} values (6, null, null); """
- sql """ insert into ${export_table_name} values (7, 'doris7', null);
"""
+ // // insert data
+ // sql """ insert into ${export_table_name} values (1, 'doris1', {'a':
100, 'b': 111}), (2, 'doris2', {'a': 200, 'b': 222}); """
+ // sql """ insert into ${export_table_name} values (3, 'doris3', {'a':
null, 'b': 333, 'c':399, 'd':399999999999999}); """
+ // sql """ insert into ${export_table_name} values (4, 'doris4', {});
"""
+ // sql """ insert into ${export_table_name} values (5, 'doris5', {'b':
null}); """
+ // sql """ insert into ${export_table_name} values (6, null, null); """
+ // sql """ insert into ${export_table_name} values (7, 'doris7',
null); """
- // test base data
- qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+ // // test base data
+ // qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
- // create hive table
- create_hive_table(hive_table, hive_field_define)
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- qt_select_tvf2 """ select * from HDFS(
- "uri" = "${outfile_url}0.parquet",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
+ // qt_select_tvf2 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.parquet",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
- qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+ // qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
- } finally {
- }
+ // } finally {
+ // }
// 3. test ARRAY
- try {
- def doris_field_define = "`a_info` ARRAY<int> NULL"
-
- def hive_field_define = "`a_info` ARRAY<int>"
+ // try {
+ // def doris_field_define = "`a_info` ARRAY<int> NULL"
+ //
+ // def hive_field_define = "`a_info` ARRAY<int>"
- // create table to export data
- create_table(export_table_name, doris_field_define)
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
- // insert data
- sql """ insert into ${export_table_name} values (1, 'doris1', [9, 99,
999]), (2, 'doris2', [8, 88]); """
- sql """ insert into ${export_table_name} values (3, 'doris3', []); """
- sql """ insert into ${export_table_name} values (4, 'doris4', null);
"""
- sql """ insert into ${export_table_name} values (5, 'doris5', [1,
null, 2]); """
- sql """ insert into ${export_table_name} values (6, 'doris6', [null,
null, null]); """
- sql """ insert into ${export_table_name} values (7, 'doris7', [null,
null, null, 1, 2, 999999, 111111]); """
- sql """ insert into ${export_table_name} values (8, 'doris8', null);
"""
+ // // insert data
+ // sql """ insert into ${export_table_name} values (1, 'doris1', [9,
99, 999]), (2, 'doris2', [8, 88]); """
+ // sql """ insert into ${export_table_name} values (3, 'doris3', []);
"""
+ // sql """ insert into ${export_table_name} values (4, 'doris4',
null); """
+ // sql """ insert into ${export_table_name} values (5, 'doris5', [1,
null, 2]); """
+ // sql """ insert into ${export_table_name} values (6, 'doris6',
[null, null, null]); """
+ // sql """ insert into ${export_table_name} values (7, 'doris7',
[null, null, null, 1, 2, 999999, 111111]); """
+ // sql """ insert into ${export_table_name} values (8, 'doris8',
null); """
- // test base data
- qt_select_base3 """ SELECT * FROM ${export_table_name} t ORDER BY
user_id; """
+ // // test base data
+ // qt_select_base3 """ SELECT * FROM ${export_table_name} t ORDER BY
user_id; """
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
- // create hive table
- create_hive_table(hive_table, hive_field_define)
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- qt_select_tvf3 """ select * from HDFS(
- "uri" = "${outfile_url}0.parquet",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
+ // qt_select_tvf3 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.parquet",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
- qt_hive_docker_03 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+ // qt_hive_docker_03 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
- } finally {
- }
+ // } finally {
+ // }
- // 4. test struct with all type
- try {
- def doris_field_define = "`s_info` STRUCT<user_id:INT, date:DATE, datetime:DATETIME, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:LARGEINT, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL> NULL"
-
- def hive_field_define = "`s_info` STRUCT<user_id:INT, `date`:STRING, `datetime`:STRING, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:STRING, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL>"
+ // // 4. test struct with all type
+ // try {
+ // def doris_field_define = "`s_info` STRUCT<user_id:INT, date:DATE, datetime:DATETIME, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:LARGEINT, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL> NULL"
+ //
+ // def hive_field_define = "`s_info` STRUCT<user_id:INT, `date`:STRING, `datetime`:STRING, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:STRING, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL>"
- // create table to export data
- create_table(export_table_name, doris_field_define)
+ // // create table to export data
+ // create_table(export_table_name, doris_field_define)
- // insert data
- StringBuilder sb = new StringBuilder()
- int i = 1
- for (; i < 10; i ++) {
- sb.append("""
- (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}_1234', ${i}}),
- """)
- }
- sb.append("""
- (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL})
- """)
+ // // insert data
+ // StringBuilder sb = new StringBuilder()
+ // int i = 1
+ // for (; i < 10; i ++) {
+ // sb.append("""
+ // (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}_1234', ${i}}),
+ // """)
+ // }
+ // sb.append("""
+ // (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL})
+ // """)
- sql """ INSERT INTO ${export_table_name} VALUES ${sb.toString()} """
+ // sql """ INSERT INTO ${export_table_name} VALUES ${sb.toString()} """
- // test base data
- qt_select_base4 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+ // // test base data
+ // qt_select_base4 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
- // test outfile to hdfs
- def outfile_url = outfile_to_HDFS()
+ // // test outfile to hdfs
+ // def outfile_url = outfile_to_HDFS()
- // create hive table
- create_hive_table(hive_table, hive_field_define)
+ // // create hive table
+ // create_hive_table(hive_table, hive_field_define)
- qt_select_tvf4 """ select * from HDFS(
- "uri" = "${outfile_url}0.parquet",
- "hadoop.username" = "${hdfsUserName}",
- "format" = "${format}");
- """
+ // qt_select_tvf4 """ select * from HDFS(
+ // "uri" = "${outfile_url}0.parquet",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "${format}");
+ // """
- qt_hive_docker_04 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+ // qt_hive_docker_04 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
- } finally {
- }
+ // } finally {
+ // }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]