This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 32254952337 [regression-test](export) Add some tests that use hive external table to read orc/parquet file exported by doris (#25431)
32254952337 is described below

commit 322549523375310bda4650a6fd5f6e8c27e266bc
Author: Tiewei Fang <43782773+bepppo...@users.noreply.github.com>
AuthorDate: Wed Oct 18 09:59:15 2023 +0800

    [regression-test](export) Add some tests that use hive external table to read orc/parquet file exported by doris (#25431)
    
    Add the following regression tests:
    
    1. Export Doris data to ORC/Parquet files on HDFS using SELECT INTO OUTFILE.
    2. Create Hive external tables over the exported files and read them back from Hive.
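    
    A minimal sketch of the flow these suites exercise (the HDFS URI, user,
    database, and table names below are illustrative placeholders, not the
    exact values used in the tests):
    
        -- In Doris: export a table as ORC files to HDFS.
        SELECT * FROM demo_tbl ORDER BY user_id
        INTO OUTFILE "hdfs://127.0.0.1:8020/user/doris/tmp_data/exp_"
        FORMAT AS orc
        PROPERTIES (
            "fs.defaultFS" = "hdfs://127.0.0.1:8020",
            "hadoop.username" = "doris"
        );
    
        -- In Hive: map an external table onto the exported directory and read it back.
        CREATE EXTERNAL TABLE demo_db.demo_tbl (user_id INT, name STRING, age INT)
        STORED AS orc
        LOCATION "/user/doris/tmp_data";
        SELECT * FROM demo_db.demo_tbl ORDER BY user_id;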
---
 .../export/hive_read/orc/test_hive_read_orc.out    |  49 ++++
 .../orc/test_hive_read_orc_complex_type.out        | 124 +++++++++
 .../hive_read/parquet/test_hive_read_parquet.out   |  49 ++++
 .../parquet/test_hive_read_parquet_comlex_type.out | 124 +++++++++
 .../external_table_p0/hive/test_hive_other.out     |   5 -
 .../export/hive_read/orc/test_hive_read_orc.groovy | 272 ++++++++++++++++++++
 .../orc/test_hive_read_orc_complex_type.groovy     | 280 ++++++++++++++++++++
 .../parquet/test_hive_read_parquet.groovy          | 272 ++++++++++++++++++++
 .../test_hive_read_parquet_comlex_type.groovy      | 282 +++++++++++++++++++++
 .../external_table_p0/hive/test_hive_other.groovy  |  13 +-
 .../test_insert_from_tvf_with_common_user.groovy   |   2 +-
 11 files changed, 1465 insertions(+), 7 deletions(-)

diff --git a/regression-test/data/external_table_p0/export/hive_read/orc/test_hive_read_orc.out b/regression-test/data/external_table_p0/export/hive_read/orc/test_hive_read_orc.out
new file mode 100644
index 00000000000..4a6bc2065c7
--- /dev/null
+++ b/regression-test/data/external_table_p0/export/hive_read/orc/test_hive_read_orc.out
@@ -0,0 +1,49 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !select_base1 --
+1      doris1  18
+2      doris2  40
+3      \N      \N
+4      doris4  -2147483648
+5      doris5  2147483647
+6      \N      -2147483648
+7      \N      0
+8      nereids \N
+
+-- !select_tvf1 --
+1      doris1  18
+2      doris2  40
+3      \N      \N
+4      doris4  -2147483648
+5      doris5  2147483647
+6      \N      -2147483648
+7      \N      0
+8      nereids \N
+
+-- !hive_docker_01 --
+1      doris1  18
+2      doris2  40
+3      \N      \N
+4      doris4  -2147483648
+5      doris5  2147483647
+6      \N      -2147483648
+7      \N      0
+8      nereids \N
+
+-- !select_base2 --
+1      2023-04-20      2023-04-20      2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        Beijing Haidian 1       1       true    1       1       1       1.1     1.1     char1   1       1       1       0.1     1.00000000      1.0000000000    1       1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
+2      9999-12-31      9999-12-31      9999-12-31T23:59:59     9999-12-31T23:59:59     2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400              Haidian -32768  -128    true    -2147483648     -9223372036854775808    -170141183460469231731687303715884105728        1.4E-45 4.9E-324        char2   100000000       100000000       4       0.1     0.99999999      9999999999.9999999999   99999999999999999999999999999999999999  9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
+3      2023-04-21      2023-04-21      2023-04-20T12:34:56     2023-04-20T00:00        2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456      Beijing         32767   127     true    2147483647      9223372036854775807     170141183460469231731687303715884105727 3.4028235e+38   1.7976931348623157E308  char3   999999999       999999999       9       0.9     9.99999999      1234567890.0123456789   12345678901234567890123456789012345678  1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
+4      0000-01-01      0000-01-01      2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        Beijing Haidian 4       4       true    4       4       4       4.4     4.4     char4   4       4       4       0.4     4.00000000      4.0000000000    4       4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
+
+-- !select_tvf2 --
+1      2023-04-20      2023-04-20      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00.000 2023-04-20 00:00:00.000000      Beijing Haidian 1       1       true    1       1       1       1.1     1.1     char1   1       1       1       0.1     1.00000000      1.0000000000    1       1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
+2      9999-12-31      9999-12-31      9999-12-31 23:59:59     9999-12-31 23:59:59     2023-04-20 00:00:00.120 2023-04-20 00:00:00.334400              Haidian -32768  -128    true    -2147483648     -9223372036854775808    -170141183460469231731687303715884105728        1.4E-45 4.9E-324        char2   100000000       100000000       4       0.1     0.99999999      9999999999.9999999999   99999999999999999999999999999999999999  9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
+3      2023-04-21      2023-04-21      2023-04-20 12:34:56     2023-04-20 00:00:00     2023-04-20 00:00:00.123 2023-04-20 00:00:00.123456      Beijing         32767   127     true    2147483647      9223372036854775807     170141183460469231731687303715884105727 3.4028235e+38   1.7976931348623157E308  char3   999999999       999999999       9       0.9     9.99999999      1234567890.0123456789   12345678901234567890123456789012345678  1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
+4      0000-01-01      0000-01-01      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00.000 2023-04-20 00:00:00.000000      Beijing Haidian 4       4       true    4       4       4       4.4     4.4     char4   4       4       4       0.4     4.00000000      4.0000000000    4       4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
+
+-- !hive_docker_02 --
+1      2023-04-20      2023-04-20      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00.000 2023-04-20 00:00:00.000000      Beijing Haidian 1       1       true    1       1       1       1.1     1.1     char1   1       1       1       0.1     1.00000000      1.0000000000    1       1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
+2      9999-12-31      9999-12-31      9999-12-31 23:59:59     9999-12-31 23:59:59     2023-04-20 00:00:00.120 2023-04-20 00:00:00.334400              Haidian -32768  -128    true    -2147483648     -9223372036854775808    -170141183460469231731687303715884105728        1.4E-45 4.9E-324        char2   100000000       100000000       4       0.1     0.99999999      9999999999.9999999999   99999999999999999999999999999999999999  9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
+3      2023-04-21      2023-04-21      2023-04-20 12:34:56     2023-04-20 00:00:00     2023-04-20 00:00:00.123 2023-04-20 00:00:00.123456      Beijing         32767   127     true    2147483647      9223372036854775807     170141183460469231731687303715884105727 3.4028235E38    1.7976931348623157E308  char3   999999999       999999999       9       0.9     9.99999999      1234567890.0123456789   12345678901234567890123456789012345678  1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
+4      0000-01-01      0000-01-01      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00.000 2023-04-20 00:00:00.000000      Beijing Haidian 4       4       true    4       4       4       4.4     4.4     char4   4       4       4       0.4     4.00000000      4.0000000000    4       4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
+
diff --git a/regression-test/data/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.out b/regression-test/data/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.out
new file mode 100644
index 00000000000..77b38dc4d0b
--- /dev/null
+++ b/regression-test/data/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.out
@@ -0,0 +1,124 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !select_base1 --
+1      doris1  {1, "sn1", "sa1"}
+2      doris2  {2, "sn2", "sa2"}
+3      doris3  {3, "sn3", "sa3"}
+4      doris4  \N
+5      doris5  {5, NULL, "sa5"}
+6      doris6  {NULL, NULL, NULL}
+7      \N      {NULL, NULL, NULL}
+8      \N      \N
+
+-- !select_tvf1 --
+1      doris1  {1, "sn1", "sa1"}
+2      doris2  {2, "sn2", "sa2"}
+3      doris3  {3, "sn3", "sa3"}
+4      doris4  \N
+5      doris5  {5, NULL, "sa5"}
+6      doris6  {NULL, NULL, NULL}
+7      \N      {NULL, NULL, NULL}
+8      \N      \N
+
+-- !hive_docker_01 --
+1      doris1  {"s_id":1,"s_name":"sn1","s_address":"sa1"}
+2      doris2  {"s_id":2,"s_name":"sn2","s_address":"sa2"}
+3      doris3  {"s_id":3,"s_name":"sn3","s_address":"sa3"}
+4      doris4  \N
+5      doris5  {"s_id":5,"s_name":null,"s_address":"sa5"}
+6      doris6  {"s_id":null,"s_name":null,"s_address":null}
+7      \N      {"s_id":null,"s_name":null,"s_address":null}
+8      \N      \N
+
+-- !select_base2 --
+1      doris1  {"a":100, "b":111}
+2      doris2  {"a":200, "b":222}
+3      doris3  {"a":NULL, "b":333, "c":399, "d":399999999999999}
+4      doris4  {NULL:NULL, NULL:NULL}
+5      doris5  {"b":NULL}
+6      \N      \N
+7      doris7  \N
+
+-- !select_tvf2 --
+1      doris1  {"a":"100", "b":"111"}
+2      doris2  {"a":"200", "b":"222"}
+3      doris3  {"a":NULL, "b":"333", "c":"399", "d":"399999999999999"}
+4      doris4  {NULL:NULL, NULL:NULL}
+5      doris5  {"b":NULL}
+6      \N      \N
+7      doris7  \N
+
+-- !hive_docker_02 --
+1      doris1  {"a":"100","b":"111"}
+2      doris2  {"a":"200","b":"222"}
+3      doris3  {"a":null,"b":"333","c":"399","d":"399999999999999"}
+4      doris4  {}
+5      doris5  {"b":null}
+6      \N      \N
+7      doris7  \N
+
+-- !select_base3 --
+1      doris1  [9, 99, 999]
+2      doris2  [8, 88]
+3      doris3  []
+4      doris4  \N
+5      doris5  [1, NULL, 2]
+6      doris6  [NULL, NULL, NULL]
+7      doris7  [NULL, NULL, NULL, 1, 2, 999999, 111111]
+8      doris8  \N
+
+-- !select_tvf3 --
+1      doris1  [9, 99, 999]
+2      doris2  [8, 88]
+3      doris3  []
+4      doris4  \N
+5      doris5  [1, NULL, 2]
+6      doris6  [NULL, NULL, NULL]
+7      doris7  [NULL, NULL, NULL, 1, 2, 999999, 111111]
+8      doris8  \N
+
+-- !hive_docker_03 --
+1      doris1  [9,99,999]
+2      doris2  [8,88]
+3      doris3  []
+4      doris4  \N
+5      doris5  [1,null,2]
+6      doris6  [null,null,null]
+7      doris7  [null,null,null,1,2,999999,111111]
+8      doris8  \N
+
+-- !select_base4 --
+1      doris_1 {1, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 1, 1, 1, 1, 1, 1, 1.1, 1.1, "char1_1234", 1}
+2      doris_2 {2, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 2, 2, 1, 2, 2, 2, 2.2, 2.2, "char2_1234", 2}
+3      doris_3 {3, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 3, 3, 1, 3, 3, 3, 3.3, 3.3, "char3_1234", 3}
+4      doris_4 {4, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 4, 4, 1, 4, 4, 4, 4.4, 4.4, "char4_1234", 4}
+5      doris_5 {5, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 5, 5, 1, 5, 5, 5, 5.5, 5.5, "char5_1234", 5}
+6      doris_6 {6, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 6, 6, 1, 6, 6, 6, 6.6, 6.6, "char6_1234", 6}
+7      doris_7 {7, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 7, 7, 1, 7, 7, 7, 7.7, 7.7, "char7_1234", 7}
+8      doris_8 {8, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 8, 8, 1, 8, 8, 8, 8.8, 8.8, "char8_1234", 8}
+9      doris_9 {9, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 9, 9, 1, 9, 9, 9, 9.9, 9.9, "char9_1234", 9}
+10     doris_10        {10, 2017-10-01, 2017-10-01 00:00:00, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL}
+
+-- !select_tvf4 --
+1      doris_1 {1, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 1, 1, 1, 1, 1, "1", 1.1, 1.1, "char1_1234", 1}
+2      doris_2 {2, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 2, 2, 1, 2, 2, "2", 2.2, 2.2, "char2_1234", 2}
+3      doris_3 {3, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 3, 3, 1, 3, 3, "3", 3.3, 3.3, "char3_1234", 3}
+4      doris_4 {4, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 4, 4, 1, 4, 4, "4", 4.4, 4.4, "char4_1234", 4}
+5      doris_5 {5, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 5, 5, 1, 5, 5, "5", 5.5, 5.5, "char5_1234", 5}
+6      doris_6 {6, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 6, 6, 1, 6, 6, "6", 6.6, 6.6, "char6_1234", 6}
+7      doris_7 {7, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 7, 7, 1, 7, 7, "7", 7.7, 7.7, "char7_1234", 7}
+8      doris_8 {8, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 8, 8, 1, 8, 8, "8", 8.8, 8.8, "char8_1234", 8}
+9      doris_9 {9, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 9, 9, 1, 9, 9, "9", 9.9, 9.9, "char9_1234", 9}
+10     doris_10        {10, "2017-10-01", "2017-10-01 00:00:00", NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL}
+
+-- !hive_docker_04 --
+1      doris_1 {"user_id":1,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":1,"sex":1,"bool_col":true,"int_col":1,"bigint_col":1,"largeint_col":"1","float_col":1.1,"double_col":1.1,"char_col":"char1_1234","decimal_col":1}
+2      doris_2 {"user_id":2,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":2,"sex":2,"bool_col":true,"int_col":2,"bigint_col":2,"largeint_col":"2","float_col":2.2,"double_col":2.2,"char_col":"char2_1234","decimal_col":2}
+3      doris_3 {"user_id":3,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":3,"sex":3,"bool_col":true,"int_col":3,"bigint_col":3,"largeint_col":"3","float_col":3.3,"double_col":3.3,"char_col":"char3_1234","decimal_col":3}
+4      doris_4 {"user_id":4,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":4,"sex":4,"bool_col":true,"int_col":4,"bigint_col":4,"largeint_col":"4","float_col":4.4,"double_col":4.4,"char_col":"char4_1234","decimal_col":4}
+5      doris_5 {"user_id":5,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":5,"sex":5,"bool_col":true,"int_col":5,"bigint_col":5,"largeint_col":"5","float_col":5.5,"double_col":5.5,"char_col":"char5_1234","decimal_col":5}
+6      doris_6 {"user_id":6,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":6,"sex":6,"bool_col":true,"int_col":6,"bigint_col":6,"largeint_col":"6","float_col":6.6,"double_col":6.6,"char_col":"char6_1234","decimal_col":6}
+7      doris_7 {"user_id":7,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":7,"sex":7,"bool_col":true,"int_col":7,"bigint_col":7,"largeint_col":"7","float_col":7.7,"double_col":7.7,"char_col":"char7_1234","decimal_col":7}
+8      doris_8 {"user_id":8,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":8,"sex":8,"bool_col":true,"int_col":8,"bigint_col":8,"largeint_col":"8","float_col":8.8,"double_col":8.8,"char_col":"char8_1234","decimal_col":8}
+9      doris_9 {"user_id":9,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":9,"sex":9,"bool_col":true,"int_col":9,"bigint_col":9,"largeint_col":"9","float_col":9.9,"double_col":9.9,"char_col":"char9_1234","decimal_col":9}
+10     doris_10        {"user_id":10,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":null,"age":null,"sex":null,"bool_col":null,"int_col":null,"bigint_col":null,"largeint_col":null,"float_col":null,"double_col":null,"char_col":null,"decimal_col":null}
+
diff --git a/regression-test/data/external_table_p0/export/hive_read/parquet/test_hive_read_parquet.out b/regression-test/data/external_table_p0/export/hive_read/parquet/test_hive_read_parquet.out
new file mode 100644
index 00000000000..b2a579e9698
--- /dev/null
+++ b/regression-test/data/external_table_p0/export/hive_read/parquet/test_hive_read_parquet.out
@@ -0,0 +1,49 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !select_base1 --
+1      doris1  18
+2      doris2  40
+3      \N      \N
+4      doris4  -2147483648
+5      doris5  2147483647
+6      \N      -2147483648
+7      \N      0
+8      nereids \N
+
+-- !select_tvf1 --
+1      doris1  18
+2      doris2  40
+3      \N      \N
+4      doris4  -2147483648
+5      doris5  2147483647
+6      \N      -2147483648
+7      \N      0
+8      nereids \N
+
+-- !hive_docker_01 --
+1      doris1  18
+2      doris2  40
+3      \N      \N
+4      doris4  -2147483648
+5      doris5  2147483647
+6      \N      -2147483648
+7      \N      0
+8      nereids \N
+
+-- !select_base2 --
+1      2023-04-20      2023-04-20      2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        Beijing Haidian 1       1       true    1       1       1       1.1     1.1     char1   1       1       1       0.1     1.00000000      1.0000000000    1       1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
+2      9999-12-31      9999-12-31      9999-12-31T23:59:59     9999-12-31T23:59:59     2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400              Haidian -32768  -128    true    -2147483648     -9223372036854775808    -170141183460469231731687303715884105728        1.4E-45 4.9E-324        char2   100000000       100000000       4       0.1     0.99999999      9999999999.9999999999   99999999999999999999999999999999999999  9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
+3      2023-04-21      2023-04-21      2023-04-20T12:34:56     2023-04-20T00:00        2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456      Beijing         32767   127     true    2147483647      9223372036854775807     170141183460469231731687303715884105727 3.4028235e+38   1.7976931348623157E308  char3   999999999       999999999       9       0.9     9.99999999      1234567890.0123456789   12345678901234567890123456789012345678  1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
+4      0000-01-01      0000-01-01      2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        2023-04-20T00:00        Beijing Haidian 4       4       true    4       4       4       4.4     4.4     char4   4       4       4       0.4     4.00000000      4.0000000000    4       4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
+
+-- !select_tvf2 --
+1      2023-04-20      2023-04-20      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     Beijing Haidian 1       1       true    1       1       1       1.1     1.1     char1   1       1       1       0.1     1.00000000      1.0000000000    1       1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
+2      9999-12-31      9999-12-31      9999-12-31 23:59:59     9999-12-31 23:59:59     2023-04-20 00:00:00.120000      2023-04-20 00:00:00.334400              Haidian -32768  -128    true    -2147483648     -9223372036854775808    -170141183460469231731687303715884105728        1.4E-45 4.9E-324        char2   100000000       100000000       4       0.1     0.99999999      9999999999.9999999999   99999999999999999999999999999999999999  9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
+3      2023-04-21      2023-04-21      2023-04-20 12:34:56     2023-04-20 00:00:00     2023-04-20 00:00:00.123000      2023-04-20 00:00:00.123456      Beijing         32767   127     true    2147483647      9223372036854775807     170141183460469231731687303715884105727 3.4028235e+38   1.7976931348623157E308  char3   999999999       999999999       9       0.9     9.99999999      1234567890.0123456789   12345678901234567890123456789012345678  1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
+4      0000-01-01      0000-01-01      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     Beijing Haidian 4       4       true    4       4       4       4.4     4.4     char4   4       4       4       0.4     4.00000000      4.0000000000    4       4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
+
+-- !hive_docker_02 --
+1      2023-04-20      2023-04-20      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     Beijing Haidian 1       1       true    1       1       1       1.1     1.1     char1   1       1       1       0.1     1.00000000      1.0000000000    1       1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
+2      9999-12-31      9999-12-31      9999-12-31 23:59:59     9999-12-31 23:59:59     2023-04-20 00:00:00.120000      2023-04-20 00:00:00.334400              Haidian -32768  -128    true    -2147483648     -9223372036854775808    -170141183460469231731687303715884105728        1.4E-45 4.9E-324        char2   100000000       100000000       4       0.1     0.99999999      9999999999.9999999999   99999999999999999999999999999999999999  9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
+3      2023-04-21      2023-04-21      2023-04-20 12:34:56     2023-04-20 00:00:00     2023-04-20 00:00:00.123000      2023-04-20 00:00:00.123456      Beijing         32767   127     true    2147483647      9223372036854775807     170141183460469231731687303715884105727 3.4028235E38    1.7976931348623157E308  char3   999999999       999999999       9       0.9     9.99999999      1234567890.0123456789   12345678901234567890123456789012345678  1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
+4      0000-01-01      0000-01-01      2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     2023-04-20 00:00:00     Beijing Haidian 4       4       true    4       4       4       4.4     4.4     char4   4       4       4       0.4     4.00000000      4.0000000000    4       4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
+
diff --git a/regression-test/data/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.out b/regression-test/data/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.out
new file mode 100644
index 00000000000..1820905cb81
--- /dev/null
+++ b/regression-test/data/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.out
@@ -0,0 +1,124 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !select_base1 --
+1      doris1  {1, "sn1", "sa1"}
+2      doris2  {2, "sn2", "sa2"}
+3      doris3  {3, "sn3", "sa3"}
+4      doris4  \N
+5      doris5  {5, NULL, "sa5"}
+6      doris6  {NULL, NULL, NULL}
+7      \N      {NULL, NULL, NULL}
+8      \N      \N
+
+-- !select_tvf1 --
+1      doris1  {1, "sn1", "sa1"}
+2      doris2  {2, "sn2", "sa2"}
+3      doris3  {3, "sn3", "sa3"}
+4      doris4  \N
+5      doris5  {5, NULL, "sa5"}
+6      doris6  {NULL, NULL, NULL}
+7      \N      {NULL, NULL, NULL}
+8      \N      \N
+
+-- !hive_docker_02 --
+1      doris1  {"s_id":1,"s_name":"sn1","s_address":"sa1"}
+2      doris2  {"s_id":2,"s_name":"sn2","s_address":"sa2"}
+3      doris3  {"s_id":3,"s_name":"sn3","s_address":"sa3"}
+4      doris4  \N
+5      doris5  {"s_id":5,"s_name":null,"s_address":"sa5"}
+6      doris6  {"s_id":null,"s_name":null,"s_address":null}
+7      \N      {"s_id":null,"s_name":null,"s_address":null}
+8      \N      \N
+
+-- !select_base2 --
+1      doris1  {"a":100, "b":111}
+2      doris2  {"a":200, "b":222}
+3      doris3  {"a":NULL, "b":333, "c":399, "d":399999999999999}
+4      doris4  {}
+5      doris5  {"b":NULL}
+6      \N      \N
+7      doris7  \N
+
+-- !select_tvf2 --
+1      doris1  {"a":"100", "b":"111"}
+2      doris2  {"a":"200", "b":"222"}
+3      doris3  {"a":NULL, "b":"333", "c":"399", "d":"399999999999999"}
+4      doris4  {}
+5      doris5  {"b":NULL}
+6      \N      \N
+7      doris7  \N
+
+-- !hive_docker_02 --
+1      doris1  {"a":"100","b":"111"}
+2      doris2  {"a":"200","b":"222"}
+3      doris3  {"a":null,"b":"333","c":"399","d":"399999999999999"}
+4      doris4  {}
+5      doris5  {"b":null}
+6      \N      \N
+7      doris7  \N
+
+-- !select_base3 --
+1      doris1  [9, 99, 999]
+2      doris2  [8, 88]
+3      doris3  []
+4      doris4  \N
+5      doris5  [1, NULL, 2]
+6      doris6  [NULL, NULL, NULL]
+7      doris7  [NULL, NULL, NULL, 1, 2, 999999, 111111]
+8      doris8  \N
+
+-- !select_tvf3 --
+1      doris1  [9, 99, 999]
+2      doris2  [8, 88]
+3      doris3  []
+4      doris4  \N
+5      doris5  [1, NULL, 2]
+6      doris6  [NULL, NULL, NULL]
+7      doris7  [NULL, NULL, NULL, 1, 2, 999999, 111111]
+8      doris8  \N
+
+-- !hive_docker_03 --
+1      doris1  [9,99,999]
+2      doris2  [8,88]
+3      doris3  []
+4      doris4  \N
+5      doris5  [1,null,2]
+6      doris6  [null,null,null]
+7      doris7  [null,null,null,1,2,999999,111111]
+8      doris8  \N
+
+-- !select_base4 --
+1      doris_1 {1, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 1, 1, 1, 1, 1, 1, 1.1, 1.1, "char1_1234", 1}
+2      doris_2 {2, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 2, 2, 1, 2, 2, 2, 2.2, 2.2, "char2_1234", 2}
+3      doris_3 {3, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 3, 3, 1, 3, 3, 3, 3.3, 3.3, "char3_1234", 3}
+4      doris_4 {4, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 4, 4, 1, 4, 4, 4, 4.4, 4.4, "char4_1234", 4}
+5      doris_5 {5, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 5, 5, 1, 5, 5, 5, 5.5, 5.5, "char5_1234", 5}
+6      doris_6 {6, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 6, 6, 1, 6, 6, 6, 6.6, 6.6, "char6_1234", 6}
+7      doris_7 {7, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 7, 7, 1, 7, 7, 7, 7.7, 7.7, "char7_1234", 7}
+8      doris_8 {8, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 8, 8, 1, 8, 8, 8, 8.8, 8.8, "char8_1234", 8}
+9      doris_9 {9, 2017-10-01, 2017-10-01 00:00:00, "Beijing", 9, 9, 1, 9, 9, 9, 9.9, 9.9, "char9_1234", 9}
+10     doris_10        {10, 2017-10-01, 2017-10-01 00:00:00, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL}
+
+-- !select_tvf4 --
+1      doris_1 {1, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 1, 1, 1, 1, 1, "1", 1.1, 1.1, "char1_1234", 1}
+2      doris_2 {2, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 2, 2, 1, 2, 2, "2", 2.2, 2.2, "char2_1234", 2}
+3      doris_3 {3, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 3, 3, 1, 3, 3, "3", 3.3, 3.3, "char3_1234", 3}
+4      doris_4 {4, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 4, 4, 1, 4, 4, "4", 4.4, 4.4, "char4_1234", 4}
+5      doris_5 {5, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 5, 5, 1, 5, 5, "5", 5.5, 5.5, "char5_1234", 5}
+6      doris_6 {6, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 6, 6, 1, 6, 6, "6", 6.6, 6.6, "char6_1234", 6}
+7      doris_7 {7, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 7, 7, 1, 7, 7, "7", 7.7, 7.7, "char7_1234", 7}
+8      doris_8 {8, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 8, 8, 1, 8, 8, "8", 8.8, 8.8, "char8_1234", 8}
+9      doris_9 {9, "2017-10-01", "2017-10-01 00:00:00", "Beijing", 9, 9, 1, 9, 9, "9", 9.9, 9.9, "char9_1234", 9}
+10     doris_10        {10, "2017-10-01", "2017-10-01 00:00:00", NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL}
+
+-- !hive_docker_04 --
+1      doris_1 {"user_id":1,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":1,"sex":1,"bool_col":true,"int_col":1,"bigint_col":1,"largeint_col":"1","float_col":1.1,"double_col":1.1,"char_col":"char1_1234","decimal_col":1}
+2      doris_2 {"user_id":2,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":2,"sex":2,"bool_col":true,"int_col":2,"bigint_col":2,"largeint_col":"2","float_col":2.2,"double_col":2.2,"char_col":"char2_1234","decimal_col":2}
+3      doris_3 {"user_id":3,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":3,"sex":3,"bool_col":true,"int_col":3,"bigint_col":3,"largeint_col":"3","float_col":3.3,"double_col":3.3,"char_col":"char3_1234","decimal_col":3}
+4      doris_4 {"user_id":4,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":4,"sex":4,"bool_col":true,"int_col":4,"bigint_col":4,"largeint_col":"4","float_col":4.4,"double_col":4.4,"char_col":"char4_1234","decimal_col":4}
+5      doris_5 {"user_id":5,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":5,"sex":5,"bool_col":true,"int_col":5,"bigint_col":5,"largeint_col":"5","float_col":5.5,"double_col":5.5,"char_col":"char5_1234","decimal_col":5}
+6      doris_6 {"user_id":6,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":6,"sex":6,"bool_col":true,"int_col":6,"bigint_col":6,"largeint_col":"6","float_col":6.6,"double_col":6.6,"char_col":"char6_1234","decimal_col":6}
+7      doris_7 {"user_id":7,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":7,"sex":7,"bool_col":true,"int_col":7,"bigint_col":7,"largeint_col":"7","float_col":7.7,"double_col":7.7,"char_col":"char7_1234","decimal_col":7}
+8      doris_8 {"user_id":8,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":8,"sex":8,"bool_col":true,"int_col":8,"bigint_col":8,"largeint_col":"8","float_col":8.8,"double_col":8.8,"char_col":"char8_1234","decimal_col":8}
+9      doris_9 {"user_id":9,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":"Beijing","age":9,"sex":9,"bool_col":true,"int_col":9,"bigint_col":9,"largeint_col":"9","float_col":9.9,"double_col":9.9,"char_col":"char9_1234","decimal_col":9}
+10     doris_10        {"user_id":10,"date":"2017-10-01","datetime":"2017-10-01 00:00:00","city":null,"age":null,"sex":null,"bool_col":null,"int_col":null,"bigint_col":null,"largeint_col":null,"float_col":null,"double_col":null,"char_col":null,"decimal_col":null}
+
diff --git a/regression-test/data/external_table_p0/hive/test_hive_other.out b/regression-test/data/external_table_p0/hive/test_hive_other.out
index 9df0caad17f..42770a0aa9e 100644
--- a/regression-test/data/external_table_p0/hive/test_hive_other.out
+++ b/regression-test/data/external_table_p0/hive/test_hive_other.out
@@ -1,9 +1,4 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
--- !ext_catalog_grants --
-default
-tpch1_orc
-tpch1_parquet
-
 -- !q24 --
 zhangsan       1
 lisi   1
diff --git a/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc.groovy b/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc.groovy
new file mode 100644
index 00000000000..cd1b06af7ef
--- /dev/null
+++ b/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc.groovy
@@ -0,0 +1,272 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.nio.file.Paths
+
+suite("test_hive_read_orc", "external,hive,external_docker") {
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable Hive test.")
+        return;
+    }
+
+
+    // open nereids
+    sql """ set enable_nereids_planner=true """
+    sql """ set enable_fallback_to_original_planner=false """
+
+
+    String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+    // It's okay to use a random `hdfsUserName`, but it cannot be empty.
+    def hdfsUserName = "doris"
+    def format = "orc"
+    def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+    def outfile_path = "/user/doris/tmp_data"
+    def uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+
+    def export_table_name = "outfile_hive_read_orc_test"
+    def hive_database = "test_hive_read_orc"
+    def hive_table = "outfile_hive_read_orc_test"
+
+    def create_table = {table_name, column_define ->
+        sql """ DROP TABLE IF EXISTS ${table_name} """
+        sql """
+        CREATE TABLE IF NOT EXISTS ${table_name} (
+            ${column_define}
+            )
+            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
+        """
+    }
+
+    def outfile_to_HDFS = {
+        // select ... into outfile ...
+        def uuid = UUID.randomUUID().toString()
+
+        outfile_path = "/user/doris/tmp_data/${uuid}"
+        uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "${uri}"
+            FORMAT AS ${format}
+            PROPERTIES (
+                "fs.defaultFS"="${defaultFS}",
+                "hadoop.username" = "${hdfsUserName}"
+            );
+        """
+        logger.info("outfile success path: " + res[0][3]);
+        return res[0][3]
+    }
+
+    def create_hive_table = {table_name, column_define ->
+        def drop_table_str = """ drop table if exists ${hive_database}.${table_name} """
+        def drop_database_str = """ drop database if exists ${hive_database}"""
+        def create_database_str = """ create database ${hive_database}"""
+        def create_table_str = """ CREATE EXTERNAL TABLE ${hive_database}.${table_name} (  
+                                        ${column_define}
+                                    )
+                                    stored as ${format}
+                                    LOCATION "${outfile_path}"
+                                """
+
+        logger.info("hive sql: " + drop_table_str)
+        hive_docker """ ${drop_table_str} """
+
+        logger.info("hive sql: " + drop_database_str)
+        hive_docker """ ${drop_database_str} """
+
+        logger.info("hive sql: " + create_database_str)
+        hive_docker """ ${create_database_str}"""
+
+        logger.info("hive sql: " + create_table_str)
+        hive_docker """ ${create_table_str} """
+    }
+
+    // test INT, String type
+    try {
+        def doris_column_define = """
+                                    `user_id` INT NOT NULL COMMENT "user id",
+                                    `name` STRING NULL,
+                                    `age` INT NULL"""
+
+        def hive_column_define = """
+                                    user_id INT,
+                                    name STRING,
+                                    age INT"""
+
+        // create table
+        create_table(export_table_name, doris_column_define);
+        
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', 18); """
+        sql """ insert into ${export_table_name} values (2, 'doris2', 40); """
+        sql """ insert into ${export_table_name} values (3, null, null); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', ${Integer.MIN_VALUE}); """
+        sql """ insert into ${export_table_name} values (5, 'doris5', ${Integer.MAX_VALUE}); """
+        sql """ insert into ${export_table_name} values (6, null, ${Integer.MIN_VALUE}); """
+        sql """ insert into ${export_table_name} values (7, null, 0); """
+        sql """ insert into ${export_table_name} values (8, "nereids", null); """
+
+        qt_select_base1 """ SELECT * FROM ${export_table_name} ORDER BY user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_column_define)
+
+        qt_select_tvf1 """ select * from HDFS(
+                    "uri" = "${outfile_url}0.orc",
+                    "hadoop.username" = "${hdfsUserName}",
+                    "format" = "${format}");
+                    """
+
+        qt_hive_docker_01 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+    // test all types
+    try {
+        def doris_column_define = """
+                                    `user_id` INT NOT NULL COMMENT "user id",
+                                    `date` DATE NOT NULL COMMENT "data import datetime",
+                                    `datev2` DATEV2 NOT NULL COMMENT "data import datetime 2",
+                                    `datetime` DATETIME NOT NULL COMMENT "data import datetime",
+                                    `datetimev2_1` DATETIMEV2 NOT NULL COMMENT "data import datetime",
+                                    `datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "data import datetime",
+                                    `datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "data import datetime",
+                                    `city` VARCHAR(20) COMMENT "user city",
+                                    `street` STRING COMMENT "user street",
+                                    `age` SMALLINT COMMENT "user age",
+                                    `sex` TINYINT COMMENT "user gender",
+                                    `bool_col` boolean COMMENT "",
+                                    `int_col` int COMMENT "",
+                                    `bigint_col` bigint COMMENT "",
+                                    `largeint_col` largeint COMMENT "",
+                                    `float_col` float COMMENT "",
+                                    `double_col` double COMMENT "",
+                                    `char_col` CHAR(10) COMMENT "",
+                                    `decimal_col` decimal COMMENT "",
+                                    `decimalv3_col` decimalv3 COMMENT "",
+                                    `decimalv3_col2` decimalv3(1,0) COMMENT "",
+                                    `decimalv3_col3` decimalv3(1,1) COMMENT "",
+                                    `decimalv3_col4` decimalv3(9,8) COMMENT "",
+                                    `decimalv3_col5` decimalv3(20,10) COMMENT "",
+                                    `decimalv3_col6` decimalv3(38,0) COMMENT "",
+                                    `decimalv3_col7` decimalv3(38,37) COMMENT "",
+                                    `decimalv3_col8` decimalv3(38,38) COMMENT ""
+                                """
+
+        def hive_column_define = """
+                                    user_id INT,
+                                    `date` STRING,
+                                    datev2 STRING,
+                                    `datetime` STRING,
+                                    datetimev2_1 STRING,
+                                    datetimev2_2 STRING,
+                                    datetimev2_3 STRING,
+                                    city STRING,
+                                    street STRING,
+                                    age SMALLINT,
+                                    sex TINYINT,
+                                    bool_col boolean,
+                                    int_col INT,
+                                    bigint_col BIGINT,
+                                    largeint_col STRING,
+                                    `float_col` float,
+                                    `double_col` double,
+                                    `char_col` char(5),
+                                    `decimal_col` decimal ,
+                                    `decimalv3_col` decimal ,
+                                    `decimalv3_col2` decimal(1,0) ,
+                                    `decimalv3_col3` decimal(1,1) ,
+                                    `decimalv3_col4` decimal(9,8) ,
+                                    `decimalv3_col5` decimal(20,10) ,
+                                    `decimalv3_col6` decimal(38,0) ,
+                                    `decimalv3_col7` decimal(38,37) ,
+                                    `decimalv3_col8` decimal(38,38)
+                                """
+
+        // create table
+        create_table(export_table_name, doris_column_define);
+        
+
+        StringBuilder sb = new StringBuilder()
+        int i = 1
+        sb.append("""
+                (${i}, '2023-04-20', '2023-04-20', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00',
+                'Beijing', 'Haidian',
+                ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}',
+                ${i}, ${i}, ${i}, 0.${i}, ${i}, ${i}, ${i}, ${i}, 0.${i}),
+            """)
+
+        sb.append("""
+            (${++i}, '9999-12-31', '9999-12-31', '9999-12-31 23:59:59', '9999-12-31 23:59:59', '2023-04-20 00:00:00.12', '2023-04-20 00:00:00.3344',
+            '', 'Haidian',
+            ${Short.MIN_VALUE}, ${Byte.MIN_VALUE}, true, ${Integer.MIN_VALUE}, ${Long.MIN_VALUE}, -170141183460469231731687303715884105728, ${Float.MIN_VALUE}, ${Double.MIN_VALUE}, 'char${i}',
+            100000000, 100000000, 4, 0.1, 0.99999999, 9999999999.9999999999, 99999999999999999999999999999999999999, 9.9999999999999999999999999999999999999, 0.99999999999999999999999999999999999999),
+        """)
+        
+        sb.append("""
+                (${++i}, '2023-04-21', '2023-04-21', '2023-04-20 12:34:56', '2023-04-20 00:00:00', '2023-04-20 00:00:00.123', '2023-04-20 00:00:00.123456',
+                'Beijing', '', 
+                ${Short.MAX_VALUE}, ${Byte.MAX_VALUE}, true, ${Integer.MAX_VALUE}, ${Long.MAX_VALUE}, 170141183460469231731687303715884105727, ${Float.MAX_VALUE}, ${Double.MAX_VALUE}, 'char${i}',
+                999999999, 999999999, 9, 0.9, 9.99999999, 1234567890.0123456789, 12345678901234567890123456789012345678, 1.2345678901234567890123456789012345678, 0.12345678901234567890123456789012345678),
+            """)
+
+        sb.append("""
+                (${++i}, '0000-01-01', '0000-01-01', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00',
+                'Beijing', 'Haidian',
+                ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}',
+                ${i}, ${i}, ${i}, 0.${i}, ${i}, ${i}, ${i}, ${i}, 0.${i})
+            """)
+
+        
+        sql """ INSERT INTO ${export_table_name} VALUES
+                ${sb.toString()}
+            """
+    
+        def insert_res = sql "show last insert;"
+        logger.info("insert result: " + insert_res.toString())
+        qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+        // create hive table
+        create_hive_table(hive_table, hive_column_define)
+
+        qt_select_tvf2 """ select * from HDFS(
+                    "uri" = "${outfile_url}0.orc",
+                    "hadoop.username" = "${hdfsUserName}",
+                    "format" = "${format}");
+                    """
+
+        qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+}
diff --git a/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy b/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy
new file mode 100644
index 00000000000..9edac8229a1
--- /dev/null
+++ b/regression-test/suites/external_table_p0/export/hive_read/orc/test_hive_read_orc_complex_type.groovy
@@ -0,0 +1,280 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.nio.file.Paths
+
+suite("test_hive_read_orc_complex_type", "external,hive,external_docker") {
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable Hive test.")
+        return;
+    }
+
+    // open nereids
+    sql """ set enable_nereids_planner=true """
+    sql """ set enable_fallback_to_original_planner=false """
+
+
+    String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+    // It's okay to use a random `hdfsUserName`, but it cannot be empty.
+    def hdfsUserName = "doris"
+    def format = "orc"
+    def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+    def outfile_path = "/user/doris/tmp_data"
+    def uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+
+    def export_table_name = "outfile_hive_read_orc_complex_type_test"
+    def hive_database = "test_hive_read_orc_complex_type"
+    def hive_table = "outfile_hive_read_orc_complex_type_test"
+
+    def create_table = {table_name, column_define ->
+        sql """ DROP TABLE IF EXISTS ${table_name} """
+        sql """
+        CREATE TABLE IF NOT EXISTS ${table_name} (
+            `user_id` INT NOT NULL COMMENT "user id",
+            `name` STRING COMMENT "user name",
+            ${column_define}
+            )
+            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
+        """
+    }
+
+    def create_hive_table = {table_name, column_define ->
+        def drop_table_str = """ drop table if exists ${hive_database}.${table_name} """
+        def drop_database_str = """ drop database if exists ${hive_database}"""
+        def create_database_str = """ create database ${hive_database}"""
+        def create_table_str = """ CREATE EXTERNAL TABLE ${hive_database}.${table_name} (  
+                                        user_id INT,
+                                        name STRING,
+                                        ${column_define}
+                                    )
+                                    stored as ${format}
+                                    LOCATION "${outfile_path}"
+                                """
+
+        logger.info("hive sql: " + drop_table_str)
+        hive_docker """ ${drop_table_str} """
+
+        logger.info("hive sql: " + drop_database_str)
+        hive_docker """ ${drop_database_str} """
+
+        logger.info("hive sql: " + create_database_str)
+        hive_docker """ ${create_database_str}"""
+
+        logger.info("hive sql: " + create_table_str)
+        hive_docker """ ${create_table_str} """
+    }
+
+    def outfile_to_HDFS = {
+        // select ... into outfile ...
+        def uuid = UUID.randomUUID().toString()
+
+        outfile_path = "/user/doris/tmp_data/${uuid}"
+        uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "${uri}"
+            FORMAT AS ${format}
+            PROPERTIES (
+                "fs.defaultFS"="${defaultFS}",
+                "hadoop.username" = "${hdfsUserName}"
+            );
+        """
+        logger.info("outfile success path: " + res[0][3]);
+        return res[0][3]
+    }
+
+    // 1. struct NULL type
+    try {
+
+        def doris_field_define = "`s_info` STRUCT<s_id:int(11), s_name:string, s_address:string> NULL"
+        
+        def hive_field_define = "`s_info` STRUCT<s_id:int, s_name:string, s_address:string>"
+        
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', {1, 'sn1', 'sa1'}); """
+        sql """ insert into ${export_table_name} values (2, 'doris2', struct(2, 'sn2', 'sa2')); """
+        sql """ insert into ${export_table_name} values (3, 'doris3', named_struct('s_id', 3, 's_name', 'sn3', 's_address', 'sa3')); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', null); """
+        sql """ insert into ${export_table_name} values (5, 'doris5', struct(5, null, 'sa5')); """
+        sql """ insert into ${export_table_name} values (6, 'doris6', struct(null, null, null)); """
+        sql """ insert into ${export_table_name} values (7, null, struct(null, null, null)); """
+        sql """ insert into ${export_table_name} values (8, null, null); """
+
+        // test base data
+        qt_select_base1 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf1 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.orc",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_01 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+    // 2. test Map
+    try {
+        def doris_field_define = "`m_info` Map<STRING, LARGEINT> NULL"
+        
+        def hive_field_define = "`m_info` Map<STRING, STRING>"
+        
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', {'a': 100, 'b': 111}), (2, 'doris2', {'a': 200, 'b': 222}); """
+        sql """ insert into ${export_table_name} values (3, 'doris3', {'a': null, 'b': 333, 'c':399, 'd':399999999999999}); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', {null: null, null:null}); """
+        sql """ insert into ${export_table_name} values (5, 'doris5', {'b': null}); """
+        sql """ insert into ${export_table_name} values (6, null, null); """
+        sql """ insert into ${export_table_name} values (7, 'doris7', null); """
+
+        // test base data
+        qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf2 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.orc",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+            
+    } finally {
+    }
+
+    // 3. test ARRAY
+    try {
+        def doris_field_define = "`a_info` ARRAY<int> NULL"
+        
+        def hive_field_define = "`a_info` ARRAY<int>"
+
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', [9, 99, 999]), (2, 'doris2', [8, 88]); """
+        sql """ insert into ${export_table_name} values (3, 'doris3', []); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', null); """
+        sql """ insert into ${export_table_name} values (5, 'doris5', [1, null, 2]); """
+        sql """ insert into ${export_table_name} values (6, 'doris6', [null, null, null]); """
+        sql """ insert into ${export_table_name} values (7, 'doris7', [null, null, null, 1, 2, 999999, 111111]); """
+        sql """ insert into ${export_table_name} values (8, 'doris8', null); """
+
+        // test base data
+        qt_select_base3 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf3 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.orc",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_03 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+    // 4. test struct with all types
+    try {
+        def doris_field_define = "`s_info` STRUCT<user_id:INT, date:DATE, datetime:DATETIME, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:LARGEINT, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL> NULL"
+        
+        def hive_field_define = "`s_info` STRUCT<user_id:INT, `date`:DATE, `datetime`:TIMESTAMP, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:STRING, float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL>"
+
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+
+        // insert data
+        StringBuilder sb = new StringBuilder()
+        int i = 1
+        for (; i < 10; i ++) {
+            sb.append("""
+                (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}_1234', ${i}}),
+            """)
+        }
+        sb.append("""
+            (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL})
+        """)
+
+        sql """ INSERT INTO ${export_table_name} VALUES ${sb.toString()} """
+
+        // test base data
+        qt_select_base4 """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf4 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.orc",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        def tvf_res = sql """ select * from HDFS(
+                        "uri" = "${outfile_url}0.orc",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_04 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+}
diff --git a/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet.groovy b/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet.groovy
new file mode 100644
index 00000000000..13a7b8e960e
--- /dev/null
+++ b/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet.groovy
@@ -0,0 +1,272 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.nio.file.Paths
+
+suite("test_hive_read_parquet", "external,hive,external_docker") {
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable Hive test.")
+        return;
+    }
+
+
+    // enable the Nereids planner
+    sql """ set enable_nereids_planner=true """
+    sql """ set enable_fallback_to_original_planner=false """
+
+
+    String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+    // It's okay to use a random `hdfsUserName`, but it cannot be empty.
+    def hdfsUserName = "doris"
+    def format = "parquet"
+    def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+    def outfile_path = "/user/doris/tmp_data"
+    def uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+
+    def export_table_name = "outfile_hive_read_parquet_test"
+    def hive_database = "test_hive_read_parquet"
+    def hive_table = "outfile_hive_read_parquet_test"
+
+    def create_table = {table_name, column_define ->
+        sql """ DROP TABLE IF EXISTS ${table_name} """
+        sql """
+        CREATE TABLE IF NOT EXISTS ${table_name} (
+            ${column_define}
+            )
+            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
+        """
+    }
+
+    def outfile_to_HDFS = {
+        // select ... into outfile ...
+        def uuid = UUID.randomUUID().toString()
+
+        outfile_path = "/user/doris/tmp_data/${uuid}"
+        uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "${uri}"
+            FORMAT AS ${format}
+            PROPERTIES (
+                "fs.defaultFS"="${defaultFS}",
+                "hadoop.username" = "${hdfsUserName}"
+            );
+        """
+        logger.info("outfile success path: " + res[0][3]);
+        return res[0][3]
+    }
+
+    def create_hive_table = {table_name, column_define ->
+        def drop_table_str = """ drop table if exists ${hive_database}.${table_name} """
+        def drop_database_str = """ drop database if exists ${hive_database}"""
+        def create_database_str = """ create database ${hive_database}"""
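+        // the external table is laid over the directory Doris just exported to, so Hive reads the files in place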
+        def create_table_str = """ CREATE EXTERNAL TABLE ${hive_database}.${table_name} (
+                                        ${column_define}
+                                    )
+                                    stored as ${format}
+                                    LOCATION "${outfile_path}"
+                                """
+
+        logger.info("hive sql: " + drop_table_str)
+        hive_docker """ ${drop_table_str} """
+
+        logger.info("hive sql: " + drop_database_str)
+        hive_docker """ ${drop_database_str} """
+
+        logger.info("hive sql: " + create_database_str)
+        hive_docker """ ${create_database_str}"""
+
+        logger.info("hive sql: " + create_table_str)
+        hive_docker """ ${create_table_str} """
+    }
+
+    // test INT, String type
+    try {
+        def doris_column_define = """
+                                    `user_id` INT NOT NULL COMMENT "用户id",
+                                    `name` STRING NULL,
+                                    `age` INT NULL"""
+
+        def hive_column_define = """
+                                    user_id INT,
+                                    name STRING,
+                                    age INT"""
+
+        // create table
+        create_table(export_table_name, doris_column_define);
+        
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', 18); """
+        sql """ insert into ${export_table_name} values (2, 'doris2', 40); """
+        sql """ insert into ${export_table_name} values (3, null, null); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', 
${Integer.MIN_VALUE}); """
+        sql """ insert into ${export_table_name} values (5, 'doris5', 
${Integer.MAX_VALUE}); """
+        sql """ insert into ${export_table_name} values (6, null, 
${Integer.MIN_VALUE}); """
+        sql """ insert into ${export_table_name} values (7, null, 0); """
+        sql """ insert into ${export_table_name} values (8, "nereids", null); 
"""
+
+        qt_select_base1 """ SELECT * FROM ${export_table_name} ORDER BY 
user_id; """ 
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_column_define)
+
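+        // exported files are numbered from 0, so "${outfile_url}0.parquet" is the first (and here the only) file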
+        qt_select_tvf1 """ select * from HDFS(
+                    "uri" = "${outfile_url}0.parquet",
+                    "hadoop.username" = "${hdfsUserName}",
+                    "format" = "${format}");
+                    """
+
+        qt_hive_docker_01 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+    // test all types
+    try {
+        def doris_column_define = """
+                                    `user_id` INT NOT NULL COMMENT "user id",
+                                    `date` DATE NOT NULL COMMENT "data load datetime",
+                                    `datev2` DATEV2 NOT NULL COMMENT "data load datetime 2",
+                                    `datetime` DATETIME NOT NULL COMMENT "data load datetime",
+                                    `datetimev2_1` DATETIMEV2 NOT NULL COMMENT "data load datetime",
+                                    `datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "data load datetime",
+                                    `datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "data load datetime",
+                                    `city` VARCHAR(20) COMMENT "user's city",
+                                    `street` STRING COMMENT "user's street",
+                                    `age` SMALLINT COMMENT "user age",
+                                    `sex` TINYINT COMMENT "user gender",
+                                    `bool_col` boolean COMMENT "",
+                                    `int_col` int COMMENT "",
+                                    `bigint_col` bigint COMMENT "",
+                                    `largeint_col` largeint COMMENT "",
+                                    `float_col` float COMMENT "",
+                                    `double_col` double COMMENT "",
+                                    `char_col` CHAR(10) COMMENT "",
+                                    `decimal_col` decimal COMMENT "",
+                                    `decimalv3_col` decimalv3 COMMENT "",
+                                    `decimalv3_col2` decimalv3(1,0) COMMENT "",
+                                    `decimalv3_col3` decimalv3(1,1) COMMENT "",
+                                    `decimalv3_col4` decimalv3(9,8) COMMENT "",
+                                    `decimalv3_col5` decimalv3(20,10) COMMENT "",
+                                    `decimalv3_col6` decimalv3(38,0) COMMENT "",
+                                    `decimalv3_col7` decimalv3(38,37) COMMENT "",
+                                    `decimalv3_col8` decimalv3(38,38) COMMENT ""
+                                """
+
+        def hive_column_define = """
+                                    user_id INT,
+                                    `date` STRING,
+                                    datev2 STRING,
+                                    `datetime` STRING,
+                                    datetimev2_1 STRING,
+                                    datetimev2_2 STRING,
+                                    datetimev2_3 STRING,
+                                    city STRING,
+                                    street STRING,
+                                    age SMALLINT,
+                                    sex TINYINT,
+                                    bool_col boolean,
+                                    int_col INT,
+                                    bigint_col BIGINT,
+                                    largeint_col STRING,
+                                    `float_col` float,
+                                    `double_col` double,
+                                    `char_col` char(5),
+                                    `decimal_col` decimal,
+                                    `decimalv3_col` decimal,
+                                    `decimalv3_col2` decimal(1,0),
+                                    `decimalv3_col3` decimal(1,1),
+                                    `decimalv3_col4` decimal(9,8),
+                                    `decimalv3_col5` decimal(20,10),
+                                    `decimalv3_col6` decimal(38,0),
+                                    `decimalv3_col7` decimal(38,37),
+                                    `decimalv3_col8` decimal(38,38)
+                                """
+
+        // create table
+        create_table(export_table_name, doris_column_define);
+        
+
+        StringBuilder sb = new StringBuilder()
+        int i = 1
+        sb.append("""
+                (${i}, '2023-04-20', '2023-04-20', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00',
+                'Beijing', 'Haidian',
+                ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}',
+                ${i}, ${i}, ${i}, 0.${i}, ${i}, ${i}, ${i}, ${i}, 0.${i}),
+            """)
+
+        sb.append("""
+            (${++i}, '9999-12-31', '9999-12-31', '9999-12-31 23:59:59', '9999-12-31 23:59:59', '2023-04-20 00:00:00.12', '2023-04-20 00:00:00.3344',
+            '', 'Haidian',
+            ${Short.MIN_VALUE}, ${Byte.MIN_VALUE}, true, ${Integer.MIN_VALUE}, ${Long.MIN_VALUE}, -170141183460469231731687303715884105728, ${Float.MIN_VALUE}, ${Double.MIN_VALUE}, 'char${i}',
+            100000000, 100000000, 4, 0.1, 0.99999999, 9999999999.9999999999, 99999999999999999999999999999999999999, 9.9999999999999999999999999999999999999, 0.99999999999999999999999999999999999999),
+        """)
+        
+        sb.append("""
+                (${++i}, '2023-04-21', '2023-04-21', '2023-04-20 12:34:56', '2023-04-20 00:00:00', '2023-04-20 00:00:00.123', '2023-04-20 00:00:00.123456',
+                'Beijing', '',
+                ${Short.MAX_VALUE}, ${Byte.MAX_VALUE}, true, ${Integer.MAX_VALUE}, ${Long.MAX_VALUE}, 170141183460469231731687303715884105727, ${Float.MAX_VALUE}, ${Double.MAX_VALUE}, 'char${i}',
+                999999999, 999999999, 9, 0.9, 9.99999999, 1234567890.0123456789, 12345678901234567890123456789012345678, 1.2345678901234567890123456789012345678, 0.12345678901234567890123456789012345678),
+            """)
+
+        sb.append("""
+                (${++i}, '0000-01-01', '0000-01-01', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00',
+                'Beijing', 'Haidian',
+                ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}',
+                ${i}, ${i}, ${i}, 0.${i}, ${i}, ${i}, ${i}, ${i}, 0.${i})
+            """)
+
+        
+        sql """ INSERT INTO ${export_table_name} VALUES
+                ${sb.toString()}
+            """
+    
+        def insert_res = sql "show last insert;"
+        logger.info("insert result: " + insert_res.toString())
+        qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY 
user_id; """    
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+        // create hive table
+        create_hive_table(hive_table, hive_column_define)
+
+        qt_select_tvf2 """ select * from HDFS(
+                    "uri" = "${outfile_url}0.parquet",
+                    "hadoop.username" = "${hdfsUserName}",
+                    "format" = "${format}");
+                    """
+
+        qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+        
+    } finally {
+    }
+}
diff --git a/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy b/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy
new file mode 100644
index 00000000000..d6b43a2598f
--- /dev/null
+++ b/regression-test/suites/external_table_p0/export/hive_read/parquet/test_hive_read_parquet_comlex_type.groovy
@@ -0,0 +1,282 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.nio.file.Paths
+
+suite("test_hive_read_parquet_complex_type", "external,hive,external_docker") {
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable Hive test.")
+        return;
+    }
+
+    // enable the Nereids planner
+    sql """ set enable_nereids_planner=true """
+    sql """ set enable_fallback_to_original_planner=false """
+
+
+    String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+    // It's okay to use a random `hdfsUserName`, but it cannot be empty.
+    def hdfsUserName = "doris"
+    def format = "parquet"
+    def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+    def outfile_path = "/user/doris/tmp_data"
+    def uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+
+    def export_table_name = "outfile_hive_read_parquet_complex_type_test"
+    def hive_database = "test_hive_read_parquet_complex_type"
+    def hive_table = "outfile_hive_read_parquet_complex_type_test"
+
+    def create_table = {table_name, column_define ->
+        sql """ DROP TABLE IF EXISTS ${table_name} """
+        sql """
+        CREATE TABLE IF NOT EXISTS ${table_name} (
+            `user_id` INT NOT NULL COMMENT "user id",
+            `name` STRING COMMENT "user name",
+            ${column_define}
+            )
+            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
+        """
+    }
+
+    def create_hive_table = {table_name, column_define ->
+        def drop_table_str = """ drop table if exists ${hive_database}.${table_name} """
+        def drop_database_str = """ drop database if exists ${hive_database}"""
+        def create_database_str = """ create database ${hive_database}"""
+        def create_table_str = """ CREATE EXTERNAL TABLE ${hive_database}.${table_name} (
+                                        user_id INT,
+                                        name STRING,
+                                        ${column_define}
+                                    )
+                                    stored as ${format}
+                                    LOCATION "${outfile_path}"
+                                """
+
+        logger.info("hive sql: " + drop_table_str)
+        hive_docker """ ${drop_table_str} """
+
+        logger.info("hive sql: " + drop_database_str)
+        hive_docker """ ${drop_database_str} """
+
+        logger.info("hive sql: " + create_database_str)
+        hive_docker """ ${create_database_str}"""
+
+        logger.info("hive sql: " + create_table_str)
+        hive_docker """ ${create_table_str} """
+    }
+
+    def outfile_to_HDFS = {
+        // select ... into outfile ...
+        def uuid = UUID.randomUUID().toString()
+
+        outfile_path = "/user/doris/tmp_data/${uuid}"
+        uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "${uri}"
+            FORMAT AS ${format}
+            PROPERTIES (
+                "fs.defaultFS"="${defaultFS}",
+                "hadoop.username" = "${hdfsUserName}"
+            );
+        """
+        logger.info("outfile success path: " + res[0][3]);
+        return res[0][3]
+    }
+
+    // Hive renders NULL as `null` and puts no space between elements,
+    // so normalize the Doris result before comparing it against Hive output
+    def handle_doris_space_and_NULL = {res ->
+        res = res.replaceAll(", ", ",");
+        res = res.replaceAll("NULL", "null");
+        return res
+    }
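+    // e.g. handle_doris_space_and_NULL("[1, NULL, 2]") returns "[1,null,2]"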
+
+
+    // 1. struct NULL type
+    try {
+
+        def doris_field_define = "`s_info` STRUCT<s_id:int(11), s_name:string, 
s_address:string> NULL"
+        
+        def hive_field_define = "`s_info` STRUCT<s_id:int, s_name:string, 
s_address:string>"
+        
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+        // insert data
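+        // the rows below cover the three equivalent struct literal syntaxes: {...}, struct(...), and named_struct(...)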
+        sql """ insert into ${export_table_name} values (1, 'doris1', {1, 
'sn1', 'sa1'}); """
+        sql """ insert into ${export_table_name} values (2, 'doris2', 
struct(2, 'sn2', 'sa2')); """
+        sql """ insert into ${export_table_name} values (3, 'doris3', 
named_struct('s_id', 3, 's_name', 'sn3', 's_address', 'sa3')); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', null); 
"""
+        sql """ insert into ${export_table_name} values (5, 'doris5', 
struct(5, null, 'sa5')); """
+        sql """ insert into ${export_table_name} values (6, 'doris6', 
struct(null, null, null)); """
+        sql """ insert into ${export_table_name} values (7, null, struct(null, 
null, null)); """
+        sql """ insert into ${export_table_name} values (8, null, null); """
+
+        // test base data
+        qt_select_base1 """ SELECT * FROM ${export_table_name} t ORDER BY 
user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf1 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.parquet",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_01 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+            
+    } finally {
+    }
+
+    // 2. test Map
+    try {
+        def doris_field_define = "`m_info` Map<STRING, LARGEINT> NULL"
+        
+        def hive_field_define = "`m_info` Map<STRING, STRING>"
+        
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', {'a': 
100, 'b': 111}), (2, 'doris2', {'a': 200, 'b': 222}); """
+        sql """ insert into ${export_table_name} values (3, 'doris3', {'a': 
null, 'b': 333, 'c':399, 'd':399999999999999}); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', {}); """
+        sql """ insert into ${export_table_name} values (5, 'doris5', {'b': 
null}); """
+        sql """ insert into ${export_table_name} values (6, null, null); """
+        sql """ insert into ${export_table_name} values (7, 'doris7', null); 
"""
+
+        // test base data
+        qt_select_base2 """ SELECT * FROM ${export_table_name} t ORDER BY 
user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf2 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.parquet",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_02 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+    // 3. test ARRAY
+    try {
+        def doris_field_define = "`a_info` ARRAY<int> NULL"
+        
+        def hive_field_define = "`a_info` ARRAY<int>"
+
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+
+        // insert data
+        sql """ insert into ${export_table_name} values (1, 'doris1', [9, 99, 
999]), (2, 'doris2', [8, 88]); """
+        sql """ insert into ${export_table_name} values (3, 'doris3', []); """
+        sql """ insert into ${export_table_name} values (4, 'doris4', null); 
"""
+        sql """ insert into ${export_table_name} values (5, 'doris5', [1, 
null, 2]); """
+        sql """ insert into ${export_table_name} values (6, 'doris6', [null, 
null, null]); """
+        sql """ insert into ${export_table_name} values (7, 'doris7', [null, 
null, null, 1, 2, 999999, 111111]); """
+        sql """ insert into ${export_table_name} values (8, 'doris8', null); 
"""
+
+        // test base data
+        qt_select_base3 """ SELECT * FROM ${export_table_name} t ORDER BY 
user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf3 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.parquet",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_03 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+    // 4. test struct with all type
+    try {
+        def doris_field_define = "`s_info` STRUCT<user_id:INT, date:DATE, 
datetime:DATETIME, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, 
bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:LARGEINT, 
float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL> 
NULL"
+        
+        def hive_field_define = "`s_info` STRUCT<user_id:INT, `date`:STRING, 
`datetime`:STRING, city:VARCHAR(20), age:SMALLINT, sex:TINYINT, 
bool_col:BOOLEAN, int_col:INT, bigint_col:BIGINT, largeint_col:STRING, 
float_col:FLOAT, double_col:DOUBLE, char_col:CHAR(10), decimal_col:DECIMAL>"
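+        // unlike the ORC variant of this test, the nested `date`/`datetime` fields are declared as STRING on the Hive side here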
+
+
+        // create table to export data
+        create_table(export_table_name, doris_field_define)
+
+
+        // insert data
+        StringBuilder sb = new StringBuilder()
+        int i = 1
+        for (; i < 10; i ++) {
+            sb.append("""
+                (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}_1234', ${i}}),
+            """)
+        }
+        sb.append("""
+            (${i}, 'doris_${i}', {${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL})
+        """)
+
+        sql """ INSERT INTO ${export_table_name} VALUES ${sb.toString()} """
+
+        // test base data
+        qt_select_base4 """ SELECT * FROM ${export_table_name} t ORDER BY 
user_id; """
+
+        // test outfile to hdfs
+        def outfile_url = outfile_to_HDFS()
+
+        // create hive table
+        create_hive_table(hive_table, hive_field_define)
+
+        qt_select_tvf4 """ select * from HDFS(
+                        "uri" = "${outfile_url}0.parquet",
+                        "hadoop.username" = "${hdfsUserName}",
+                        "format" = "${format}");
+                        """
+
+        qt_hive_docker_04 """ SELECT * FROM ${hive_database}.${hive_table} ORDER BY user_id;"""
+
+    } finally {
+    }
+
+}
diff --git a/regression-test/suites/external_table_p0/hive/test_hive_other.groovy b/regression-test/suites/external_table_p0/hive/test_hive_other.groovy
index b562da294c3..5400c94fc81 100644
--- a/regression-test/suites/external_table_p0/hive/test_hive_other.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_other.groovy
@@ -70,7 +70,18 @@ suite("test_hive_other", "p0,external,hive,external_docker,external_docker_hive"
         sql """grant all on internal.${context.config.defaultDb}.* to 
ext_catalog_user"""
         sql """grant all on ${catalog_name}.*.* to ext_catalog_user"""
         connect(user = 'ext_catalog_user', password = '12345', url = context.config.jdbcUrl) {
-            order_qt_ext_catalog_grants """show databases from ${catalog_name}"""
+            def database_lists = sql """show databases from ${catalog_name}"""
+            boolean ok = false;
+            for (int i = 0; i < database_lists.size(); ++i) {
+                assertEquals(1, database_lists[i].size())
+                if (database_lists[i][0].equals("default")) {
+                    ok = true;
+                    break;
+                }
+            }
+            if (!ok) {
+                throw new IllegalStateException("database 'default' not found in catalog ${catalog_name}")
+            }
         }
         sql """drop user ext_catalog_user"""
 
diff --git a/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy b/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy
index ed6557036d7..855c9f2c472 100644
--- a/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy
@@ -101,7 +101,7 @@ suite("test_insert_from_tvf_with_common_user", "p0") {
                 );
             """
 
-        order_qt_select_base """ SELECT * FROM ${export_table_name} t ORDER BY id; """
+        order_qt_select_base """ SELECT * FROM ${load_table_name} t ORDER BY id; """
     }
 
     sql """drop user ${common_user}"""

