This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new fbd6f012127 [Test](tvf) add regression tests for testing orc reader #41606 #42188 (#42219)
fbd6f012127 is described below
commit fbd6f012127a9ae2e583ba02a0298ccc339e6c88
Author: Rayner Chen <[email protected]>
AuthorDate: Tue Oct 22 09:45:38 2024 +0800
[Test](tvf) add regression tests for testing orc reader #41606 #42188 (#42219)
cherry-pick from #41606 #42188
---------
Co-authored-by: Tiewei Fang <[email protected]>
Co-authored-by: TieweiFang <[email protected]>
---
.../serde/data_type_datetimev2_serde.cpp | 10 -
.../tvf/orc_tvf/test_hdfs_orc_group0_orc_files.out | Bin 0 -> 101642 bytes
.../tvf/orc_tvf/test_hdfs_orc_group1_orc_files.out | 141 +++++++++
.../tvf/orc_tvf/test_hdfs_orc_group2_orc_files.out | 23 ++
.../tvf/orc_tvf/test_hdfs_orc_group3_orc_files.out | 106 +++++++
.../tvf/orc_tvf/test_hdfs_orc_group4_orc_files.out | 121 ++++++++
.../tvf/orc_tvf/test_hdfs_orc_group5_orc_files.out | 27 ++
.../tvf/orc_tvf/test_hdfs_orc_group6_orc_files.out | Bin 0 -> 11033 bytes
.../tvf/orc_tvf/test_hdfs_orc_group7_orc_files.out | Bin 0 -> 654 bytes
.../orc_tvf/test_hdfs_orc_group0_orc_files.groovy | 332 +++++++++++++++++++++
.../orc_tvf/test_hdfs_orc_group1_orc_files.groovy | 66 ++++
.../orc_tvf/test_hdfs_orc_group2_orc_files.groovy | 55 ++++
.../orc_tvf/test_hdfs_orc_group3_orc_files.groovy | 44 +++
.../orc_tvf/test_hdfs_orc_group4_orc_files.groovy | 72 +++++
.../orc_tvf/test_hdfs_orc_group5_orc_files.groovy | 72 +++++
.../orc_tvf/test_hdfs_orc_group6_orc_files.groovy | 317 ++++++++++++++++++++
.../orc_tvf/test_hdfs_orc_group7_orc_files.groovy | 59 ++++
17 files changed, 1435 insertions(+), 10 deletions(-)
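Each new suite follows the same shape: build an hdfs:// URI for one of the ORC files under /user/doris/tvf_data/test_hdfs_orc/, read it through the HDFS() table-valued function, and let the qt_*/order_qt_* check compare the result against the matching section of the suite's .out file (the order_qt_* form sorts the result before comparing). A minimal sketch of that pattern, lifted from the group0 suite below; the suite name and result tag here are illustrative only:

    suite("example_hdfs_orc_tvf", "external,hive,tvf,external_docker") {
        String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")

        def hdfsUserName = "doris"
        def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"

        String enabled = context.config.otherConfigs.get("enableHiveTest")
        if (enabled != null && enabled.equalsIgnoreCase("true")) {
            // Point the HDFS() tvf at a single ORC file and run a query over it;
            // the result is compared against the suite's .out data file.
            def uri = "${defaultFS}" + "/user/doris/tvf_data/test_hdfs_orc/group0/decimal.orc"
            order_qt_example """ select sum(_col0) from HDFS(
                                    "uri" = "${uri}",
                                    "hadoop.username" = "${hdfsUserName}",
                                    "format" = "orc"); """
        }
    }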
diff --git a/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp
b/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp
index e8238af4eee..e231545ba5e 100644
--- a/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp
+++ b/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp
@@ -32,8 +32,6 @@ enum {
};
namespace doris::vectorized {
-static const int64_t timestamp_threshold = -2177481943;
-static const int64_t timestamp_diff = 343;
static const int64_t micr_to_nano_second = 1000;
Status DataTypeDateTimeV2SerDe::serialize_column_to_json(const IColumn&
column, int start_idx,
@@ -234,14 +232,6 @@ Status DataTypeDateTimeV2SerDe::write_column_to_orc(const
std::string& timezone,
return Status::InternalError("get unix timestamp error.");
}
- // -2177481943 represent '1900-12-31 23:54:17'
- // but -2177481944 represent '1900-12-31 23:59:59'
- // so for timestamp <= -2177481944, we subtract 343 (5min 43s)
- // Reference:
https://www.timeanddate.com/time/change/china/shanghai?year=1900
- if (timezone == TimezoneUtils::default_time_zone && timestamp <
timestamp_threshold) {
- timestamp -= timestamp_diff;
- }
-
cur_batch->data[row_id] = timestamp;
cur_batch->nanoseconds[row_id] = datetime_val.microsecond() *
micr_to_nano_second;
}
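For readers puzzling over the deleted constants: Asia/Shanghai kept local mean time (UTC+08:05:43) until the start of 1901, which is the 5 min 43 s (343 s) the removed comment refers to, so epoch seconds at or below -2177481944 land on different wall-clock times depending on whether that historical offset is applied. A small, JDK-only Groovy sketch of the boundary (the +08:05:43 offset comes from the tz database, not from this diff):

    import java.time.Instant
    import java.time.ZoneOffset

    // One second before the 1901 transition encoded by the removed threshold.
    long epoch = -2177481944L
    def lmt = ZoneOffset.ofHoursMinutesSeconds(8, 5, 43)  // pre-1901 Shanghai local mean time
    def cst = ZoneOffset.ofHours(8)                       // plain UTC+08:00

    println Instant.ofEpochSecond(epoch).atOffset(lmt)    // 1900-12-31T23:59:59+08:05:43
    println Instant.ofEpochSecond(epoch).atOffset(cst)    // 1900-12-31T23:54:16+08:00, 343 s earlier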
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group0_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group0_orc_files.out
new file mode 100644
index 00000000000..01158a2fb60
Binary files /dev/null and
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group0_orc_files.out
differ
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group1_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group1_orc_files.out
new file mode 100644
index 00000000000..12864d9c8a4
--- /dev/null
+++
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group1_orc_files.out
@@ -0,0 +1,141 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !test_0 --
+0200-02-10
+0947-02-16
+1012-02-27
+1014-02-17
+1947-02-11
+2012-02-21
+2014-02-11
+8200-02-11
+
+-- !test_3 --
+2 foo 0.8 1 1969-12-31T16:00
+5 eat 0.8 6 1969-12-31T16:00:20
+13 bar 80.0 2 1969-12-31T16:00:05
+29 cat 8.0 3 1969-12-31T16:00:10
+70 dog 1.8 4 1969-12-31T16:00:15
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+100 zebra 8.0 0 1969-12-31T16:04:10
+
+-- !test_4 --
+105685 almond antique violet chocolate turquoise Manufacturer#2
Brand#22 MEDIUM ANODIZED COPPER 14 MED CAN 1690.68 ly pending requ
+110592 almond antique salmon chartreuse burlywood Manufacturer#1
Brand#15 PROMO BURNISHED NICKEL 6 JUMBO PKG 1602.59 to the
furiously
+112398 almond antique metallic orange dim Manufacturer#3 Brand#32
MEDIUM BURNISHED BRASS 19 JUMBO JAR 1410.39 ole car
+121152 almond antique burnished rose metallic Manufacturer#1 Brand#14
PROMO PLATED TIN 2 JUMBO BOX 1173.15 e pinto beans h
+121152 almond antique burnished rose metallic Manufacturer#1 Brand#14
PROMO PLATED TIN 2 JUMBO BOX 1173.15 e pinto beans h
+132666 almond aquamarine rose maroon antique Manufacturer#2 Brand#24
SMALL POLISHED NICKEL 25 MED BOX 1698.66 even
+144293 almond antique olive coral navajo Manufacturer#3 Brand#34
STANDARD POLISHED STEEL 45 JUMBO CAN 1337.29 ag furiously about
+146985 almond aquamarine midnight light salmon Manufacturer#2 Brand#23
MEDIUM BURNISHED COPPER 2 SM CASE 2031.98 s cajole caref
+15103 almond aquamarine dodger light gainsboro Manufacturer#5
Brand#53 ECONOMY BURNISHED STEEL 46 LG PACK 1018.1 packages hinder
carefu
+155733 almond antique sky peru orange Manufacturer#5 Brand#53 SMALL
PLATED BRASS 2 WRAP DRUM 1788.73 furiously. bra
+17273 almond antique forest lavender goldenrod Manufacturer#3
Brand#35 PROMO ANODIZED TIN 14 JUMBO CASE 1190.27 along
the
+17927 almond aquamarine yellow dodger mint Manufacturer#4 Brand#41
ECONOMY BRUSHED COPPER 7 SM PKG 1844.92 ites. eve
+191709 almond antique violet turquoise frosted Manufacturer#2 Brand#22
ECONOMY POLISHED STEEL 40 MED BOX 1800.7 haggle
+192697 almond antique blue firebrick mint Manufacturer#5 Brand#52
MEDIUM BURNISHED TIN 31 LG DRUM 1789.69 ickly ir
+195606 almond aquamarine sandy cyan gainsboro Manufacturer#2 Brand#25
STANDARD PLATED TIN 18 SM PKG 1701.6 ic de
+33357 almond azure aquamarine papaya violet Manufacturer#4 Brand#41
STANDARD ANODIZED TIN 12 WRAP CASE 1290.35 reful
+40982 almond antique misty red olive Manufacturer#3 Brand#32 ECONOMY
PLATED COPPER 1 LG PKG 1922.98 c foxes can s
+42669 almond antique medium spring khaki Manufacturer#5 Brand#51
STANDARD BURNISHED TIN 6 MED CAN 1611.66 sits haggl
+45261 almond aquamarine floral ivory bisque Manufacturer#4 Brand#42
SMALL PLATED STEEL 27 WRAP CASE 1206.26 careful
+48427 almond antique violet mint lemon Manufacturer#4 Brand#42
PROMO POLISHED STEEL 39 SM CASE 1375.42 hely ironic i
+49671 almond antique gainsboro frosted violet Manufacturer#4 Brand#41
SMALL BRUSHED BRASS 10 SM BOX 1620.67 ccounts run quick
+65667 almond aquamarine pink moccasin thistle Manufacturer#1 Brand#12
LARGE BURNISHED STEEL 42 JUMBO CASE 1632.66 e across the expr
+78486 almond azure blanched chiffon midnight Manufacturer#5 Brand#52
LARGE BRUSHED BRASS 23 MED BAG 1464.48 hely blith
+85768 almond antique chartreuse lavender yellow Manufacturer#1
Brand#12 LARGE BRUSHED STEEL 34 SM BAG 1753.76 refull
+86428 almond aquamarine burnished black steel Manufacturer#1 Brand#12
STANDARD ANODIZED STEEL 28 WRAP BAG 1414.42 arefully
+90681 almond antique chartreuse khaki white Manufacturer#3 Brand#31
MEDIUM BURNISHED TIN 17 SM CASE 1671.68 are slyly after the sl
+
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group2_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group2_orc_files.out
new file mode 100644
index 00000000000..3046384b928
--- /dev/null
+++
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group2_orc_files.out
@@ -0,0 +1,23 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !test_0 --
+row 000000
+row 000001
+row 000002
+row 000003
+row 000004
+row 000005
+row 000006
+row 000007
+row 000008
+row 000009
+
+-- !test_1 --
+1200-01-08
+
+-- !test_2 --
+0
+
+-- !test_3 --
+Alyssa \N [3, 9, 15, 20]
+Ben red []
+
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group3_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group3_orc_files.out
new file mode 100644
index 00000000000..efb80d6c5fe
--- /dev/null
+++
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group3_orc_files.out
@@ -0,0 +1,106 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !test_0 --
+42
+
+-- !test_1 --
+-1155099828 0 -8072133231410116475
+-836442134 1 1238145679872042884
+-995758198 2 -7837173574477374810
+-634239373 3 -6265997042966714236
+-830330741 4 1226587808838328746
+1767665612 5 -3483296404361882349
+-234435524 6 -2492777959717761890
+-1237341112 7 -1655110853862828106
+-3666739 8 762716722992837914
+594038518 9 -414822555163282702
+680485277 10 -8438545694061423851
+1990579537 11 1725616428769673202
+1904347123 12 7487194301641871634
+318397575 13 5284947443410977522
+726871265 14 -4971309310269585972
+470886284 15 -1048347571663351969
+-469827848 16 5914087625780064167
+-1935288453 17 -7901283747506674198
+1091416458 18 6617893815478315911
+589802492 19 -715526239534668540
+1433595053 20 7812935714614531047
+1463956372 21 7736151544221720702
+538577990 22 4280956441582620229
+261903418 23 8643666910095216441
+726547892 24 4589536732286802996
+1827934852 25 5220768139842995762
+-2142712056 26 8452212129931893439
+-1979643329 27 9192676079737912910
+1439902652 28 -5485358416046815779
+1509471580 29 -2900537060362111313
+231072361 30 -1166373669027043549
+-697207184 31 -4941710932145227910
+-1897073668 32 -1214632250906731010
+492728889 33 882146759164402004
+441673793 34 -1623585845533786862
+1053599723 35 -7650598155209059322
+1220936946 36 733466437358115738
+1770977837 37 -1075813735198997759
+332258178 38 5380612601496486616
+-1323562223 39 -2783026025528108869
+2127527772 40 4508443200032579318
+1127644012 41 -6689049007808433632
+-1518802603 42 5499312000257318891
+1211783764 43 -4583514533579607671
+673031799 44 2616088513476383289
+1357179832 45 7306204410731777268
+641449848 46 -7300459700312065882
+1678603587 47 -2031057129963086472
+-479585417 48 5657078495382185794
+218433295 49 -4753393794365190618
+-168375338 50 641301706817379655
+-168670764 51 -5560416134812969260
+-688521145 52 -2939353597904605496
+1521405587 53 -7202421235678721139
+280766823 54 1482820802614970407
+2006072305 55 2582426602654761877
+2841986 56 -3096925422848703303
+1823577017 57 3525728051657376574
+86980171 58 6283812381833889592
+-729958587 59 6505103417183468045
+58615730 60 2130961958634360268
+959699894 61 2427093929580813013
+-1125707998 62 -3191632577313116988
+1004001096 63 8703825967927764437
+1139572680 64 6580459837064512104
+-474634457 65 -1099847229116957438
+-140317777 66 2216635117068313445
+492523913 67 -5990170212825458438
+-842003748 68 5108694483378094778
+794552832 69 -6096899967171477161
+-844195667 70 5179278291182829888
+1984499260 71 6237468570917148003
+-1177191130 72 -7377861005267193838
+-1938669641 73 8248065364431718993
+272665329 74 5506944610710795910
+1705319172 75 -6584616164208436602
+-1108396995 76 1345540141198804862
+440543153 77 -85607681367308891
+2032289729 78 -5986629849796103379
+-842603518 79 -1064662868388789428
+-361562994 80 1801059877260196266
+-1174579731 81 -5036539402851796276
+-1831395724 82 8364289038127439850
+-1980899554 83 -702572046954790847
+743792160 84 4229501672312896502
+1422058480 85 -391320938771541039
+1492414283 86 -1809927736944097678
+-1820060267 87 -6396528855664002559
+1375472775 88 1126092692710756057
+-839029264 89 -3470724079771464614
+1119484814 90 87521117139371251
+-365152316 91 3748318374606874591
+-1851648474 92 560370592390608245
+-524654524 93 2263722670392996598
+-1318061333 94 8352976901787855054
+1720264500 95 -5276809390947146025
+503688873 96 1116640964922907260
+-1033291508 97 8338246827928889510
+1306784267 98 -4929504406024123242
+-179756314 99 2571427694455332022
+
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group4_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group4_orc_files.out
new file mode 100644
index 00000000000..1f5ea974148
--- /dev/null
+++
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group4_orc_files.out
@@ -0,0 +1,121 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !test_0 --
+2022-06-10T05:26:22.753999 2022-06-09T21:26:22.753999
+
+-- !test_1 --
+1 John Doe
+
+-- !test_2 --
+\N
+127
+
+-- !test_3 --
+\N
+32767
+
+-- !test_4 --
+0 1 536870912 0 1 {"a":1}
+0 2 536870912 0 2 {"a":2}
+
+-- !test_5 --
+-1155099828 0 -8072133231410116475
+-836442134 1 1238145679872042884
+-995758198 2 -7837173574477374810
+-634239373 3 -6265997042966714236
+-830330741 4 1226587808838328746
+1767665612 5 -3483296404361882349
+-234435524 6 -2492777959717761890
+-1237341112 7 -1655110853862828106
+-3666739 8 762716722992837914
+594038518 9 -414822555163282702
+680485277 10 -8438545694061423851
+1990579537 11 1725616428769673202
+1904347123 12 7487194301641871634
+318397575 13 5284947443410977522
+726871265 14 -4971309310269585972
+470886284 15 -1048347571663351969
+-469827848 16 5914087625780064167
+-1935288453 17 -7901283747506674198
+1091416458 18 6617893815478315911
+589802492 19 -715526239534668540
+1433595053 20 7812935714614531047
+1463956372 21 7736151544221720702
+538577990 22 4280956441582620229
+261903418 23 8643666910095216441
+726547892 24 4589536732286802996
+1827934852 25 5220768139842995762
+-2142712056 26 8452212129931893439
+-1979643329 27 9192676079737912910
+1439902652 28 -5485358416046815779
+1509471580 29 -2900537060362111313
+231072361 30 -1166373669027043549
+-697207184 31 -4941710932145227910
+-1897073668 32 -1214632250906731010
+492728889 33 882146759164402004
+441673793 34 -1623585845533786862
+1053599723 35 -7650598155209059322
+1220936946 36 733466437358115738
+1770977837 37 -1075813735198997759
+332258178 38 5380612601496486616
+-1323562223 39 -2783026025528108869
+2127527772 40 4508443200032579318
+1127644012 41 -6689049007808433632
+-1518802603 42 5499312000257318891
+1211783764 43 -4583514533579607671
+673031799 44 2616088513476383289
+1357179832 45 7306204410731777268
+641449848 46 -7300459700312065882
+1678603587 47 -2031057129963086472
+-479585417 48 5657078495382185794
+218433295 49 -4753393794365190618
+-168375338 50 641301706817379655
+-168670764 51 -5560416134812969260
+-688521145 52 -2939353597904605496
+1521405587 53 -7202421235678721139
+280766823 54 1482820802614970407
+2006072305 55 2582426602654761877
+2841986 56 -3096925422848703303
+1823577017 57 3525728051657376574
+86980171 58 6283812381833889592
+-729958587 59 6505103417183468045
+58615730 60 2130961958634360268
+959699894 61 2427093929580813013
+-1125707998 62 -3191632577313116988
+1004001096 63 8703825967927764437
+1139572680 64 6580459837064512104
+-474634457 65 -1099847229116957438
+-140317777 66 2216635117068313445
+492523913 67 -5990170212825458438
+-842003748 68 5108694483378094778
+794552832 69 -6096899967171477161
+-844195667 70 5179278291182829888
+1984499260 71 6237468570917148003
+-1177191130 72 -7377861005267193838
+-1938669641 73 8248065364431718993
+272665329 74 5506944610710795910
+1705319172 75 -6584616164208436602
+-1108396995 76 1345540141198804862
+440543153 77 -85607681367308891
+2032289729 78 -5986629849796103379
+-842603518 79 -1064662868388789428
+-361562994 80 1801059877260196266
+-1174579731 81 -5036539402851796276
+-1831395724 82 8364289038127439850
+-1980899554 83 -702572046954790847
+743792160 84 4229501672312896502
+1422058480 85 -391320938771541039
+1492414283 86 -1809927736944097678
+-1820060267 87 -6396528855664002559
+1375472775 88 1126092692710756057
+-839029264 89 -3470724079771464614
+1119484814 90 87521117139371251
+-365152316 91 3748318374606874591
+-1851648474 92 560370592390608245
+-524654524 93 2263722670392996598
+-1318061333 94 8352976901787855054
+1720264500 95 -5276809390947146025
+503688873 96 1116640964922907260
+-1033291508 97 8338246827928889510
+1306784267 98 -4929504406024123242
+-179756314 99 2571427694455332022
+
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group5_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group5_orc_files.out
new file mode 100644
index 00000000000..3440963fcaf
--- /dev/null
+++
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group5_orc_files.out
@@ -0,0 +1,27 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !test_1 --
+123 1
+456 2
+
+-- !test_2 --
+[{"elem1":1, "elem2":"123", "elem3":9.8, "nested":[{"elem1":1, "elem2":"123",
"elem3":9.8}, {"elem1":2, "elem2":"456", "elem3":10.12}, {"elem1":3,
"elem2":"789", "elem3":11.14}]}, {"elem1":2, "elem2":"456", "elem3":10.12,
"nested":[{"elem1":4, "elem2":"101112", "elem3":123.8}, {"elem1":5,
"elem2":"131415", "elem3":10.2}, {"elem1":6, "elem2":"161718",
"elem3":11.414}]}, {"elem1":3, "elem2":"789", "elem3":11.14,
"nested":[{"elem1":7, "elem2":"101", "elem3":13.08}, {"elem1":8, "elem2":"415"
[...]
+[{"elem1":4, "elem2":"101112", "elem3":123.8, "nested":[{"elem1":1,
"elem2":"123", "elem3":9.8}, {"elem1":2, "elem2":"456", "elem3":10.12},
{"elem1":3, "elem2":"789", "elem3":11.14}]}, {"elem1":5, "elem2":"131415",
"elem3":10.2, "nested":[{"elem1":4, "elem2":"101112", "elem3":123.8},
{"elem1":5, "elem2":"131415", "elem3":10.2}, {"elem1":6, "elem2":"161718",
"elem3":11.414}]}, {"elem1":6, "elem2":"161718", "elem3":11.414,
"nested":[{"elem1":7, "elem2":"101", "elem3":13.08}, {"elem1":8, "e [...]
+[{"elem1":7, "elem2":"101", "elem3":13.08, "nested":[{"elem1":1,
"elem2":"123", "elem3":9.8}, {"elem1":2, "elem2":"456", "elem3":10.12},
{"elem1":3, "elem2":"789", "elem3":11.14}]}, {"elem1":8, "elem2":"415",
"elem3":1.12, "nested":[{"elem1":4, "elem2":"101112", "elem3":123.8},
{"elem1":5, "elem2":"131415", "elem3":10.2}, {"elem1":6, "elem2":"161718",
"elem3":11.414}]}, {"elem1":9, "elem2":"118", "elem3":0.414,
"nested":[{"elem1":7, "elem2":"101", "elem3":13.08}, {"elem1":8, "elem2":"415
[...]
+
+-- !test_3 --
+1 2024-06-30T20:00
+
+-- !test_4 --
+123 1
+456 2
+
+-- !test_5 --
+[{"elem1":1, "elem2":"123", "elem3":9.8}, {"elem1":2, "elem2":"456",
"elem3":10.12}, {"elem1":3, "elem2":"789", "elem3":11.14}]
+[{"elem1":4, "elem2":"101112", "elem3":123.8}, {"elem1":5, "elem2":"131415",
"elem3":10.2}, {"elem1":6, "elem2":"161718", "elem3":11.414}]
+[{"elem1":7, "elem2":"101", "elem3":13.08}, {"elem1":8, "elem2":"415",
"elem3":1.12}, {"elem1":9, "elem2":"118", "elem3":0.414}]
+
+-- !test_6 --
+\N \N
+[0] ["Test 0"]
+[null] [null]
+
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group6_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group6_orc_files.out
new file mode 100644
index 00000000000..85b52a6b28f
Binary files /dev/null and
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group6_orc_files.out
differ
diff --git
a/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group7_orc_files.out
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group7_orc_files.out
new file mode 100644
index 00000000000..bf0d7b8a515
Binary files /dev/null and
b/regression-test/data/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group7_orc_files.out
differ
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group0_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group0_orc_files.groovy
new file mode 100644
index 00000000000..924ceca4204
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group0_orc_files.groovy
@@ -0,0 +1,332 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group0_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/decimal.orc"
+ order_qt_test_0 """ select sum(_col0) from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testSargSkipPickupGroupWithoutIndexJava.orc"
+ order_qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 10; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/sample2.orc"
+ order_qt_test_2 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/sample1.orc"
+ order_qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.test1.orc"
+ order_qt_test_4 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testWithoutIndex.orc"
+ order_qt_test_5 """ select count(*) from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestVectorOrcFile.testLz4.orc"
+ order_qt_test_6 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by y limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testSargSkipPickupGroupWithoutIndexCPlusPlus.orc"
+ order_qt_test_7 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by x limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestVectorOrcFile.testZstd.0.12.orc"
+ order_qt_test_9 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by y limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestStringDictionary.testRowIndex.orc"
+ order_qt_test_10 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by str limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testPredicatePushdown.orc"
+ order_qt_test_11 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by int1 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.metaData.orc"
+ order_qt_test_12 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/col.dot.orc"
+ order_qt_test_13 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/zero.orc"
+ order_qt_test_15 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.columnProjection.orc"
+ order_qt_test_18 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by int1 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc_no_format.orc"
+ order_qt_test_19 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testStringAndBinaryStatistics.orc"
+ order_qt_test_20 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc_split_elim_cpp.orc"
+ order_qt_test_21 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by userid limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/demo-11-none.orc"
+ order_qt_test_22 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by _col0 limit 100; """
+
+ order_qt_test_22_2 """ select count(_col0) from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testDate2038.orc"
+ order_qt_test_23 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by time limit 10; """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/nulls-at-end-snappy.orc"
+ order_qt_test_25 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by _col0 DESC limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestVectorOrcFile.testLzo.orc"
+ order_qt_test_26 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by y limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/decimal64_v2_cplusplus.orc"
+ order_qt_test_27 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/complextypes_iceberg.orc"
+ order_qt_test_28 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/bad_bloom_filter_1.6.11.orc"
+ order_qt_test_30 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/demo-11-zlib.orc"
+ order_qt_test_31 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by _col0 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/acid5k.orc"
+ order_qt_test_32 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by rowid limit 100; """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc_index_int_string.orc"
+ order_qt_test_34 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by _col0 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testMemoryManagementV12.orc"
+ order_qt_test_35 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by int1 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testSnappy.orc"
+ order_qt_test_36 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by int1 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/demo-12-zlib.orc"
+ order_qt_test_37 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by _col0 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/over1k_bloom.orc"
+ order_qt_test_38 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by _col0 DESC, _col1 DESC
limit 98; """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc-file-no-timezone.orc"
+ order_qt_test_41 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/decimal64_v2.orc"
+ order_qt_test_42 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc-file-dst-no-timezone.orc"
+ order_qt_test_43 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.emptyFile.orc"
+ order_qt_test_44 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testSeek.orc"
+ order_qt_test_45 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by int1 limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc-file-no-double-statistic.orc"
+ order_qt_test_46 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc_split_elim.orc"
+ order_qt_test_47 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by userid limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testStripeLevelStats.orc"
+ order_qt_test_48 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 10; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/orc_split_elim_new.orc"
+ order_qt_test_49 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by userid limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/bad_bloom_filter_1.6.0.orc"
+ order_qt_test_50 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group0/TestOrcFile.testMemoryManagementV11.orc"
+ order_qt_test_51 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by int1 limit 100; """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group1_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group1_orc_files.groovy
new file mode 100644
index 00000000000..44176a47fd4
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group1_orc_files.groovy
@@ -0,0 +1,66 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group1_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group1/orc_legacy_mixed_dates.orc"
+ order_qt_test_0 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+        // There is a timestamp problem in this case.
+ // uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group1/orc_legacy_mixed_timestamps.orc"
+ // order_qt_test_1 """ select * from HDFS(
+ // "uri" = "${uri}",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "orc"); """
+
+        // Doris cannot read this ORC file because of a NOT_IMPLEMENT error.
+
+ // uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group1/orc-file-11-format.orc"
+ // order_qt_test_2 """ select * from HDFS(
+ // "uri" = "${uri}",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group1/orc_split_elim.orc"
+ qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by userid limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group1/part.orc"
+ order_qt_test_4 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group2_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group2_orc_files.groovy
new file mode 100644
index 00000000000..4495494a3f0
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group2_orc_files.groovy
@@ -0,0 +1,55 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group2_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group2/TestStringDictionary.testRowIndex.orc"
+ order_qt_test_0 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 10; """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group2/before_1582_date_v2_4.snappy.orc"
+ order_qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group2/part-r-00000-829af031-b970-49d6-ad39-30460a0be2c8.orc"
+ order_qt_test_2 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group2/users.orc"
+ order_qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group3_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group3_orc_files.groovy
new file mode 100644
index 00000000000..1670909ea4e
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group3_orc_files.groovy
@@ -0,0 +1,44 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group3_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group3/data.orc"
+ order_qt_test_0 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group3/apache-lz4.orc"
+ qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by y limit 100; """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group4_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group4_orc_files.groovy
new file mode 100644
index 00000000000..bf362d442d5
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group4_orc_files.groovy
@@ -0,0 +1,72 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group4_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group4/timestamp-tz-micros.orc"
+ order_qt_test_0 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group4/data.orc"
+ order_qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group4/single-tinyint-column.orc"
+ order_qt_test_2 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group4/single-smallint-column.orc"
+ order_qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group4/orcFileWithoutRowGroupInfo.orc"
+ order_qt_test_4 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group4/apache-lz4.orc"
+ qt_test_5 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by y limit 100; """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group5_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group5_orc_files.groovy
new file mode 100644
index 00000000000..379d6604e72
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group5_orc_files.groovy
@@ -0,0 +1,72 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group5_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group5/case_insensitive_column_matching.orc"
+ order_qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group5/nested_nested_table.orc"
+ order_qt_test_2 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group5/test_reader_time_zone.snappy.orc"
+ order_qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group5/test_setting_input_format_use_lowercase_column_name.orc"
+ order_qt_test_4 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group5/nested_table.orc"
+ order_qt_test_5 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group5/test_null_array.orc"
+ order_qt_test_6 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group6_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group6_orc_files.groovy
new file mode 100644
index 00000000000..0afe32746da
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group6_orc_files.groovy
@@ -0,0 +1,317 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group6_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/array_data_only.orc"
+ order_qt_test_0 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_positional_column.orc"
+ order_qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/issue_16365.orc"
+ order_qt_test_2 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/boolean_type.orc"
+ order_qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/map_data_only.orc"
+ order_qt_test_4 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/data-bb44368c-b491-49ab-b81a-eea013f94132-0.orc"
+ order_qt_test_5 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/empty_row_index.orc"
+ order_qt_test_6 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_varchar_column.orc"
+ order_qt_test_7 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_array_basic.orc"
+ order_qt_test_8 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_struct_array_map_basic.orc"
+ order_qt_test_9 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/writer_tz_utc.orc"
+ order_qt_test_10 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_binary_column.orc"
+ order_qt_test_11 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/writer_at_shanghai.orc"
+ order_qt_test_12 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/struct_data_only.orc"
+ order_qt_test_13 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/compound.orc"
+ order_qt_test_14 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/data-32204456-8395-4f77-9347-b2d40939a5d5-0.orc"
+ order_qt_test_15 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/map_decimal_date.lz4.orc"
+ order_qt_test_16 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/string-2-double.orc"
+ order_qt_test_17 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/row-4k-id.orc"
+ order_qt_test_18 """ select sum(id) from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_struct_basic.orc"
+ order_qt_test_19 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/writer_tz_shanghai.orc"
+ order_qt_test_20 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/non_vec_orc_scanner.orc"
+ order_qt_test_21 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/tinyint.orc"
+ order_qt_test_22 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_upper_case.orc"
+ order_qt_test_23 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_padding_char.orc"
+ order_qt_test_24 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/date_type.orc"
+ order_qt_test_25 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/scalar_types.orc"
+ order_qt_test_26 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_test_time_column.orc"
+ order_qt_test_27 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/two-strips-dict-and-nodict.orc"
+ order_qt_test_28 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/data-07ea8d48-6012-4a76-a564-c422995189f2-0.orc"
+ order_qt_test_29 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/type_mismatch.orc"
+ order_qt_test_30 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/map_type_mismatched.orc"
+ order_qt_test_31 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/decimal_and_timestamp.orc"
+ order_qt_test_32 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/string-dict-column.orc"
+ qt_test_33 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") order by col1 limit 10; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/orc_zero_size_stream.orc"
+ order_qt_test_34 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/boolean_slot_ref.orc"
+ order_qt_test_35 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/multi_stripes.orc"
+ qt_test_36 """ select c1, sum(c0) from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") group by c1 order by c1; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/padding_char_varchar_10k.orc"
+ order_qt_test_37 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc") limit 10; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/map_filter_bug.orc"
+ order_qt_test_38 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/timestamp.orc"
+ order_qt_test_39 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group6/dec_orc.orc"
+ order_qt_test_40 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+ } finally {
+ }
+ }
+}
diff --git
a/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group7_orc_files.groovy
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group7_orc_files.groovy
new file mode 100644
index 00000000000..ef43761b89c
--- /dev/null
+++
b/regression-test/suites/external_table_p0/tvf/orc_tvf/test_hdfs_orc_group7_orc_files.groovy
@@ -0,0 +1,59 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hdfs_orc_group7_orc_files","external,hive,tvf,external_docker") {
+ String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+ def hdfsUserName = "doris"
+ def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+ def uri = ""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled != null && enabled.equalsIgnoreCase("true")) {
+ try {
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group7/decimal.orc"
+ qt_test_0 """ select sum(_col0) from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group7/TestOrcFile.test1.orc"
+ qt_test_1 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+
+
+        // There is a timestamp problem in this case.
+ // uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group7/TestOrcFile.testDate1900.orc"
+ // qt_test_2 """ select * from HDFS(
+ // "uri" = "${uri}",
+ // "hadoop.username" = "${hdfsUserName}",
+ // "format" = "orc") order by time limit 100; """
+
+
+ uri = "${defaultFS}" +
"/user/doris/tvf_data/test_hdfs_orc/group7/TestOrcFile.emptyFile.orc"
+ qt_test_3 """ select * from HDFS(
+ "uri" = "${uri}",
+ "hadoop.username" = "${hdfsUserName}",
+ "format" = "orc"); """
+ } finally {
+ }
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]