This is an automated email from the ASF dual-hosted git repository.
diqiu50 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git
The following commit(s) were added to refs/heads/main by this push:
new 986af9279 [#5631] Add test cases for other supported catalogs in Trino
Cascading (#5632)
986af9279 is described below
commit 986af9279caab991a9a5b51eec7a3507ef723b78
Author: danhuawang <[email protected]>
AuthorDate: Tue Dec 3 10:00:12 2024 +0800
[#5631] Add test cases for other supported catalogs in Trino Cascading
(#5632)
### What changes were proposed in this pull request?
1. Trino connector: add some external-table-type test cases in the Hive
catalog.
2. Cascading connector: Add cases to cover the Hive, PG, and Iceberg catalogs.
3. Some test cases can't pass on Trino Cascading v0.0.1, so I moved them to
the "ignored" directory.
### Why are the changes needed?
In trino-cascading-testsets, only the MySQL catalog was covered.
We need to add more test cases for the other supported catalogs.
Fix: #5631
### Does this PR introduce _any_ user-facing change?
N/A
### How was this patch tested?
```
./gradlew -PskipTests -PtestMode=embedded -PjdkVersion=17
-PskipDockerTests=false :trino-connector:integration-test:test
./gradlew -PskipTests -PtestMode=deploy -PjdkVersion=17
-PskipDockerTests=false :trino-connector:integration-test:test
trino-connector/integration-test/trino-test-tools/run_test.sh
2024-11-20 19:39:05 INFO [main] TrinoQueryIT:402 - All testers have
finished. Total:116, Pass: 116
hive:hive/00001_datatype.sql, PASS, 4.743,
iceberg:lakehouse-iceberg-hive/00002_partition_sort_order.sql, PASS, 4.042,
iceberg_mysql:lakehouse-iceberg-mysql/00002_partition_sort_order.sql, PASS,
1.614,
iceberg_postgres:lakehouse-iceberg-postgres/00002_partition_sort_order.sql,
PASS, 3.290,
mysql:jdbc-mysql/00001_datatype.sql, PASS, 1.753,
mysql:jdbc-mysql/00002_pushdown.sql, PASS, 1.095,
mysql:jdbc-mysql/catalog_mysql_cascading_test.sql, PASS, 0.207,
mysql:tpcds/00000.sql, PASS, 16.305,
mysql:tpcds/00001.sql, PASS, 1.036,
mysql:tpcds/00002.sql, PASS, 1.954,
mysql:tpcds/00003.sql, PASS, 0.795,
mysql:tpcds/00004.sql, PASS, 3.311,
mysql:tpcds/00005.sql, PASS, 2.008,
mysql:tpcds/00006.sql, PASS, 1.282,
mysql:tpcds/00007.sql, PASS, 1.439,
....
mysql:tpch/00021.sql, PASS, 1.336,
mysql:tpch/00022.sql, PASS, 0.536,
pg:jdbc-postgresql/00001_datatype.sql, PASS, 1.259,
pg:jdbc-postgresql/00002_pushdown.sql, PASS, 1.583,
pg:jdbc-postgresql/catalog_pg_cascading_test.sql, PASS, 0.217,
All testers have finished. Total:116, Pass: 116
```
---
LICENSE | 1 +
.../connector/integration/test/TrinoQueryIT.java | 4 +-
.../integration/test/TrinoQueryITBase.java | 2 +-
.../integration/test/TrinoQueryRunner.java | 25 +++++
.../testsets/hive/00008_decimal.sql | 52 ++++-----
.../trino-ci-testset/testsets/hive/00009_array.sql | 60 +++++------
.../trino-ci-testset/testsets/hive/00010_map.sql | 44 ++++----
.../trino-ci-testset/testsets/hive/00011_row.sql | 44 ++++----
.../testsets/hive/00014_partition_sort_order.sql | 67 ++++++++++++
.../testsets/hive/00014_partition_sort_order.txt | 39 +++++++
.../testsets/hive/catalog_hive_cleanup.sql | 2 +-
.../testsets/jdbc-mysql/00000_create_table.sql | 11 +-
.../testsets/jdbc-mysql/00000_create_table.txt | 16 ++-
.../jdbc-postgresql/00000_create_table.sql | 22 +++-
.../jdbc-postgresql/00000_create_table.txt | 18 ++++
.../jdbc-postgresql/00001_select_table.sql | 2 +
.../jdbc-postgresql/00001_select_table.txt | 2 +
.../testsets/jdbc-postgresql/00002_alter_table.sql | 38 +++++++
.../testsets/jdbc-postgresql/00002_alter_table.txt | 36 ++++++-
.../testsets/jdbc-postgresql/00006_datatype.sql | 1 -
.../lakehouse-iceberg/00000_create_table.sql | 7 ++
.../lakehouse-iceberg/00000_create_table.txt | 2 +
.../hive/00001_datatype.sql | 65 +++++++++++
.../hive/00001_datatype.txt | 66 ++++++++++++
.../hive/catalog_hive_cleanup.sql | 0
.../hive/catalog_hive_prepare.sql | 23 ++++
.../hive/ignored/00001_datatype.sql} | 75 +++++++++----
.../hive/ignored/00001_datatype.txt | 86 +++++++++++++++
.../jdbc-mysql/00001_datatype.sql | 19 ++++
.../jdbc-mysql/00001_datatype.txt | 23 ++++
.../jdbc-mysql/00002_pushdown.sql | 49 +++++++++
.../jdbc-mysql/00002_pushdown.txt | 56 ++++++++++
.../jdbc-mysql/catalog_mysql_cleanup.sql | 0
.../jdbc-mysql/catalog_mysql_prepare.sql | 23 ++++
.../jdbc-mysql/ignored/00001_datatype.sql | 55 ++++++++++
.../jdbc-mysql/ignored/00001_datatype.txt | 59 ++++++++++
.../jdbc-postgresql/00001_datatype.sql | 42 ++++++++
.../jdbc-postgresql/00001_datatype.txt | 69 ++++++++++++
.../jdbc-postgresql/00002_pushdown.sql | 104 ++++++++++++++++++
.../jdbc-postgresql/00002_pushdown.txt | 78 ++++++++++++++
.../jdbc-postgresql/catalog_pg_cleanup.sql | 0
.../jdbc-postgresql/catalog_pg_prepare.sql | 26 +++++
.../jdbc-postgresql/ignored/00001_datatype.sql | 50 +++++++++
.../jdbc-postgresql/ignored/00001_datatype.txt | 15 +++
.../00002_partition_sort_order.sql | 11 ++
.../00002_partition_sort_order.txt | 27 +++++
.../lakehouse-iceberg/catalog_iceberg_cleanup.sql | 0
.../catalog_iceberg_mysql_cleanup.sql | 1 +
.../catalog_iceberg_mysql_prepare.sql | 119 +++++++++++++++++++++
.../catalog_iceberg_postgres_cleanup.sql | 1 +
.../catalog_iceberg_postgres_prepare.sql | 119 +++++++++++++++++++++
.../lakehouse-iceberg/catalog_iceberg_prepare.sql | 117 ++++++++++++++++++++
.../lakehouse-iceberg/ignored/00001_datatype.sql | 7 ++
.../lakehouse-iceberg/ignored/00001_datatype.txt | 45 ++++++++
.../ignored/00002_partition_sort_order.sql | 3 +
.../ignored/00002_partition_sort_order.txt | 51 +++++++++
.../tpch/catalog_mysql_prepare.sql | 2 +
.../trino-test-tools/download_jar.sh | 2 +
.../trino-cascading-env/docker-compose.yaml | 43 ++++++++
.../{inspect_ip.sh => init/hive/init.sh} | 31 +-----
.../trino-cascading-env/init/postgres/init.sql | 19 ++++
.../trino-cascading-env/init/trino-local/init.sh | 1 +
.../trino-cascading-env/init/trino-remote/init.sh | 2 +-
.../trino-cascading-env/inspect_ip.sh | 12 ++-
64 files changed, 1928 insertions(+), 163 deletions(-)
diff --git a/LICENSE b/LICENSE
index 16d1fbec5..e8bef4c1d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -290,6 +290,7 @@
Trino
./integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/CloseableGroup.java
./trino-connector/trino-connector/src/main/java/org/apache/gravitino/trino/connector/catalog/hive/SortingColumn.java
+
./trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00012_format.sql
Apache Arrow
./dev/ci/util_free_space.sh
diff --git
a/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryIT.java
b/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryIT.java
index ffbd2879a..d9940de45 100644
---
a/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryIT.java
+++
b/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryIT.java
@@ -162,7 +162,7 @@ public class TrinoQueryIT extends TrinoQueryITBase {
sqls = removeSqlComments(sqls);
Matcher sqlMatcher =
- Pattern.compile("(\\w.*?);", Pattern.DOTALL |
Pattern.UNIX_LINES).matcher(sqls);
+ Pattern.compile("([<\\w].*?);", Pattern.DOTALL |
Pattern.UNIX_LINES).matcher(sqls);
while (sqlMatcher.find()) {
String sql = sqlMatcher.group(1);
sql = resolveParameters(sql);
@@ -221,7 +221,7 @@ public class TrinoQueryIT extends TrinoQueryITBase {
String testResults = TrinoQueryITBase.readFileToString(resultFileName);
Matcher sqlMatcher =
- Pattern.compile("(\\w.*?);", Pattern.DOTALL |
Pattern.UNIX_LINES).matcher(sqls);
+ Pattern.compile("([<\\w].*?);", Pattern.DOTALL |
Pattern.UNIX_LINES).matcher(sqls);
Matcher resultMatcher =
Pattern.compile("((\".*?\")\\n{2,})|((\\S.*?)\\n{2,})", Pattern.DOTALL
| Pattern.UNIX_LINES)
.matcher(testResults);
diff --git
a/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryITBase.java
b/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryITBase.java
index c7f874339..6ea2a1570 100644
---
a/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryITBase.java
+++
b/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryITBase.java
@@ -231,7 +231,7 @@ public class TrinoQueryITBase {
});
metalake.disableCatalog(catalogName);
- metalake.dropCatalog(catalogName);
+ metalake.dropCatalog(catalogName, true);
LOG.info("Drop catalog \"{}.{}\"", metalakeName, catalogName);
}
diff --git
a/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryRunner.java
b/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryRunner.java
index 08a2a50ff..0e794e45a 100644
---
a/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryRunner.java
+++
b/trino-connector/integration-test/src/test/java/org/apache/gravitino/trino/connector/integration/test/TrinoQueryRunner.java
@@ -31,8 +31,10 @@ import java.net.URI;
import java.time.ZoneId;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
import jodd.io.StringOutputStream;
import okhttp3.logging.HttpLoggingInterceptor;
+import org.awaitility.Awaitility;
import org.jline.terminal.Terminal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -65,6 +67,29 @@ class TrinoQueryRunner {
}
String runQuery(String query) {
+ String retryFlag = "<RETRY_WITH_NOT_EXISTS>";
+ if (!query.startsWith(retryFlag)) {
+ return runQueryOnce(query);
+ } else {
+ String finalQuery = query.replace(retryFlag, "");
+ AtomicReference<String> output = new AtomicReference<>("");
+ Awaitility.await()
+ .atMost(30, TimeUnit.SECONDS)
+ .pollInterval(1, TimeUnit.SECONDS)
+ .until(
+ () -> {
+ String result = runQueryOnce(finalQuery);
+ if (!result.contains("does not exist")) {
+ output.set(result);
+ return true;
+ }
+ return false;
+ });
+ return output.get();
+ }
+ }
+
+ String runQueryOnce(String query) {
Query queryResult = queryRunner.startQuery(query);
StringOutputStream outputStream = new StringOutputStream();
queryResult.renderOutput(
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00008_decimal.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00008_decimal.sql
index 7901ea41b..4d3fcb5d2 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00008_decimal.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00008_decimal.sql
@@ -2,56 +2,56 @@ CREATE SCHEMA gt_hive.gt_decimal_db1;
USE gt_hive.gt_decimal_db1;
-CREATE TABLE test_decimal_bounds (amount DECIMAL(10, 2));
+CREATE TABLE tb01 (amount DECIMAL(10, 2));
-INSERT INTO test_decimal_bounds VALUES (12345.67), (-9999999.99), (0.01);
+INSERT INTO tb01 VALUES (12345.67), (-9999999.99), (0.01);
-INSERT INTO test_decimal_bounds VALUES (123456789.00); -- Exceeds precision
+INSERT INTO tb01 VALUES (123456789.00); -- Exceeds precision
-SELECT * FROM test_decimal_bounds;
+SELECT * FROM tb01;
-CREATE TABLE test_decimal_aggregation (value DECIMAL(12, 3));
+CREATE TABLE tb02 (value DECIMAL(12, 3));
-INSERT INTO test_decimal_aggregation VALUES (1234.567), (8901.234), (567.890);
+INSERT INTO tb02 VALUES (1234.567), (8901.234), (567.890);
-SELECT SUM(value) FROM test_decimal_aggregation;
+SELECT SUM(value) FROM tb02;
-SELECT AVG(value) FROM test_decimal_aggregation;
+SELECT AVG(value) FROM tb02;
-CREATE TABLE test_decimal_arithmetic (val1 DECIMAL(5, 2), val2 DECIMAL(4, 1));
+CREATE TABLE tb03 (val1 DECIMAL(5, 2), val2 DECIMAL(4, 1));
-INSERT INTO test_decimal_arithmetic VALUES (123.45,10.1);
+INSERT INTO tb03 VALUES (123.45,10.1);
-SELECT val1 + val2 FROM test_decimal_arithmetic;
+SELECT val1 + val2 FROM tb03;
-SELECT val1 * val2 FROM test_decimal_arithmetic;
+SELECT val1 * val2 FROM tb03;
-SELECT val1 / val2 FROM test_decimal_arithmetic;
+SELECT val1 / val2 FROM tb03;
-CREATE TABLE test_decimal_max_min (max_min_val DECIMAL(18, 4));
+CREATE TABLE tb04 (max_min_val DECIMAL(18, 4));
-INSERT INTO test_decimal_max_min VALUES (99999999999999.9999);
+INSERT INTO tb04 VALUES (99999999999999.9999);
-INSERT INTO test_decimal_max_min VALUES (-99999999999999.9999);
+INSERT INTO tb04 VALUES (-99999999999999.9999);
-INSERT INTO test_decimal_max_min VALUES (100000000000000.0000); -- Exceeds max
+INSERT INTO tb04 VALUES (100000000000000.0000); -- Exceeds max
-SELECT * FROM test_decimal_max_min ORDER BY max_min_val;
+SELECT * FROM tb04 ORDER BY max_min_val;
-CREATE TABLE test_decimal_nulls (nullable_val DECIMAL(8, 2));
+CREATE TABLE tb05 (nullable_val DECIMAL(8, 2));
-INSERT INTO test_decimal_nulls VALUES (NULL), (123.45), (NULL);
+INSERT INTO tb05 VALUES (NULL), (123.45), (NULL);
-SELECT * FROM test_decimal_nulls;
+SELECT * FROM tb05;
-DROP TABLE gt_hive.gt_decimal_db1.test_decimal_bounds;
+DROP TABLE gt_hive.gt_decimal_db1.tb01;
-DROP TABLE gt_hive.gt_decimal_db1.test_decimal_aggregation;
+DROP TABLE gt_hive.gt_decimal_db1.tb02;
-DROP TABLE gt_hive.gt_decimal_db1.test_decimal_arithmetic;
+DROP TABLE gt_hive.gt_decimal_db1.tb03;
-DROP TABLE gt_hive.gt_decimal_db1.test_decimal_max_min;
+DROP TABLE gt_hive.gt_decimal_db1.tb04;
-DROP TABLE gt_hive.gt_decimal_db1.test_decimal_nulls;
+DROP TABLE gt_hive.gt_decimal_db1.tb05;
DROP SCHEMA gt_hive.gt_decimal_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
index 77a60ea2d..3852a9361 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
@@ -2,64 +2,64 @@ CREATE SCHEMA gt_hive.gt_array_db1;
USE gt_hive.gt_array_db1;
-CREATE TABLE test_array_basic (int_array ARRAY(INTEGER));
+CREATE TABLE tb01 (int_array ARRAY(INTEGER));
-INSERT INTO test_array_basic VALUES (ARRAY[1, 2, 3]), (ARRAY[4, 5, NULL, 7]),
(ARRAY[]);
+INSERT INTO tb01 VALUES (ARRAY[1, 2, 3]), (ARRAY[4, 5, NULL, 7]), (ARRAY[]);
-SELECT * FROM test_array_basic;
+SELECT * FROM tb01;
-SELECT int_array, CARDINALITY(int_array) AS array_length FROM test_array_basic;
+SELECT int_array, CARDINALITY(int_array) AS array_length FROM tb01;
-CREATE TABLE test_array_access (elements ARRAY(VARCHAR));
+CREATE TABLE tb02 (elements ARRAY(VARCHAR));
-INSERT INTO test_array_access VALUES (ARRAY['apple', 'banana', 'cherry']);
+INSERT INTO tb02 VALUES (ARRAY['apple', 'banana', 'cherry']);
-SELECT elements[1] AS first_element, elements[2] AS second_element FROM
test_array_access;
+SELECT elements[1] AS first_element, elements[2] AS second_element FROM tb02;
-SELECT * FROM test_array_basic WHERE contains(int_array, 2);
+SELECT * FROM tb01 WHERE contains(int_array, 2);
-CREATE TABLE test_array_concat (array1 ARRAY(INTEGER), array2 ARRAY(INTEGER));
+CREATE TABLE tb03 (array1 ARRAY(INTEGER), array2 ARRAY(INTEGER));
-INSERT INTO test_array_concat VALUES (ARRAY[1, 2, 3], ARRAY[4, 5]);
+INSERT INTO tb03 VALUES (ARRAY[1, 2, 3], ARRAY[4, 5]);
-SELECT array1, array2, CONCAT(array1, array2) AS concatenated_array FROM
test_array_concat;
+SELECT array1, array2, CONCAT(array1, array2) AS concatenated_array FROM tb03;
-CREATE TABLE test_array_sort (unsorted_array ARRAY(INTEGER));
+CREATE TABLE tb04 (unsorted_array ARRAY(INTEGER));
-INSERT INTO test_array_sort VALUES (ARRAY[3, 1, 2]), (ARRAY[9, 7, 8]);
+INSERT INTO tb04 VALUES (ARRAY[3, 1, 2]), (ARRAY[9, 7, 8]);
-SELECT unsorted_array, array_sort(unsorted_array) AS sorted_array FROM
test_array_sort;
+SELECT unsorted_array, array_sort(unsorted_array) AS sorted_array FROM tb04;
-CREATE TABLE test_array_nulls (mixed_array ARRAY(INTEGER));
+CREATE TABLE tb05 (mixed_array ARRAY(INTEGER));
-INSERT INTO test_array_nulls VALUES (ARRAY[1, NULL, 3]), (ARRAY[NULL, NULL]);
+INSERT INTO tb05 VALUES (ARRAY[1, NULL, 3]), (ARRAY[NULL, NULL]);
-SELECT mixed_array, CARDINALITY(mixed_array) FROM test_array_nulls;
+SELECT mixed_array, CARDINALITY(mixed_array) FROM tb05;
-CREATE TABLE test_array_agg (val INTEGER);
+CREATE TABLE tb06 (val INTEGER);
-INSERT INTO test_array_agg VALUES (1), (2), (3), (4);
+INSERT INTO tb06 VALUES (1), (2), (3), (4);
-SELECT ARRAY_AGG(val) AS aggregated_array FROM test_array_agg;
+SELECT ARRAY_AGG(val) AS tb07 FROM tb06;
-CREATE TABLE test_nested_array (nested_array ARRAY(ARRAY(VARCHAR)));
+CREATE TABLE tb08 (nested_array ARRAY(ARRAY(VARCHAR)));
-INSERT INTO test_nested_array VALUES (ARRAY[ARRAY['a', 'b'], ARRAY['c', 'd']]);
+INSERT INTO tb08 VALUES (ARRAY[ARRAY['a', 'b'], ARRAY['c', 'd']]);
-SELECT nested_array FROM test_nested_array;
+SELECT nested_array FROM tb08;
-DROP TABLE gt_hive.gt_array_db1.test_array_basic;
+DROP TABLE gt_hive.gt_array_db1.tb01;
-DROP TABLE gt_hive.gt_array_db1.test_array_access;
+DROP TABLE gt_hive.gt_array_db1.tb02;
-DROP TABLE gt_hive.gt_array_db1.test_array_concat;
+DROP TABLE gt_hive.gt_array_db1.tb03;
-DROP TABLE gt_hive.gt_array_db1.test_array_sort;
+DROP TABLE gt_hive.gt_array_db1.tb04;
-DROP TABLE gt_hive.gt_array_db1.test_array_nulls;
+DROP TABLE gt_hive.gt_array_db1.tb05;
-DROP TABLE gt_hive.gt_array_db1.test_array_agg;
+DROP TABLE gt_hive.gt_array_db1.tb06;
-DROP TABLE gt_hive.gt_array_db1.test_nested_array;
+DROP TABLE gt_hive.gt_array_db1.tb08;
DROP SCHEMA gt_hive.gt_array_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00010_map.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00010_map.sql
index bbd89b92b..a4d2cf511 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00010_map.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00010_map.sql
@@ -2,48 +2,48 @@ CREATE SCHEMA gt_hive.gt_map_db1;
USE gt_hive.gt_map_db1;
-CREATE TABLE test_map_nulls (string_map MAP(VARCHAR, VARCHAR));
+CREATE TABLE tb01 (string_map MAP(VARCHAR, VARCHAR));
-INSERT INTO test_map_nulls VALUES (MAP(ARRAY['key1'], ARRAY[NULL]));
+INSERT INTO tb01 VALUES (MAP(ARRAY['key1'], ARRAY[NULL]));
-INSERT INTO test_map_nulls VALUES (MAP(ARRAY[NULL], ARRAY['value1']));
+INSERT INTO tb01 VALUES (MAP(ARRAY[NULL], ARRAY['value1']));
-SELECT * FROM test_map_nulls;
+SELECT * FROM tb01;
-INSERT INTO test_map_nulls VALUES (MAP(ARRAY[], ARRAY[]));
+INSERT INTO tb01 VALUES (MAP(ARRAY[], ARRAY[]));
-SELECT * FROM test_map_nulls ORDER BY cardinality(string_map);
+SELECT * FROM tb01 ORDER BY cardinality(string_map);
-INSERT INTO test_map_nulls VALUES (MAP(ARRAY['dup', 'dup'], ARRAY['value1',
'value2']));
+INSERT INTO tb01 VALUES (MAP(ARRAY['dup', 'dup'], ARRAY['value1', 'value2']));
-CREATE TABLE test_map_types (int_decimal_map MAP(INTEGER, DECIMAL(10, 2)));
+CREATE TABLE tb02 (int_decimal_map MAP(INTEGER, DECIMAL(10, 2)));
-INSERT INTO test_map_types VALUES (MAP(ARRAY[1, 2147483647], ARRAY[12345.67,
99999.99]));
+INSERT INTO tb02 VALUES (MAP(ARRAY[1, 2147483647], ARRAY[12345.67, 99999.99]));
-SELECT * FROM test_map_types;
+SELECT * FROM tb02;
-INSERT INTO test_map_nulls VALUES (MAP(ARRAY['k1', 'k2', 'k3'], ARRAY['v1',
'v2', 'v3']));
+INSERT INTO tb01 VALUES (MAP(ARRAY['k1', 'k2', 'k3'], ARRAY['v1', 'v2',
'v3']));
-SELECT element_at(string_map, 'k1') AS key1_value, element_at(string_map,
'k3') AS key3_value FROM test_map_nulls ORDER BY key1_value;
+SELECT element_at(string_map, 'k1') AS key1_value, element_at(string_map,
'k3') AS key3_value FROM tb01 ORDER BY key1_value;
-CREATE TABLE test_map_complex (map_of_arrays MAP(VARCHAR, ARRAY(INTEGER)));
+CREATE TABLE tb03 (map_of_arrays MAP(VARCHAR, ARRAY(INTEGER)));
-INSERT INTO test_map_complex VALUES (MAP(ARRAY['a', 'b'], ARRAY[ARRAY[1, 2],
ARRAY[3, 4, 5]]));
+INSERT INTO tb03 VALUES (MAP(ARRAY['a', 'b'], ARRAY[ARRAY[1, 2], ARRAY[3, 4,
5]]));
-SELECT * FROM test_map_complex;
+SELECT * FROM tb03;
-CREATE TABLE test_map_aggregation (map_data MAP(VARCHAR, INTEGER));
+CREATE TABLE tb04 (map_data MAP(VARCHAR, INTEGER));
-INSERT INTO test_map_aggregation VALUES (MAP(ARRAY['a', 'b'], ARRAY[1, 2])),
(MAP(ARRAY['a', 'b'], ARRAY[3, 4]));
+INSERT INTO tb04 VALUES (MAP(ARRAY['a', 'b'], ARRAY[1, 2])), (MAP(ARRAY['a',
'b'], ARRAY[3, 4]));
-SELECT map_data['a'] AS key_a, SUM(map_data['b']) AS sum_b FROM
test_map_aggregation GROUP BY map_data['a'] ORDER BY key_a;
+SELECT map_data['a'] AS key_a, SUM(map_data['b']) AS sum_b FROM tb04 GROUP BY
map_data['a'] ORDER BY key_a;
-DROP TABLE gt_hive.gt_map_db1.test_map_nulls;
+DROP TABLE gt_hive.gt_map_db1.tb01;
-DROP TABLE gt_hive.gt_map_db1.test_map_types;
+DROP TABLE gt_hive.gt_map_db1.tb02;
-DROP TABLE gt_hive.gt_map_db1.test_map_complex;
+DROP TABLE gt_hive.gt_map_db1.tb03;
-DROP TABLE gt_hive.gt_map_db1.test_map_aggregation;
+DROP TABLE gt_hive.gt_map_db1.tb04;
DROP SCHEMA gt_hive.gt_map_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00011_row.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00011_row.sql
index 14c49edae..1c1c4f3c5 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00011_row.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00011_row.sql
@@ -2,23 +2,23 @@ CREATE SCHEMA gt_hive.gt_row_db1;
USE gt_hive.gt_row_db1;
-CREATE TABLE test_row_basic (person ROW(id INTEGER, name VARCHAR));
+CREATE TABLE tb01 (person ROW(id INTEGER, name VARCHAR));
CREATE TABLE source_tb1 (id INTEGER, name VARCHAR);
INSERT INTO source_tb1 VALUES (1, 'Alice'), (2, NULL);
-INSERT INTO test_row_basic SELECT ROW(id, name) FROM source_tb1;
+INSERT INTO tb01 SELECT ROW(id, name) FROM source_tb1;
-SELECT * FROM test_row_basic ORDER BY person.id;
+SELECT * FROM tb01 ORDER BY person.id;
INSERT INTO source_tb1 VALUES (3, 'Bob');
-INSERT INTO test_row_basic SELECT ROW(id, name) FROM source_tb1;
+INSERT INTO tb01 SELECT ROW(id, name) FROM source_tb1;
-SELECT person.id AS person_id, person.name AS person_name FROM test_row_basic
ORDER BY person.id;
+SELECT person.id AS person_id, person.name AS person_name FROM tb01 ORDER BY
person.id;
-CREATE TABLE test_nested_row (
+CREATE TABLE tb02 (
person ROW(id INTEGER, name VARCHAR, address ROW(street VARCHAR, city
VARCHAR))
);
@@ -26,11 +26,11 @@ CREATE TABLE source_tb2 (id INTEGER, name VARCHAR, street
VARCHAR, city VARCHAR)
INSERT INTO source_tb2 VALUES (1, 'Alice', '123 Elm St', 'Springfield');
-INSERT INTO test_nested_row SELECT ROW(id, name, ROW(street, city)) FROM
source_tb2;
+INSERT INTO tb02 SELECT ROW(id, name, ROW(street, city)) FROM source_tb2;
-SELECT person.address.city AS city FROM test_nested_row;
+SELECT person.address.city AS city FROM tb02;
-CREATE TABLE test_mixed_row (
+CREATE TABLE tb03 (
data ROW(int_val INTEGER, str_val VARCHAR, arr_val ARRAY(INTEGER), map_val
MAP(VARCHAR, INTEGER))
);
@@ -38,17 +38,17 @@ CREATE TABLE source_tb3 (int_val INTEGER, str_val VARCHAR,
arr_val ARRAY(INTEGER
INSERT INTO source_tb3 VALUES (100, 'text', ARRAY[1, 2, 3], MAP(ARRAY['a',
'b'], ARRAY[10, 20]));
-INSERT INTO test_mixed_row SELECT ROW(int_val, str_val, arr_val, map_val) FROM
source_tb3;
+INSERT INTO tb03 SELECT ROW(int_val, str_val, arr_val, map_val) FROM
source_tb3;
-SELECT * FROM test_mixed_row;
+SELECT * FROM tb03;
INSERT INTO source_tb1 VALUES (NULL, NULL);
-INSERT INTO test_row_basic SELECT ROW(id, name) FROM source_tb1;
+INSERT INTO tb01 SELECT ROW(id, name) FROM source_tb1;
-SELECT * FROM test_row_basic ORDER BY person.id;
+SELECT * FROM tb01 ORDER BY person.id;
-CREATE TABLE test_row_in_array_map (
+CREATE TABLE tb04 (
row_array ARRAY(ROW(id INTEGER, name VARCHAR)),
row_map MAP(VARCHAR, ROW(age INTEGER, city VARCHAR))
);
@@ -57,9 +57,9 @@ CREATE TABLE source_tb5 (id INTEGER, name VARCHAR, age
INTEGER, city VARCHAR);
INSERT INTO source_tb5 VALUES (1, 'Alice', 30, 'NY'), (2, 'Bob', 40, 'LA');
-INSERT INTO test_row_in_array_map SELECT ARRAY[ROW(id, name)],
MAP(ARRAY['person1'], ARRAY[ROW(age, city)]) FROM source_tb5;
+INSERT INTO tb04 SELECT ARRAY[ROW(id, name)], MAP(ARRAY['person1'],
ARRAY[ROW(age, city)]) FROM source_tb5;
-INSERT INTO test_row_in_array_map
+INSERT INTO tb04
SELECT ARRAY_AGG(ROW(id, name)), MAP(ARRAY_AGG(person_key), ARRAY_AGG(ROW(age,
city)))
FROM (
SELECT id, name, age, city, CONCAT('person', CAST(ROW_NUMBER() OVER() AS
VARCHAR)) AS person_key
@@ -68,23 +68,23 @@ FROM (
INSERT INTO source_tb1 VALUES (1, 'Alice'), (1, 'Alice'), (2, 'Bob');
-INSERT INTO test_row_basic SELECT ROW(id, name) FROM source_tb1;
+INSERT INTO tb01 SELECT ROW(id, name) FROM source_tb1;
-SELECT person.id, COUNT(*) FROM test_row_basic GROUP BY person.id ORDER BY
person.id;
+SELECT person.id, COUNT(*) FROM tb01 GROUP BY person.id ORDER BY person.id;
-DROP TABLE gt_hive.gt_row_db1.test_row_basic;
+DROP TABLE gt_hive.gt_row_db1.tb01;
DROP TABLE gt_hive.gt_row_db1.source_tb1;
-DROP TABLE gt_hive.gt_row_db1.test_nested_row;
+DROP TABLE gt_hive.gt_row_db1.tb02;
-DROP TABLE gt_hive.gt_row_db1.test_mixed_row;
+DROP TABLE gt_hive.gt_row_db1.tb03;
DROP TABLE gt_hive.gt_row_db1.source_tb2;
DROP TABLE gt_hive.gt_row_db1.source_tb3;
-DROP TABLE gt_hive.gt_row_db1.test_row_in_array_map;
+DROP TABLE gt_hive.gt_row_db1.tb04;
DROP TABLE gt_hive.gt_row_db1.source_tb5;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00014_partition_sort_order.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00014_partition_sort_order.sql
new file mode 100644
index 000000000..7baee6142
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00014_partition_sort_order.sql
@@ -0,0 +1,67 @@
+CREATE SCHEMA gt_hive.gt_pbs_db1;
+
+USE gt_hive.gt_pbs_db1;
+
+CREATE TABLE nation (
+ nationkey bigint,
+ name varchar(25),
+ regionkey bigint,
+ comment varchar(152)
+);
+
+insert into nation select * from tpch.tiny.nation;
+
+CREATE TABLE tb01 (
+ n_nationkey bigint,
+ n_name varchar,
+ n_regionkey bigint,
+ n_comment varchar,
+ part_key varchar
+) WITH (bucket_count = 4, bucketed_by = ARRAY['n_regionkey'], partitioned_by =
ARRAY['part_key']);
+
+INSERT INTO tb01 SELECT nationkey, name, regionkey, comment, name as part_key
FROM nation;
+INSERT INTO tb01 SELECT nationkey, name, regionkey, comment, name as part_key
FROM nation;
+
+CREATE TABLE tb02 (
+ n_nationkey bigint,
+ n_name varchar,
+ n_regionkey bigint,
+ n_comment varchar
+) WITH (bucket_count = 10, bucketed_by = ARRAY['n_regionkey']);
+
+INSERT INTO tb02 SELECT * FROM nation;
+
+
+CREATE TABLE tb03 (
+ n_nationkey bigint,
+ n_name varchar,
+ n_regionkey bigint,
+ n_comment varchar
+) WITH (bucket_count = 2, bucketed_by = ARRAY['n_regionkey'],sorted_by =
ARRAY['n_regionkey']);
+
+INSERT INTO tb03 SELECT * FROM nation;
+
+
+CREATE TABLE tb04 (
+ n_nationkey bigint,
+ n_name varchar,
+ n_regionkey bigint,
+ n_comment varchar,
+ part_key1 varchar,
+ part_key2 bigint
+) WITH (partitioned_by = ARRAY['part_key1','part_key2']);
+
+INSERT INTO tb04 SELECT nationkey, name, regionkey, comment, name as
part_key1,regionkey as part_key2 FROM nation;
+INSERT INTO tb04 SELECT nationkey, name, regionkey, comment, name as
part_key1,regionkey as part_key2 FROM nation;
+
+SELECT count(*) FROM tb01 WHERE n_regionkey=0;
+
+SELECT count(*) FROM tb01 WHERE part_key='ALGERIA';
+
+SELECT count(*) FROM tb01 WHERE n_regionkey=0 AND part_key='ALGERIA';
+
+SELECT count(*) FROM tb02 WHERE n_regionkey=0;
+
+SELECT count(*) FROM tb03;
+
+select count(*) from tb04;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00014_partition_sort_order.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00014_partition_sort_order.txt
new file mode 100644
index 000000000..b16188aca
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00014_partition_sort_order.txt
@@ -0,0 +1,39 @@
+CREATE SCHEMA
+
+USE
+
+CREATE TABLE
+
+INSERT: 25 rows
+
+CREATE TABLE
+
+INSERT: 25 rows
+
+INSERT: 25 rows
+
+CREATE TABLE
+
+INSERT: 25 rows
+
+CREATE TABLE
+
+INSERT: 25 rows
+
+CREATE TABLE
+
+INSERT: 25 rows
+
+INSERT: 25 rows
+
+"10"
+
+"2"
+
+"2"
+
+"5"
+
+"25"
+
+"50"
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/catalog_hive_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/catalog_hive_cleanup.sql
index 95e005ac8..a5fbf61e9 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/catalog_hive_cleanup.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/catalog_hive_cleanup.sql
@@ -1 +1 @@
-CALL gravitino.system.drop_catalog('gt_hive');
+CALL gravitino.system.drop_catalog('gt_hive');
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql
index e3804cde4..306369689 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql
@@ -38,6 +38,13 @@ CREATE TABLE IF NOT EXISTS gt_mysql.gt_db1.tb02 (
SHOW tables FROM gt_mysql.gt_db1 like 'tb02';
+CREATE TABLE gt_mysql.gt_db1.tb03 (
+ name varchar(200),
+ salary int
+) COMMENT '' WITH (engine = 'MyISAM');
+
+SHOW CREATE TABLE gt_mysql.gt_db1.tb03;
+
DROP TABLE gt_mysql.gt_db1.tb01;
SHOW tables FROM gt_mysql.gt_db1 like 'tb01';
@@ -48,6 +55,8 @@ DROP TABLE IF EXISTS gt_mysql.gt_db1.tb01;
DROP TABLE IF EXISTS gt_mysql.gt_db1.tb02;
+DROP TABLE IF EXISTS gt_mysql.gt_db1.tb03;
+
SHOW tables FROM gt_mysql.gt_db1 like 'tb02';
DROP SCHEMA gt_mysql.gt_db1;
@@ -58,4 +67,4 @@ DROP SCHEMA IF EXISTS gt_mysql.gt_db1;
DROP SCHEMA IF EXISTS gt_mysql.gt_db2;
-SHOW SCHEMAS FROM gt_mysql like 'gt_db2'
+SHOW SCHEMAS FROM gt_mysql like 'gt_db2';
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt
index 43da1446f..650779805 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt
@@ -33,6 +33,18 @@ CREATE TABLE
"tb02"
+CREATE TABLE
+
+"CREATE TABLE gt_mysql.gt_db1.tb03 (
+ name varchar(200),
+ salary integer
+)
+COMMENT ''
+WITH (
+ auto_increment_offset = '1',
+ engine = 'MyISAM'
+)"
+
DROP TABLE
<BLANK_LINE>
@@ -43,6 +55,8 @@ DROP TABLE
DROP TABLE
+DROP TABLE
+
<BLANK_LINE>
DROP SCHEMA
@@ -53,4 +67,4 @@ DROP SCHEMA
DROP SCHEMA
-<BLANK_LINE>
+<BLANK_LINE>
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql
index 9705111cd..78abc73ab 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql
@@ -3,8 +3,26 @@ CREATE SCHEMA gt_postgresql.gt_db1;
CREATE TABLE gt_postgresql.gt_db1.tb01 (
name varchar,
salary int
+) COMMENT 'OKK';
+
+SHOW CREATE TABLE gt_postgresql.gt_db1.tb01;
+
+CREATE TABLE IF NOT EXISTS gt_postgresql.gt_db1.tb01 (
+ name varchar,
+ salary int
);
-drop table gt_postgresql.gt_db1.tb01;
+SHOW SCHEMAS FROM gt_postgresql like 'gt_db1';
+
+SHOW CREATE SCHEMA gt_postgresql.gt_db1;
+
+CREATE SCHEMA IF NOT EXISTS gt_postgresql.gt_db1;
+
+SHOW TABLES FROM gt_postgresql.gt_db1 like 'tb01';
+
+DROP TABLE IF EXISTS gt_postgresql.gt_db1.tb01;
+
+DROP SCHEMA gt_postgresql.gt_db1;
+
+DROP SCHEMA IF EXISTS gt_postgresql.gt_db1;
-drop schema gt_postgresql.gt_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.txt
index 2862f23f2..e0a2a9c19 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.txt
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.txt
@@ -2,6 +2,24 @@ CREATE SCHEMA
CREATE TABLE
+"CREATE TABLE gt_postgresql.gt_db1.tb01 (
+ name varchar,
+ salary integer
+)
+COMMENT 'OKK'"
+
+CREATE TABLE
+
+"gt_db1"
+
+"CREATE SCHEMA gt_postgresql.gt_db1"
+
+CREATE SCHEMA
+
+"tb01"
+
DROP TABLE
DROP SCHEMA
+
+DROP SCHEMA
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql
index b084c6568..444f481e2 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql
@@ -24,4 +24,6 @@ drop table gt_postgresql.gt_db1.tb02;
drop table gt_postgresql.gt_db1.tb01;
+drop table IF EXISTS gt_postgresql.gt_db1.tb01;
+
drop schema gt_postgresql.gt_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.txt
index f5aa32125..afc52e26f 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.txt
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.txt
@@ -26,6 +26,8 @@ DROP TABLE
DROP TABLE
+DROP TABLE
+
DROP SCHEMA
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql
index da78532d1..e92f281b2 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql
@@ -30,6 +30,44 @@ show create table gt_postgresql.gt_db1.tb01;
alter table gt_postgresql.gt_db1.tb01 add column city varchar comment 'aaa';
show create table gt_postgresql.gt_db1.tb01;
+SHOW COLUMNS FROM gt_postgresql.gt_db1.tb01;
+
+SHOW COLUMNS FROM gt_postgresql.gt_db1.tb01 LIKE 's%';
+
+ALTER TABLE IF EXISTS gt_postgresql.gt_db1.tb01 DROP COLUMN IF EXISTS
created_at;
+
+ALTER TABLE IF EXISTS gt_postgresql.gt_db1.tb01 RENAME COLUMN IF EXISTS
available TO available_test;
+
+CREATE TABLE gt_postgresql.gt_db1.tb02 (
+ id INT,
+ name VARCHAR(50)
+);
+
+INSERT INTO gt_postgresql.gt_db1.tb02 (id, name) VALUES (1, NULL);
+
+-- ALTER TABLE gt_postgresql.gt_db1.tb02 ADD COLUMN gender boolean NOT NULL;
+
+CREATE TABLE gt_postgresql.gt_db1.tb03 (
+ id INT,
+ name VARCHAR(50)
+);
+
+COMMENT ON COLUMN gt_postgresql.gt_db1.tb03.id is 'this is id';
+
+ALTER TABLE gt_postgresql.gt_db1.tb03 ADD COLUMN gender boolean NOT NULL;
+
+INSERT INTO gt_postgresql.gt_db1.tb03 (id, name, gender) VALUES (1, NULL,
true);
+
+SELECT * FROM gt_postgresql.gt_db1.tb03;
+
+-- COMMENT ON COLUMN gt_postgresql.gt_db1.tb03.name is '';
+
+SHOW CREATE TABLE gt_postgresql.gt_db1.tb03;
+
+DROP TABLE gt_postgresql.gt_db1.tb03;
+
+DROP TABLE gt_postgresql.gt_db1.tb02;
+
drop table gt_postgresql.gt_db1.tb01;
drop schema gt_postgresql.gt_db1;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt
index e34adddb6..6a47eea10 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt
@@ -59,8 +59,42 @@ ADD COLUMN
)
COMMENT 'test table comments'"
+"s","varchar","","test column comments"
+"salary","bigint","",""
+"city","varchar","","aaa"
+
+"s","varchar","","test column comments"
+"salary","bigint","",""
+
+DROP COLUMN
+
+RENAME COLUMN
+
+CREATE TABLE
+
+INSERT: 1 row
+
+CREATE TABLE
+
+COMMENT
+
+ADD COLUMN
+
+INSERT: 1 row
+
+"1","","true"
+
+"CREATE TABLE gt_postgresql.gt_db1.tb03 (
+ id integer COMMENT 'this is id',
+ name varchar(50),
+ gender boolean NOT NULL
+)
+COMMENT ''"
+
DROP TABLE
-DROP SCHEMA
+DROP TABLE
+DROP TABLE
+DROP SCHEMA
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql
index a97224d2e..5d08cda40 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql
@@ -21,7 +21,6 @@ CREATE TABLE tb01 (
f16 TIMESTAMP WITH TIME ZONE
);
-
SHOW CREATE TABLE tb01;
INSERT INTO tb01 (f1, f2, f3, f4, f5, f6, f7, f9, f10, f11, f12, f13, f14,
f15, f16)
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql
index dc545884b..7a9bbb608 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql
@@ -63,6 +63,13 @@ CREATE TABLE gt_db2.tb06 (
show create table gt_db2.tb06;
+CREATE TABLE IF NOT EXISTS gt_db2.tb05 (
+ name varchar,
+ salary int
+) with (
+ partitioning = ARRAY['name']
+);
+
drop table gt_db2.tb01;
drop table gt_db2.tb02;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt
index 6656e9660..57a6313cc 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt
@@ -74,6 +74,8 @@ WITH (
location = 'hdfs://%/user/iceberg/warehouse/TrinoQueryIT/%/gt_db2/tb06'
)"
+CREATE TABLE
+
DROP TABLE
DROP TABLE
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/00001_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/00001_datatype.sql
new file mode 100644
index 000000000..50d32ba38
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/00001_datatype.sql
@@ -0,0 +1,65 @@
+CREATE TABLE gt_hive1_1.gt_datatype.tb02 (name char(255));
+
+INSERT INTO gt_hive1_1.gt_datatype.tb02 (name) VALUES ('Apache Gravitino is a
high-performance, geo-distributed, and federated metadata lake. It manages
metadata directly in different sources, types, and regions, providing users
with unified metadata access for data and AI assets.');
+
+SELECT * FROM gt_hive1.gt_datatype.tb02;
+
+SELECT * FROM gt_hive1.gt_datatype.tb03;
+
+CREATE TABLE gt_hive1_1.gt_datatype.tb04 (name varchar);
+INSERT INTO gt_hive1_1.gt_datatype.tb04 VALUES ('test abc');
+
+SELECT * FROM gt_hive1.gt_datatype.tb04;
+
+CREATE TABLE gt_hive1_1.gt_datatype.test_decimal_bounds (amount DECIMAL(10,
2));
+INSERT INTO gt_hive1_1.gt_datatype.test_decimal_bounds VALUES (12345.67),
(-9999999.99), (0.01);
+
+SELECT * FROM gt_hive1.gt_datatype.test_decimal_bounds;
+
+
+CREATE TABLE gt_hive1_1.gt_datatype.test_decimal_aggregation (value
DECIMAL(12, 3));
+
+INSERT INTO gt_hive1_1.gt_datatype.test_decimal_aggregation VALUES (1234.567),
(8901.234), (567.890);
+
+SELECT SUM(value) FROM gt_hive1.gt_datatype.test_decimal_aggregation;
+
+SELECT AVG(value) FROM gt_hive1.gt_datatype.test_decimal_aggregation;
+
+
+CREATE TABLE gt_hive1_1.gt_datatype.test_decimal_arithmetic (val1 DECIMAL(5,
2), val2 DECIMAL(4, 1));
+
+INSERT INTO gt_hive1_1.gt_datatype.test_decimal_arithmetic VALUES
(123.45,10.1);
+
+SELECT val1 + val2 FROM gt_hive1.gt_datatype.test_decimal_arithmetic;
+
+SELECT val1 * val2 FROM gt_hive1.gt_datatype.test_decimal_arithmetic;
+
+SELECT val1 / val2 FROM gt_hive1.gt_datatype.test_decimal_arithmetic;
+
+
+CREATE TABLE gt_hive1_1.gt_datatype.test_decimal_max_min (max_min_val
DECIMAL(18, 4));
+
+INSERT INTO gt_hive1_1.gt_datatype.test_decimal_max_min VALUES
(99999999999999.9999);
+
+INSERT INTO gt_hive1_1.gt_datatype.test_decimal_max_min VALUES
(-99999999999999.9999);
+
+SELECT * FROM gt_hive1.gt_datatype.test_decimal_max_min ORDER BY max_min_val;
+
+
+CREATE TABLE gt_hive1_1.gt_datatype.test_decimal_nulls (nullable_val
DECIMAL(8, 2));
+
+INSERT INTO gt_hive1_1.gt_datatype.test_decimal_nulls VALUES (NULL), (123.45),
(NULL);
+
+SELECT * FROM gt_hive1.gt_datatype.test_decimal_nulls;
+
+USE gt_hive1.gt_datatype;
+
+SHOW CREATE SCHEMA gt_hive1.gt_datatype;
+
+SHOW SCHEMAS LIKE 'gt_data%';
+
+SHOW TABLES LIKE '%decimal_bounds';
+
+SHOW COLUMNS FROM gt_hive1.gt_datatype.tb04;
+
+
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/00001_datatype.txt
new file mode 100644
index 000000000..5582890cd
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/00001_datatype.txt
@@ -0,0 +1,66 @@
+CREATE TABLE
+
+INSERT: 1 row
+
+"Apache Gravitino is a high-performance, geo-distributed, and federated
metadata lake. It manages metadata directly in different sources, types, and
regions, providing users with unified metadata access for data and AI assets.
"
+
+"a"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"test abc"
+
+CREATE TABLE
+
+INSERT: 3 rows
+
+"12345.67"
+"-9999999.99"
+"0.01"
+
+CREATE TABLE
+
+INSERT: 3 rows
+
+"10703.691"
+
+"3567.897"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"133.55"
+
+"1246.845"
+
+"12.22"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+INSERT: 1 row
+
+"-99999999999999.9999"
+"99999999999999.9999"
+
+CREATE TABLE
+
+INSERT: 3 rows
+
+""
+"123.45"
+""
+
+USE
+
+"CREATE SCHEMA gt_hive1.gt_datatype"
+
+"gt_datatype"
+
+"test_decimal_bounds"
+
+"name","varchar","",""
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/catalog_hive_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/catalog_hive_cleanup.sql
new file mode 100644
index 000000000..e69de29bb
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/catalog_hive_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/catalog_hive_prepare.sql
new file mode 100644
index 000000000..67280efe7
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/catalog_hive_prepare.sql
@@ -0,0 +1,23 @@
+call gravitino.system.create_catalog(
+ 'gt_hive1',
+ 'hive',
+ MAP(
+ ARRAY['metastore.uris', 'cloud.region-code',
'cloud.trino.connection-url', 'cloud.trino.connection-user',
'cloud.trino.connection-password'],
+ ARRAY['${hive_uri}', 'c2', '${trino_remote_jdbc_uri}', 'admin', '']
+ )
+);
+
+call gravitino.system.create_catalog(
+ 'gt_hive1_1',
+ 'hive',
+ map(
+ array['metastore.uris'],
+ array['${hive_uri}']
+ )
+);
+
+CREATE SCHEMA gt_hive1_1.gt_datatype;
+CREATE TABLE gt_hive1_1.gt_datatype.tb03 (name char);
+INSERT INTO gt_hive1_1.gt_datatype.tb03 VALUES ('a');
+
+<RETRY_WITH_NOT_EXISTS> SELECT * FROM gt_hive1.gt_datatype.tb03;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/ignored/00001_datatype.sql
similarity index 52%
copy from
trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
copy to
trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/ignored/00001_datatype.sql
index 77a60ea2d..95b15f555 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00009_array.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/ignored/00001_datatype.sql
@@ -1,23 +1,45 @@
-CREATE SCHEMA gt_hive.gt_array_db1;
-
-USE gt_hive.gt_array_db1;
+CREATE TABLE tb01 (
+ f1 VARCHAR(200),
+ f2 CHAR(20),
+ f3 VARBINARY,
+ f4 DECIMAL(10, 3),
+ f5 REAL,
+ f6 DOUBLE,
+ f7 BOOLEAN,
+ f8 TINYINT,
+ f9 SMALLINT,
+ f10 INT,
+ f11 INTEGER,
+ f12 BIGINT,
+ f13 DATE,
+ f15 TIMESTAMP
+);
+
+INSERT INTO tb01 (f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f15)
+VALUES ('Sample text 1', 'Text1', x'65', 123.456, 7.89, 12.34, false, 1, 100,
1000, 1000, 100000, DATE '2024-01-01', TIMESTAMP '2024-01-01 08:00:00');
+
+INSERT INTO tb01 (f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f15)
+VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
NULL, NULL, NULL);
+
+SELECT * FROM tb01 order by f1;
+
+SHOW CREATE TABLE tb01;
CREATE TABLE test_array_basic (int_array ARRAY(INTEGER));
-
INSERT INTO test_array_basic VALUES (ARRAY[1, 2, 3]), (ARRAY[4, 5, NULL, 7]),
(ARRAY[]);
SELECT * FROM test_array_basic;
-SELECT int_array, CARDINALITY(int_array) AS array_length FROM test_array_basic;
-
CREATE TABLE test_array_access (elements ARRAY(VARCHAR));
-
INSERT INTO test_array_access VALUES (ARRAY['apple', 'banana', 'cherry']);
+SELECT int_array, CARDINALITY(int_array) AS array_length FROM test_array_basic;
+
SELECT elements[1] AS first_element, elements[2] AS second_element FROM
test_array_access;
SELECT * FROM test_array_basic WHERE contains(int_array, 2);
+
CREATE TABLE test_array_concat (array1 ARRAY(INTEGER), array2 ARRAY(INTEGER));
INSERT INTO test_array_concat VALUES (ARRAY[1, 2, 3], ARRAY[4, 5]);
@@ -48,18 +70,27 @@ INSERT INTO test_nested_array VALUES (ARRAY[ARRAY['a',
'b'], ARRAY['c', 'd']]);
SELECT nested_array FROM test_nested_array;
-DROP TABLE gt_hive.gt_array_db1.test_array_basic;
-
-DROP TABLE gt_hive.gt_array_db1.test_array_access;
-
-DROP TABLE gt_hive.gt_array_db1.test_array_concat;
-
-DROP TABLE gt_hive.gt_array_db1.test_array_sort;
-
-DROP TABLE gt_hive.gt_array_db1.test_array_nulls;
-
-DROP TABLE gt_hive.gt_array_db1.test_array_agg;
-
-DROP TABLE gt_hive.gt_array_db1.test_nested_array;
-
-DROP SCHEMA gt_hive.gt_array_db1;
\ No newline at end of file
+CREATE SCHEMA gt_hive1_1.gt_format_db1;
+USE gt_hive1_1.gt_format_db1;
+
+CREATE TABLE storage_formats_orc (
+c_boolean boolean,
+c_tinyint tinyint,
+c_smallint smallint,
+c_int integer,
+c_bigint bigint,
+c_real real,
+c_double double,
+c_decimal_10_0 decimal(10,0),
+c_decimal_10_2 decimal(10,2),
+c_decimal_38_5 decimal(38,5),
+c_char char(10),
+c_varchar varchar(10),
+c_string varchar,
+c_binary varbinary,
+c_date date,
+c_timestamp timestamp
+) WITH (format='ORC');
+
+INSERT INTO storage_formats_orc
+VALUES
(true,127,32767,2147483647,9223372036854775807,123.345,234.567,346,12345678.91,1234567890123456789012.34567,'ala
ma ','ala ma kot','ala ma kota',X'62696e61727920636f6e74656e74',DATE
'2024-11-11',TIMESTAMP '2024-11-11 12:15:35.123');
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/ignored/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/ignored/00001_datatype.txt
new file mode 100644
index 000000000..e903b1397
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/hive/ignored/00001_datatype.txt
@@ -0,0 +1,86 @@
+CREATE TABLE
+
+INSERT: 1 row
+
+INSERT: 1 row
+
+"Sample text 1","Text1
","65","123.456","7.89","12.34","false","1","100","1000","1000","100000","2024-01-01","2024-01-01
08:00:00.000"
+"","","","","","","","","","","","","",""
+
+"CREATE TABLE gt_hive.gt_db1.tb01 (
+ f1 varchar(200),
+ f2 char(20),
+ f3 varbinary,
+ f4 decimal(10, 3),
+ f5 real,
+ f6 double,
+ f7 boolean,
+ f8 tinyint,
+ f9 smallint,
+ f10 integer,
+ f11 integer,
+ f12 bigint,
+ f13 date,
+ f15 timestamp(3)
+)
+COMMENT ''
+WITH (
+ input_format = 'org.apache.hadoop.mapred.TextInputFormat',
+ location = 'hdfs://%/user/hive/warehouse/gt_db1.db/tb01',
+ output_format =
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',
+ serde_lib = 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',
+ serde_name = 'tb01',
+ table_type = 'MANAGED_TABLE'
+)"
+
+CREATE TABLE
+
+INSERT: 3 rows
+
+"[1, 2, 3]"
+"[4, 5, NULL, 7]"
+"[]"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"[1, 2, 3]","3"
+"[4, 5, NULL, 7]","4"
+"[]","0"
+
+"apple","banana"
+
+"[1, 2, 3]"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"[1, 2, 3]","[4, 5]","[1, 2, 3, 4, 5]"
+
+CREATE TABLE
+
+INSERT: 2 rows
+
+"[3, 1, 2]","[1, 2, 3]"
+"[9, 7, 8]","[7, 8, 9]"
+
+CREATE TABLE
+
+INSERT: 2 rows
+
+"[1, NULL, 3]","3"
+"[NULL, NULL]","2"
+
+CREATE TABLE
+
+INSERT: 4 rows
+
+"[1, 2, 3, 4]"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"[[a, b], [c, d]]"
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00001_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00001_datatype.sql
new file mode 100644
index 000000000..bf1368a6f
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00001_datatype.sql
@@ -0,0 +1,19 @@
+SELECT * FROM gt_mysql1.gt_db1.tb03;
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb04 (id int, name char(255));
+SELECT * FROM gt_mysql1.gt_db1.tb04;
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb05 (id int, name varchar(250));
+SELECT * FROM gt_mysql1.gt_db1.tb05;
+
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb06 (id int, name varchar(256));
+SELECT * FROM gt_mysql1.gt_db1.tb06;
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb07 (id int, name char);
+SELECT * FROM gt_mysql1.gt_db1.tb07;
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb08 (id int, name varchar);
+SELECT * FROM gt_mysql1.gt_db1.tb08;
+
+SHOW CREATE SCHEMA gt_mysql1.gt_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00001_datatype.txt
new file mode 100644
index 000000000..0205e4aea
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00001_datatype.txt
@@ -0,0 +1,23 @@
+<BLANK_LINE>
+
+CREATE TABLE
+
+<BLANK_LINE>
+
+CREATE TABLE
+
+<BLANK_LINE>
+
+CREATE TABLE
+
+<BLANK_LINE>
+
+CREATE TABLE
+
+<BLANK_LINE>
+
+CREATE TABLE
+
+<BLANK_LINE>
+
+"CREATE SCHEMA gt_mysql1.gt_db1"
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00002_pushdown.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00002_pushdown.sql
new file mode 100644
index 000000000..288c8f5e9
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00002_pushdown.sql
@@ -0,0 +1,49 @@
+CREATE TABLE gt_mysql1_1.gt_db1.customer (
+ custkey bigint NOT NULL,
+ name varchar(25) NOT NULL,
+ address varchar(40) NOT NULL,
+ nationkey bigint NOT NULL,
+ phone varchar(15) NOT NULL,
+ acctbal decimal(12, 2) NOT NULL,
+ mktsegment varchar(10) NOT NULL,
+ comment varchar(117) NOT NULL
+);
+
+CREATE TABLE gt_mysql1_1.gt_db1.orders (
+ orderkey bigint NOT NULL,
+ custkey bigint NOT NULL,
+ orderstatus varchar(1) NOT NULL,
+ totalprice decimal(12, 2) NOT NULL,
+ orderdate date NOT NULL,
+ orderpriority varchar(15) NOT NULL,
+ clerk varchar(15) NOT NULL,
+ shippriority integer NOT NULL,
+ comment varchar(79) NOT NULL
+);
+
+insert into gt_mysql1_1.gt_db1.customer select * from tpch.tiny.customer;
+
+insert into gt_mysql1_1.gt_db1.orders select * from tpch.tiny.orders;
+
+USE gt_mysql1.gt_db1;
+
+SHOW SCHEMAS LIKE 'gt_%1';
+
+SHOW TABLES LIKE 'cus%';
+
+SHOW COLUMNS FROM gt_mysql1.gt_db1.customer;
+
+-- projection push down, limit push down
+explain select custkey from customer limit 10;
+
+-- predicate push down
+explain select * from customer where phone like '%2342%' limit 10;
+
+-- aggregating push down
+explain select sum(totalprice) from orders;
+
+-- aggregating push down, TopN push down
+explain select orderdate, sum(totalprice) from orders group by orderdate order
by orderdate limit 10;
+
+-- join push down
+explain select * from customer join orders on customer.custkey =
orders.custkey limit 10;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00002_pushdown.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00002_pushdown.txt
new file mode 100644
index 000000000..616b4e86a
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/00002_pushdown.txt
@@ -0,0 +1,56 @@
+CREATE TABLE
+
+CREATE TABLE
+
+INSERT: 1500 rows
+
+INSERT: 15000 rows
+
+USE
+
+"gt_db1"
+
+"customer"
+
+"custkey","bigint","",""
+"name","varchar(25)","",""
+"address","varchar(40)","",""
+"nationkey","bigint","",""
+"phone","varchar(15)","",""
+"acctbal","decimal(12,2)","",""
+"mktsegment","varchar(10)","",""
+"comment","varchar(117)","",""
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_mysql1:gt_db1.customer->gt_db1.customer
gt_mysql1.gt_db1.customer limit=10 columns=[custkey:bigint:bigint]]
+ Layout: [custkey:bigint]
+%
+"
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_mysql1:gt_db1.customer->gt_db1.customer
gt_mysql1.gt_db1.customer
constraints=[ParameterizedExpression[expression=(""phone"") LIKE (?),
parameters=[QueryParameter{jdbcType=Optional.empty, type=varchar(6),
value=Optional[Slice{base=[B@%, baseOffset=0, length=6}]}]]] limit=10]
+ Layout: [custkey:bigint, name:varchar(25), address:varchar(40),
nationkey:bigint, phone:varchar(15), acctbal:decimal(12,2),
mktsegment:varchar(10), comment:varchar(117)]
+%
+"
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_mysql1:gt_db1.orders->Query[SELECT
sum(""totalprice"") AS ""_pfgnrtd_0"" FROM ""gt_mysql1"".""gt_db1"".""orders""]
columns=[_pfgnrtd_0:decimal(38,2):decimal]]
+ Layout: [_pfgnrtd:decimal(38,2)]
+%
+"
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_mysql1:gt_db1.orders->Query[SELECT ""orderdate"",
sum(""totalprice"") AS ""_pfgnrtd_0"" FROM ""gt_mysql1"".""gt_db1"".""orders""
GROUP BY ""orderdate""] sortOrder=[orderdate:date:date ASC NULLS LAST] limit=10
columns=[orderdate:date:date, _pfgnrtd_0:decimal(38,2):decimal]]
+ Layout: [orderdate:date, _pfgnrtd:decimal(38,2)]
+%
+"
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_mysql1:gt_db1.%->Query[SELECT % INNER JOIN %]
limit=10 columns=%]]
+%
+"
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/catalog_mysql_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/catalog_mysql_cleanup.sql
new file mode 100644
index 000000000..e69de29bb
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/catalog_mysql_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/catalog_mysql_prepare.sql
new file mode 100644
index 000000000..3339434d3
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/catalog_mysql_prepare.sql
@@ -0,0 +1,23 @@
+call gravitino.system.create_catalog(
+ 'gt_mysql1',
+ 'jdbc-mysql',
+ map(
+ array['jdbc-url', 'jdbc-user', 'jdbc-password', 'jdbc-driver',
'trino.bypass.join-pushdown.strategy', 'cloud.region-code',
'cloud.trino.connection-url', 'cloud.trino.connection-user',
'cloud.trino.connection-password'],
+ array['${mysql_uri}/?useSSL=false', 'trino', 'ds123',
'com.mysql.cj.jdbc.Driver', 'EAGER','c2', '${trino_remote_jdbc_uri}', 'admin',
'']
+ )
+);
+
+call gravitino.system.create_catalog(
+ 'gt_mysql1_1',
+ 'jdbc-mysql',
+ map(
+ array['jdbc-url', 'jdbc-user', 'jdbc-password', 'jdbc-driver',
'trino.bypass.join-pushdown.strategy'],
+ array['${mysql_uri}/?useSSL=false', 'trino', 'ds123',
'com.mysql.cj.jdbc.Driver', 'EAGER']
+ )
+);
+
+CREATE SCHEMA gt_mysql1_1.gt_db1;
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb03 (id int, name char(20));
+
+<RETRY_WITH_NOT_EXISTS> SELECT * FROM gt_mysql1.gt_db1.tb03;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/ignored/00001_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/ignored/00001_datatype.sql
new file mode 100644
index 000000000..bf890f3db
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/ignored/00001_datatype.sql
@@ -0,0 +1,55 @@
+-- Unsupported Type: BOOLEAN
+CREATE TABLE gt_mysql1_1.gt_db1.tb01 (
+ f1 VARCHAR(200),
+ f2 CHAR(20),
+ f3 VARBINARY,
+ f4 DECIMAL(10, 3),
+ f5 REAL,
+ f6 DOUBLE,
+ f8 TINYINT,
+ f9 SMALLINT,
+ f10 INT,
+ f11 INTEGER,
+ f12 BIGINT,
+ f13 DATE,
+ f14 TIME,
+ f15 TIMESTAMP,
+ f16 TIMESTAMP WITH TIME ZONE
+);
+
+INSERT INTO gt_mysql1_1.gt_db1.tb01 (f1, f2, f3, f4, f5, f6, f8, f9, f10, f11,
f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', 'Text1', x'65', 123.456, 7.89, 12.34, 1, 100, 1000,
1000, 100000, DATE '2024-01-01',
+ TIME '08:00:00', TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP
'2024-01-01 08:00:00 UTC');
+
+INSERT INTO gt_mysql1_1.gt_db1.tb01 (f1, f2, f3, f4, f5, f6, f8, f9, f10, f11,
f12, f13, f14, f15, f16)
+VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
NULL, NULL, NULL, NULl);
+
+SELECT * FROM gt_mysql1.gt_db1.tb01 ORDER BY f1;
+
+SHOW CREATE TABLE gt_mysql1.gt_db1.tb01;
+
+CREATE TABLE gt_mysql1_1.gt_db1.tb02 (
+ f1 VARCHAR(200) NOT NULL ,
+ f2 CHAR(20) NOT NULL ,
+ f3 VARBINARY NOT NULL ,
+ f4 DECIMAL(10, 3) NOT NULL ,
+ f5 REAL NOT NULL ,
+ f6 DOUBLE NOT NULL ,
+ f8 TINYINT NOT NULL ,
+ f9 SMALLINT NOT NULL ,
+ f10 INT NOT NULL ,
+ f11 INTEGER NOT NULL ,
+ f12 BIGINT NOT NULL ,
+ f13 DATE NOT NULL ,
+ f14 TIME NOT NULL ,
+ f15 TIMESTAMP NOT NULL,
+ f16 TIMESTAMP WITH TIME ZONE NOT NULL
+);
+
+INSERT INTO gt_mysql1_1.gt_db1.tb02 (f1, f2, f3, f4, f5, f6, f8, f9, f10, f11,
f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', 'Text1', x'65', 123.456, 7.89, 12.34, 1, 100, 1000,
1000, 100000, DATE '2024-01-01',
+ TIME '08:00:00', TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP
'2024-01-01 08:00:00 UTC');
+
+SELECT * FROM gt_mysql1.gt_db1.tb02 ORDER BY f1;
+
+SHOW CREATE TABLE gt_mysql1.gt_db1.tb02;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/ignored/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/ignored/00001_datatype.txt
new file mode 100644
index 000000000..009033eb2
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-mysql/ignored/00001_datatype.txt
@@ -0,0 +1,59 @@
+CREATE TABLE
+
+INSERT: 1 row
+
+INSERT: 1 row
+
+"Sample text 1","Text1
","65","123.456","7.89","12.34","1","100","1000","1000","100000","2024-01-01","08:00:00","2024-01-01
08:00:00","2024-01-01 08:00:00 UTC"
+"","","","","","","","","","","","","","",""
+
+"CREATE TABLE gt_mysql1.gt_db1.tb01 (
+ f1 varchar(200),
+ f2 char(20),
+ f3 varbinary,
+ f4 decimal(10, 3),
+ f5 real,
+ f6 double,
+ f8 tinyint,
+ f9 smallint,
+ f10 integer,
+ f11 integer,
+ f12 bigint,
+ f13 date,
+ f14 time(0),
+ f15 timestamp(0),
+ f16 timestamp(0) with time zone
+)
+COMMENT ''
+WITH (
+ engine = 'InnoDB'
+)"
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"Sample text 1","Text1
","65","123.456","7.89","12.34","1","100","1000","1000","100000","2024-01-01","08:00:00","2024-01-01
08:00:00","2024-01-01 08:00:00 UTC"
+"","","","","","","","","","","","","","",""
+
+"CREATE TABLE gt_mysql1.gt_db1.tb02 (
+ f1 varchar(200) NOT NULL,
+ f2 char(20) NOT NULL,
+ f3 varbinary NOT NULL,
+ f4 decimal(10, 3) NOT NULL,
+ f5 real NOT NULL,
+ f6 double NOT NULL,
+ f8 tinyint NOT NULL,
+ f9 smallint NOT NULL,
+ f10 integer NOT NULL,
+ f11 integer NOT NULL,
+ f12 bigint NOT NULL,
+ f13 date NOT NULL,
+ f14 time(0) NOT NULL,
+ f15 timestamp(0) NOT NULL,
+ f16 timestamp(0) with time zone NOT NULL
+)
+COMMENT ''
+WITH (
+ engine = 'InnoDB'
+)"
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00001_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00001_datatype.sql
new file mode 100644
index 000000000..5a5cd8798
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00001_datatype.sql
@@ -0,0 +1,42 @@
+INSERT INTO gt_postgresql1_1.gt_datatype.tb01(name, salary) VALUES ('sam', 11);
+INSERT INTO gt_postgresql1_1.gt_datatype.tb01(name, salary) VALUES ('jerry',
13);
+INSERT INTO gt_postgresql1_1.gt_datatype.tb01(name, salary) VALUES ('bob',
14), ('tom', 12);
+
+CREATE TABLE gt_postgresql1_1.gt_datatype.tb02 (
+ name varchar,
+ salary int
+);
+
+INSERT INTO gt_postgresql1_1.gt_datatype.tb02(name, salary) SELECT * FROM
gt_postgresql1_1.gt_datatype.tb01;
+
+select * from gt_postgresql1.gt_datatype.tb02 order by name;
+
+CREATE SCHEMA gt_postgresql1_1.gt_varchar_db1;
+
+CREATE TABLE gt_postgresql1_1.gt_varchar_db1.test_char01 (id int, name
char(20));
+
+SHOW CREATE TABLE gt_postgresql1.gt_varchar_db1.test_char01;
+
+CREATE TABLE gt_postgresql1_1.gt_varchar_db1.test_char02 (id int, name
char(65536));
+
+SHOW CREATE TABLE gt_postgresql1.gt_varchar_db1.test_char02;
+
+CREATE TABLE gt_postgresql1_1.gt_varchar_db1.test_char03 (id int, name char);
+
+SHOW CREATE TABLE gt_postgresql1.gt_varchar_db1.test_char03;
+
+CREATE TABLE gt_postgresql1_1.gt_varchar_db1.test_varchar04 (id int, name
varchar(250));
+
+SHOW CREATE TABLE gt_postgresql1.gt_varchar_db1.test_varchar04;
+
+CREATE TABLE gt_postgresql1_1.gt_varchar_db1.test_varchar05 (id int, name
varchar(10485760));
+
+SHOW CREATE TABLE gt_postgresql1.gt_varchar_db1.test_varchar05;
+
+CREATE TABLE gt_postgresql1_1.gt_varchar_db1.test_varchar06 (id int, name
varchar);
+
+SHOW CREATE TABLE gt_postgresql1.gt_varchar_db1.test_varchar06;
+
+CREATE SCHEMA gt_postgresql1_1.gt_push_db1;
+
+SHOW CREATE SCHEMA gt_postgresql1.gt_push_db1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00001_datatype.txt
new file mode 100644
index 000000000..c7f3fd1e2
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00001_datatype.txt
@@ -0,0 +1,69 @@
+INSERT: 1 row
+
+INSERT: 1 row
+
+INSERT: 2 rows
+
+CREATE TABLE
+
+INSERT: 4 rows
+
+"bob","14"
+"jerry","13"
+"sam","11"
+"tom","12"
+
+CREATE SCHEMA
+
+CREATE TABLE
+
+"CREATE TABLE gt_postgresql1.gt_varchar_db1.test_char01 (
+ id integer,
+ name char(20)
+)
+COMMENT ''"
+
+CREATE TABLE
+
+"CREATE TABLE gt_postgresql1.gt_varchar_db1.test_char02 (
+ id integer,
+ name char(65536)
+)
+COMMENT ''"
+
+CREATE TABLE
+
+"CREATE TABLE gt_postgresql1.gt_varchar_db1.test_char03 (
+ id integer,
+ name char(1)
+)
+COMMENT ''"
+
+CREATE TABLE
+
+
+"CREATE TABLE gt_postgresql1.gt_varchar_db1.test_varchar04 (
+ id integer,
+ name varchar(250)
+)
+COMMENT ''"
+
+CREATE TABLE
+
+"CREATE TABLE gt_postgresql1.gt_varchar_db1.test_varchar05 (
+ id integer,
+ name varchar(10485760)
+)
+COMMENT ''"
+
+CREATE TABLE
+
+"CREATE TABLE gt_postgresql1.gt_varchar_db1.test_varchar06 (
+ id integer,
+ name varchar
+)
+COMMENT ''"
+
+CREATE SCHEMA
+
+"CREATE SCHEMA gt_postgresql1.gt_push_db1"
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00002_pushdown.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00002_pushdown.sql
new file mode 100644
index 000000000..6554cdb32
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00002_pushdown.sql
@@ -0,0 +1,104 @@
+CREATE TABLE gt_postgresql1_1.gt_push_db1.employee_performance (
+ employee_id integer,
+ evaluation_date date,
+ rating integer
+)
+COMMENT 'comment';
+
+CREATE TABLE gt_postgresql1_1.gt_push_db1.employees (
+ employee_id integer,
+ department_id integer,
+ job_title varchar(100),
+ given_name varchar(100),
+ family_name varchar(100),
+ birth_date date,
+ hire_date date
+)
+COMMENT 'comment';
+
+INSERT INTO gt_postgresql1_1.gt_push_db1.employee_performance (employee_id,
evaluation_date, rating) VALUES
+(1, DATE '2018-02-24', 4),
+(1, DATE '2016-12-25', 7),
+(1, DATE '2023-04-07', 4),
+(3, DATE '2012-11-08', 7),
+(3, DATE '2019-09-15', 2),
+(3, DATE '2017-06-21', 8),
+(3, DATE '2019-07-16', 4),
+(3, DATE '2015-10-06', 4),
+(3, DATE '2021-01-05', 6),
+(3, DATE '2014-10-24', 4);
+
+INSERT INTO gt_postgresql1_1.gt_push_db1.employees (employee_id,
department_id, job_title, given_name, family_name, birth_date, hire_date) VALUES
+(1, 1, 'Manager', 'Gregory', 'Smith', DATE '1968-04-15', DATE '2014-06-04'),
+(2, 1, 'Sales Assistant', 'Owen', 'Rivers', DATE '1988-08-13', DATE
'2021-02-05'),
+(3, 1, 'Programmer', 'Avram', 'Lawrence', DATE '1969-11-21', DATE
'2010-09-29'),
+(4, 1, 'Sales Assistant', 'Burton', 'Everett', DATE '2001-12-07', DATE
'2016-06-25'),
+(5, 1, 'Sales Assistant', 'Cedric', 'Barlow', DATE '1972-02-02', DATE
'2012-08-15'),
+(6, 2, 'Sales Assistant', 'Jasper', 'Mack', DATE '2002-03-29', DATE
'2020-09-13'),
+(7, 1, 'Sales Assistant', 'Felicia', 'Robinson', DATE '1973-08-21', DATE
'2023-05-14'),
+(8, 3, 'Sales Assistant', 'Mason', 'Steele', DATE '1964-05-19', DATE
'2019-02-06'),
+(9, 3, 'Programmer', 'Bernard', 'Cameron', DATE '1995-08-27', DATE
'2018-07-12'),
+(10, 2, 'Programmer', 'Chelsea', 'Wade', DATE '2007-01-29', DATE '2016-04-16');
+
+USE gt_postgresql1.gt_push_db1;
+
+SELECT
+ given_name,
+ family_name,
+ rating
+FROM gt_postgresql1.gt_push_db1.employee_performance AS p
+JOIN gt_postgresql1.gt_push_db1.employees AS e
+ ON p.employee_id = e.employee_id
+ORDER BY
+rating DESC, given_name
+LIMIT 10;
+
+CREATE TABLE gt_postgresql1_1.gt_push_db1.customer (
+ custkey bigint NOT NULL,
+ name varchar(25) NOT NULL,
+ address varchar(40) NOT NULL,
+ nationkey bigint NOT NULL,
+ phone varchar(15) NOT NULL,
+ acctbal decimal(12, 2) NOT NULL,
+ mktsegment varchar(10) NOT NULL,
+ comment varchar(117) NOT NULL
+);
+
+CREATE TABLE gt_postgresql1_1.gt_push_db1.orders (
+ orderkey bigint NOT NULL,
+ custkey bigint NOT NULL,
+ orderstatus varchar(1) NOT NULL,
+ totalprice decimal(12, 2) NOT NULL,
+ orderdate date NOT NULL,
+ orderpriority varchar(15) NOT NULL,
+ clerk varchar(15) NOT NULL,
+ shippriority integer NOT NULL,
+ comment varchar(79) NOT NULL
+);
+
+INSERT INTO gt_postgresql1_1.gt_push_db1.customer SELECT * FROM
tpch.tiny.customer;
+
+INSERT INTO gt_postgresql1_1.gt_push_db1.orders SELECT * FROM tpch.tiny.orders;
+
+USE gt_postgresql1.gt_push_db1;
+
+SHOW SCHEMAS LIKE 'gt_push_%1';
+
+SHOW TABLES LIKE 'cus%';
+
+SHOW COLUMNS FROM gt_postgresql1.gt_push_db1.customer;
+
+-- projection push down, limit push down
+explain select custkey from customer limit 10;
+
+-- predicate push down
+explain select * from customer where phone like '%2342%' limit 10;
+
+-- aggregating push down
+explain select sum(totalprice) from orders;
+
+-- aggregating push down, TopN push down
+explain select orderdate, sum(totalprice) from orders group by orderdate order
by orderdate limit 10;
+
+-- join push down
+explain select customer.custkey, orders.orderkey from customer join orders on
customer.custkey = orders.custkey order by orders.orderkey limit 10;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00002_pushdown.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00002_pushdown.txt
new file mode 100644
index 000000000..aaaaf80f5
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/00002_pushdown.txt
@@ -0,0 +1,78 @@
+CREATE TABLE
+
+CREATE TABLE
+
+INSERT: 10 rows
+
+INSERT: 10 rows
+
+USE
+
+"Avram","Lawrence","8"
+"Avram","Lawrence","7"
+"Gregory","Smith","7"
+"Avram","Lawrence","6"
+"Avram","Lawrence","4"
+"Avram","Lawrence","4"
+"Avram","Lawrence","4"
+"Gregory","Smith","4"
+"Gregory","Smith","4"
+"Avram","Lawrence","2"
+
+CREATE TABLE
+
+CREATE TABLE
+
+INSERT: 1500 rows
+
+INSERT: 15000 rows
+
+USE
+
+"gt_push_db1"
+
+"customer"
+
+"custkey","bigint","",""
+"name","varchar(25)","",""
+"address","varchar(40)","",""
+"nationkey","bigint","",""
+"phone","varchar(15)","",""
+"acctbal","decimal(12,2)","",""
+"mktsegment","varchar(10)","",""
+"comment","varchar(117)","",""
+
+"Trino version: %
+%
+ └─ TableScan[table =
gt_postgresql1:gt_push_db1.customer->gt_push_db1.customer
gt_postgresql1.gt_push_db1.customer limit=10 columns=[custkey:bigint:bigint]]
+ Layout: [custkey:bigint]
+%
+"
+
+"Trino version: %
+%
+ └─ TableScan[table =
gt_postgresql1:gt_push_db1.customer->gt_push_db1.customer
gt_postgresql1.gt_push_db1.customer
constraints=[ParameterizedExpression[expression=(""phone"") LIKE (?),
parameters=[QueryParameter{jdbcType=Optional.empty, type=varchar(6),
value=Optional[Slice{base=[B@%, baseOffset=0, length=6}]}]]] limit=10]
+ Layout: [custkey:bigint, name:varchar(25), address:varchar(40),
nationkey:bigint, phone:varchar(15), acctbal:decimal(12,2),
mktsegment:varchar(10), comment:varchar(117)]
+%
+"
+
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_postgresql1:gt_push_db1.orders->Query[SELECT
sum(""totalprice"") AS ""_pfgnrtd_0"" FROM
""gt_postgresql1"".""gt_push_db1"".""orders""]
columns=[_pfgnrtd_0:decimal(38,2):decimal]]
+ Layout: [_pfgnrtd:decimal(38,2)]
+%
+"
+
+"Trino version: %
+%
+ └─ TableScan[table = gt_postgresql1:gt_push_db1.%->Query[SELECT
""orderdate"", sum(""totalprice"") AS ""_pfgnrtd_0"" FROM
""gt_postgresql1"".""gt_push_db1"".""orders"" GROUP BY ""orderdate""]
sortOrder=[orderdate:date:date ASC NULLS LAST] limit=10
columns=[orderdate:date:date, _pfgnrtd_0:decimal(38,2):decimal]]
+ Layout: [orderdate:date, _pfgnrtd:decimal(38,2)]
+%
+"
+
+"Trino version: %
+%
+ TableScan[table = gt_postgresql1:gt_push_db1.%->Query[SELECT % INNER JOIN
%] limit=10 columns=%]
+%
+"
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/catalog_pg_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/catalog_pg_cleanup.sql
new file mode 100644
index 000000000..e69de29bb
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/catalog_pg_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/catalog_pg_prepare.sql
new file mode 100644
index 000000000..a56f68c68
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/catalog_pg_prepare.sql
@@ -0,0 +1,26 @@
+call gravitino.system.create_catalog(
+ 'gt_postgresql1',
+ 'jdbc-postgresql',
+ MAP(
+ ARRAY['jdbc-url', 'jdbc-user', 'jdbc-password', 'jdbc-database',
'jdbc-driver', 'trino.bypass.join-pushdown.strategy', 'cloud.region-code',
'cloud.trino.connection-url', 'cloud.trino.connection-user',
'cloud.trino.connection-password'],
+ ARRAY['${postgresql_uri}/db', 'postgres', 'postgres', 'db',
'org.postgresql.Driver', 'EAGER','c2', '${trino_remote_jdbc_uri}', 'admin', '']
+ )
+);
+
+call gravitino.system.create_catalog(
+ 'gt_postgresql1_1',
+ 'jdbc-postgresql',
+ map(
+ array['jdbc-url', 'jdbc-user', 'jdbc-password', 'jdbc-database',
'jdbc-driver', 'trino.bypass.join-pushdown.strategy'],
+ array['${postgresql_uri}/db', 'postgres', 'postgres', 'db',
'org.postgresql.Driver', 'EAGER']
+ )
+);
+
+CREATE SCHEMA gt_postgresql1_1.gt_datatype;
+
+CREATE TABLE gt_postgresql1_1.gt_datatype.tb01 (
+ name varchar,
+ salary int
+);
+
+<RETRY_WITH_NOT_EXISTS> SELECT * FROM gt_postgresql1.gt_datatype.tb01;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/ignored/00001_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/ignored/00001_datatype.sql
new file mode 100644
index 000000000..3be0ff739
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/ignored/00001_datatype.sql
@@ -0,0 +1,50 @@
+CREATE TABLE gt_postgresql1.gt_datatype.tb03 (
+ f1 VARCHAR(200),
+ f2 CHAR(20),
+ f3 VARBINARY,
+ f4 DECIMAL(10, 3),
+ f5 REAL,
+ f6 DOUBLE,
+ f7 BOOLEAN,
+ f9 SMALLINT,
+ f10 INT,
+ f11 INTEGER,
+ f12 BIGINT,
+ f13 DATE,
+ f14 TIME,
+ f15 TIMESTAMP,
+ f16 TIMESTAMP WITH TIME ZONE
+);
+
+INSERT INTO gt_postgresql1.gt_datatype.tb03 (f1, f2, f3, f4, f5, f6, f7, f9,
f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', 'Text1', x'65', 123.456, 7.89, 12.34, false, 100,
1000, 1000, 100000, DATE '2024-01-01',
+ TIME '08:00:00', TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP
'2024-01-01 08:00:00 UTC');
+
+INSERT INTO gt_postgresql1.gt_datatype.tb03 (f1, f2, f3, f4, f5, f6, f7, f9,
f10, f11, f12, f13, f14, f15, f16)
+VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
NULL, NULL, NULL, NULL);
+
+SELECT * FROM gt_postgresql1.gt_datatype.tb03 ORDER BY f1;
+
+CREATE TABLE gt_postgresql1.gt_datatype.tb04 (
+ f1 VARCHAR(200) not null ,
+ f2 CHAR(20) not null ,
+ f3 VARBINARY not null ,
+ f4 DECIMAL(10, 3) not null ,
+ f5 REAL not null ,
+ f6 DOUBLE not null ,
+ f7 BOOLEAN not null ,
+ f9 SMALLINT not null ,
+ f10 INT not null ,
+ f11 INTEGER not null ,
+ f12 BIGINT not null ,
+ f13 DATE not null ,
+ f14 TIME not null ,
+ f15 TIMESTAMP not null,
+ f16 TIMESTAMP WITH TIME ZONE not null
+);
+
+INSERT INTO gt_postgresql1.gt_datatype.tb04 (f1, f2, f3, f4, f5, f6, f7, f9,
f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', 'Text1', x'65', 123.456, 7.89, 12.34, false, 100,
1000, 1000, 100000, DATE '2024-01-01',
+ TIME '08:00:00', TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP
'2024-01-01 08:00:00 UTC');
+
+SELECT * FROM gt_postgresql1.gt_datatype.tb04 ORDER BY f1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/ignored/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/ignored/00001_datatype.txt
new file mode 100644
index 000000000..b81e1d745
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/jdbc-postgresql/ignored/00001_datatype.txt
@@ -0,0 +1,15 @@
+CREATE TABLE
+
+INSERT: 1 row
+
+INSERT: 1 row
+
+"Sample text 1","Text1
","65","123.456","7.89","12.34","false","100","1000","1000","100000","2024-01-01","08:00:00.000000","2024-01-01
08:00:00.000000","2024-01-01 08:00:00.000000 UTC"
+"","","","","","","","","","","","","","",""
+
+CREATE TABLE
+
+INSERT: 1 row
+
+"Sample text 1","Text1
","65","123.456","7.89","12.34","false","100","1000","1000","100000","2024-01-01","08:00:00.000000","2024-01-01
08:00:00.000000","2024-01-01 08:00:00.000000 UTC"
+"","","","","","","","","","","","","","",""
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/00002_partition_sort_order.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/00002_partition_sort_order.sql
new file mode 100644
index 000000000..57f320a1a
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/00002_partition_sort_order.sql
@@ -0,0 +1,11 @@
+select * from lineitem order by orderkey, partkey limit 5;
+
+select * from tb03;
+
+SHOW CREATE SCHEMA gt_db2;
+
+SHOW SCHEMAS LIKE 'gt_%2';
+
+SHOW TABLES LIKE '%item';
+
+SHOW COLUMNS FROM gt_db2.tb01;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/00002_partition_sort_order.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/00002_partition_sort_order.txt
new file mode 100644
index 000000000..c8a5db28d
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/00002_partition_sort_order.txt
@@ -0,0 +1,27 @@
+"1","22","48","4","28.00","25816.56","0.09","0.06","N","O","1996-04-21","1996-03-30","1996-05-16","NONE","AIR","lites.
fluffily even de"
+"1","157","10","6","32.00","33828.80","0.07","0.02","N","O","1996-01-30","1996-02-07","1996-02-03","DELIVER
IN PERSON","MAIL","arefully slyly ex"
+"1","241","23","5","24.00","27389.76","0.10","0.04","N","O","1996-03-30","1996-03-14","1996-04-01","NONE","FOB","
pending foxes. slyly re"
+"1","637","38","3","8.00","12301.04","0.10","0.02","N","O","1996-01-29","1996-03-05","1996-01-31","TAKE
BACK RETURN","REG AIR","riously. regular, express dep"
+"1","674","75","2","36.00","56688.12","0.09","0.06","N","O","1996-04-12","1996-02-28","1996-04-20","TAKE
BACK RETURN","MAIL","ly final dependencies: slyly bold "
+
+<BLANK_LINE>
+
+"CREATE SCHEMA %gt_db2"
+
+"gt_db2"
+
+"lineitem"
+
+"f1","varchar","",""
+"f3","varbinary","",""
+"f4","decimal(10,3)","",""
+"f5","real","",""
+"f6","double","",""
+"f7","boolean","",""
+"f10","integer","",""
+"f11","integer","",""
+"f12","bigint","",""
+"f13","date","",""
+"f14","time(3)","",""
+"f15","timestamp(3)","",""
+"f16","timestamp(3) with time zone","",""
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_cleanup.sql
new file mode 100644
index 000000000..e69de29bb
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_mysql_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_mysql_cleanup.sql
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_mysql_cleanup.sql
@@ -0,0 +1 @@
+
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_mysql_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_mysql_prepare.sql
new file mode 100644
index 000000000..0ff3ce472
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_mysql_prepare.sql
@@ -0,0 +1,119 @@
+call gravitino.system.create_catalog(
+ 'gt_iceberg_mysql1_1',
+ 'lakehouse-iceberg',
+ map(
+ array['uri', 'catalog-backend', 'warehouse', 'jdbc-user',
'jdbc-password', 'jdbc-driver'],
+
array['${mysql_uri}/iceberg_db?createDatabaseIfNotExist=true&useSSL=false',
'jdbc',
+ '${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT', 'trino',
'ds123', 'com.mysql.cj.jdbc.Driver']
+ )
+);
+
+call gravitino.system.create_catalog(
+ 'gt_iceberg_mysql1',
+ 'lakehouse-iceberg',
+ map(
+ array['uri', 'catalog-backend', 'warehouse', 'jdbc-user',
'jdbc-password', 'jdbc-driver', 'cloud.region-code',
'cloud.trino.connection-url', 'cloud.trino.connection-user',
'cloud.trino.connection-password'],
+
array['${mysql_uri}/iceberg_db?createDatabaseIfNotExist=true&useSSL=false',
'jdbc',
+ '${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT', 'trino',
'ds123', 'com.mysql.cj.jdbc.Driver','c2', '${trino_remote_jdbc_uri}', 'admin',
'']
+ )
+);
+
+CREATE SCHEMA gt_iceberg_mysql1_1.gt_db2;
+
+USE gt_iceberg_mysql1_1.gt_db2;
+
+-- Unsupported Type: CHAR TINYINT, SMALLINT
+CREATE TABLE tb01 (
+ f1 VARCHAR,
+ f3 VARBINARY,
+ f4 DECIMAL(10, 3),
+ f5 REAL,
+ f6 DOUBLE,
+ f7 BOOLEAN,
+ f10 INT,
+ f11 INTEGER,
+ f12 BIGINT,
+ f13 DATE,
+ f14 TIME,
+ f15 TIMESTAMP,
+ f16 TIMESTAMP WITH TIME ZONE
+);
+
+INSERT INTO tb01 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', x'65', 123.456, 7.89, 12.34, true, 1000, 1000,
100000, DATE '2024-01-01', TIME '08:00:00',
+ TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP '2024-01-01 08:00:00 UTC');
+
+INSERT INTO tb01 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
NULL, NULL);
+
+CREATE TABLE tb02 (
+ f1 VARCHAR NOT NULL ,
+ f3 VARBINARY NOT NULL ,
+ f4 DECIMAL(10, 3) NOT NULL ,
+ f5 REAL NOT NULL ,
+ f6 DOUBLE NOT NULL ,
+ f7 BOOLEAN NOT NULL ,
+ f10 INT NOT NULL ,
+ f11 INTEGER NOT NULL ,
+ f12 BIGINT NOT NULL ,
+ f13 DATE NOT NULL ,
+ f14 TIME NOT NULL ,
+ f15 TIMESTAMP NOT NULL,
+ f16 TIMESTAMP WITH TIME ZONE NOT NULL
+);
+
+INSERT INTO tb02 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', x'65', 123.456, 7.89, 12.34, true, 1000, 1000,
100000, DATE '2024-01-01', TIME '08:00:00',
+ TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP '2024-01-01 08:00:00 UTC');
+
+CREATE TABLE lineitem(
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+WITH (
+ partitioning = ARRAY['year(commitdate)'],
+ sorted_by = ARRAY['partkey', 'extendedprice desc']
+);
+
+insert into lineitem select * from tpch.tiny.lineitem;
+
+CREATE TABLE tb03(
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+WITH (
+ partitioning = ARRAY['day(commitdate)', 'month(shipdate)',
'bucket(partkey, 2)', 'truncate(shipinstruct, 2)'],
+ sorted_by = ARRAY['partkey asc nulls last', 'extendedprice DESC NULLS
FIRST']
+);
+
+USE gt_iceberg_mysql1.gt_db2;
+
+<RETRY_WITH_NOT_EXISTS> SELECT * FROM gt_iceberg_mysql1.gt_db2.tb03;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_postgres_cleanup.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_postgres_cleanup.sql
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_postgres_cleanup.sql
@@ -0,0 +1 @@
+
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_postgres_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_postgres_prepare.sql
new file mode 100644
index 000000000..d15eb6fa0
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_postgres_prepare.sql
@@ -0,0 +1,119 @@
+call gravitino.system.create_catalog(
+ 'gt_iceberg_postgresql1_1',
+ 'lakehouse-iceberg',
+ map(
+ array['uri', 'catalog-backend', 'warehouse', 'jdbc-user',
'jdbc-password', 'jdbc-database', 'jdbc-driver',
'trino.bypass.join-pushdown.strategy'],
+ array['${postgresql_uri}/db', 'jdbc',
+ '${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT', 'postgres',
'postgres', 'db' ,'org.postgresql.Driver', 'EAGER']
+ )
+);
+
+call gravitino.system.create_catalog(
+ 'gt_iceberg_postgresql1',
+ 'lakehouse-iceberg',
+ map(
+ array['uri', 'catalog-backend', 'warehouse', 'jdbc-user',
'jdbc-password', 'jdbc-database', 'jdbc-driver',
'trino.bypass.join-pushdown.strategy', 'cloud.region-code',
'cloud.trino.connection-url', 'cloud.trino.connection-user',
'cloud.trino.connection-password'],
+ array['${postgresql_uri}/db', 'jdbc',
+ '${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT', 'postgres',
'postgres', 'db' ,'org.postgresql.Driver', 'EAGER', 'c2',
'${trino_remote_jdbc_uri}', 'admin', '']
+ )
+);
+
+CREATE SCHEMA gt_iceberg_postgresql1_1.gt_db2;
+
+USE gt_iceberg_postgresql1_1.gt_db2;
+
+-- Unsupported Type: CHAR TINYINT, SMALLINT
+CREATE TABLE tb01 (
+ f1 VARCHAR,
+ f3 VARBINARY,
+ f4 DECIMAL(10, 3),
+ f5 REAL,
+ f6 DOUBLE,
+ f7 BOOLEAN,
+ f10 INT,
+ f11 INTEGER,
+ f12 BIGINT,
+ f13 DATE,
+ f14 TIME,
+ f15 TIMESTAMP,
+ f16 TIMESTAMP WITH TIME ZONE
+);
+
+INSERT INTO tb01 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', x'65', 123.456, 7.89, 12.34, true, 1000, 1000,
100000, DATE '2024-01-01', TIME '08:00:00',
+ TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP '2024-01-01 08:00:00 UTC');
+
+INSERT INTO tb01 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
NULL, NULL);
+
+CREATE TABLE tb02 (
+ f1 VARCHAR NOT NULL ,
+ f3 VARBINARY NOT NULL ,
+ f4 DECIMAL(10, 3) NOT NULL ,
+ f5 REAL NOT NULL ,
+ f6 DOUBLE NOT NULL ,
+ f7 BOOLEAN NOT NULL ,
+ f10 INT NOT NULL ,
+ f11 INTEGER NOT NULL ,
+ f12 BIGINT NOT NULL ,
+ f13 DATE NOT NULL ,
+ f14 TIME NOT NULL ,
+ f15 TIMESTAMP NOT NULL,
+ f16 TIMESTAMP WITH TIME ZONE NOT NULL
+);
+
+INSERT INTO tb02 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', x'65', 123.456, 7.89, 12.34, true, 1000, 1000,
100000, DATE '2024-01-01', TIME '08:00:00',
+ TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP '2024-01-01 08:00:00 UTC');
+
+CREATE TABLE lineitem(
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+WITH (
+ partitioning = ARRAY['year(commitdate)'],
+ sorted_by = ARRAY['partkey', 'extendedprice desc']
+);
+
+insert into lineitem select * from tpch.tiny.lineitem;
+
+CREATE TABLE tb03(
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+WITH (
+ partitioning = ARRAY['day(commitdate)', 'month(shipdate)',
'bucket(partkey, 2)', 'truncate(shipinstruct, 2)'],
+ sorted_by = ARRAY['partkey asc nulls last', 'extendedprice DESC NULLS
FIRST']
+);
+
+USE gt_iceberg_postgresql1.gt_db2;
+
+<RETRY_WITH_NOT_EXISTS> SELECT * FROM gt_iceberg_postgresql1.gt_db2.tb03;
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_prepare.sql
new file mode 100644
index 000000000..a666a58c5
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/catalog_iceberg_prepare.sql
@@ -0,0 +1,117 @@
+call gravitino.system.create_catalog(
+ 'gt_iceberg1_1',
+ 'lakehouse-iceberg',
+ map(
+ array['uri', 'catalog-backend', 'warehouse'],
+ array['${hive_uri}', 'hive',
'${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT']
+ )
+);
+
+call gravitino.system.create_catalog(
+ 'gt_iceberg1',
+ 'lakehouse-iceberg',
+ map(
+ array['uri', 'catalog-backend', 'warehouse', 'cloud.region-code',
'cloud.trino.connection-url', 'cloud.trino.connection-user',
'cloud.trino.connection-password'],
+ array['${hive_uri}', 'hive',
'${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT','c2',
'${trino_remote_jdbc_uri}', 'admin', '']
+ )
+);
+
+CREATE SCHEMA gt_iceberg1_1.gt_db2;
+
+USE gt_iceberg1_1.gt_db2;
+
+-- Unsupported Type: CHAR TINYINT, SMALLINT
+CREATE TABLE tb01 (
+ f1 VARCHAR,
+ f3 VARBINARY,
+ f4 DECIMAL(10, 3),
+ f5 REAL,
+ f6 DOUBLE,
+ f7 BOOLEAN,
+ f10 INT,
+ f11 INTEGER,
+ f12 BIGINT,
+ f13 DATE,
+ f14 TIME,
+ f15 TIMESTAMP,
+ f16 TIMESTAMP WITH TIME ZONE
+);
+
+INSERT INTO tb01 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', x'65', 123.456, 7.89, 12.34, true, 1000, 1000,
100000, DATE '2024-01-01', TIME '08:00:00',
+ TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP '2024-01-01 08:00:00 UTC');
+
+INSERT INTO tb01 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
NULL, NULL);
+
+CREATE TABLE tb02 (
+ f1 VARCHAR NOT NULL ,
+ f3 VARBINARY NOT NULL ,
+ f4 DECIMAL(10, 3) NOT NULL ,
+ f5 REAL NOT NULL ,
+ f6 DOUBLE NOT NULL ,
+ f7 BOOLEAN NOT NULL ,
+ f10 INT NOT NULL ,
+ f11 INTEGER NOT NULL ,
+ f12 BIGINT NOT NULL ,
+ f13 DATE NOT NULL ,
+ f14 TIME NOT NULL ,
+ f15 TIMESTAMP NOT NULL,
+ f16 TIMESTAMP WITH TIME ZONE NOT NULL
+);
+
+INSERT INTO tb02 (f1, f3, f4, f5, f6, f7, f10, f11, f12, f13, f14, f15, f16)
+VALUES ('Sample text 1', x'65', 123.456, 7.89, 12.34, true, 1000, 1000,
100000, DATE '2024-01-01', TIME '08:00:00',
+ TIMESTAMP '2024-01-01 08:00:00', TIMESTAMP '2024-01-01 08:00:00 UTC');
+
+CREATE TABLE lineitem(
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+WITH (
+ partitioning = ARRAY['year(commitdate)'],
+ sorted_by = ARRAY['partkey', 'extendedprice desc']
+);
+
+insert into lineitem select * from tpch.tiny.lineitem;
+
+CREATE TABLE tb03(
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+WITH (
+ partitioning = ARRAY['day(commitdate)', 'month(shipdate)',
'bucket(partkey, 2)', 'truncate(shipinstruct, 2)'],
+ sorted_by = ARRAY['partkey asc nulls last', 'extendedprice DESC NULLS
FIRST']
+);
+
+USE gt_iceberg1.gt_db2;
+
+<RETRY_WITH_NOT_EXISTS> SELECT * FROM gt_iceberg1.gt_db2.tb03;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00001_datatype.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00001_datatype.sql
new file mode 100644
index 000000000..8c59b3dda
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00001_datatype.sql
@@ -0,0 +1,7 @@
+SHOW CREATE TABLE tb01;
+
+show create table tb02;
+
+select * from tb01 order by f1;
+
+select * from tb02 order by f1;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00001_datatype.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00001_datatype.txt
new file mode 100644
index 000000000..ca58569ad
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00001_datatype.txt
@@ -0,0 +1,45 @@
+"CREATE TABLE %.gt_db2.tb01 (
+ f1 varchar,
+ f3 varbinary,
+ f4 decimal(10, 3),
+ f5 real,
+ f6 double,
+ f7 boolean,
+ f10 integer,
+ f11 integer,
+ f12 bigint,
+ f13 date,
+ f14 time(6),
+ f15 timestamp(6),
+ f16 timestamp(6) with time zone
+)
+COMMENT ''
+WITH (
+ location = 'hdfs://%/user/iceberg/warehouse/TrinoQueryIT/gt_db2%/tb01'
+)"
+
+"CREATE TABLE %.gt_db2.tb02 (
+ f1 varchar NOT NULL,
+ f3 varbinary NOT NULL,
+ f4 decimal(10, 3) NOT NULL,
+ f5 real NOT NULL,
+ f6 double NOT NULL,
+ f7 boolean NOT NULL,
+ f10 integer NOT NULL,
+ f11 integer NOT NULL,
+ f12 bigint NOT NULL,
+ f13 date NOT NULL,
+ f14 time(6) NOT NULL,
+ f15 timestamp(6) NOT NULL,
+ f16 timestamp(6) with time zone NOT NULL
+)
+COMMENT ''
+WITH (
+ location = 'hdfs://%/user/iceberg/warehouse/TrinoQueryIT/gt_db2%/tb02'
+)"
+
+"Sample text
1","65","123.456","7.89","12.34","true","1000","1000","100000","2024-01-01","08:00:00.000000","2024-01-01
08:00:00.000000","2024-01-01 08:00:00.000000 UTC"
+"","","","","","","","","","","","",""
+
+"Sample text
1","65","123.456","7.89","12.34","true","1000","1000","100000","2024-01-01","08:00:00.000000","2024-01-01
08:00:00.000000","2024-01-01 08:00:00.000000 UTC"
+"","","","","","","","","","","","",""
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00002_partition_sort_order.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00002_partition_sort_order.sql
new file mode 100644
index 000000000..7c7f2401b
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00002_partition_sort_order.sql
@@ -0,0 +1,3 @@
+show create table lineitem;
+
+show create table tb03;
\ No newline at end of file
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00002_partition_sort_order.txt
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00002_partition_sort_order.txt
new file mode 100644
index 000000000..2e4fe5568
--- /dev/null
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/lakehouse-iceberg/ignored/00002_partition_sort_order.txt
@@ -0,0 +1,51 @@
+
+"CREATE TABLE %.gt_db2.lineitem (
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+COMMENT ''
+WITH (
+ location = 'hdfs://%/user/iceberg/warehouse/TrinoQueryIT/gt_db2%/lineitem',
+ partitioning = ARRAY['year(commitdate)'],
+ sorted_by = ARRAY['partkey','extendedprice DESC']
+)"
+
+
+"CREATE TABLE %.gt_db2.tb01 (
+ orderkey bigint,
+ partkey bigint,
+ suppkey bigint,
+ linenumber integer,
+ quantity decimal(12, 2),
+ extendedprice decimal(12, 2),
+ discount decimal(12, 2),
+ tax decimal(12, 2),
+ returnflag varchar,
+ linestatus varchar,
+ shipdate date,
+ commitdate date,
+ receiptdate date,
+ shipinstruct varchar,
+ shipmode varchar,
+ comment varchar
+)
+COMMENT ''
+WITH (
+ location = 'hdfs://%/user/iceberg/warehouse/TrinoQueryIT/gt_db2%/tb01',
+  partitioning = ARRAY['day(commitdate)','month(shipdate)','bucket(partkey, 2)','truncate(shipinstruct, 2)'],
+  sorted_by = ARRAY['partkey ASC NULLS LAST','extendedprice DESC NULLS FIRST']
+)"
diff --git
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/tpch/catalog_mysql_prepare.sql
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/tpch/catalog_mysql_prepare.sql
index 962734423..9a87a2a57 100644
---
a/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/tpch/catalog_mysql_prepare.sql
+++
b/trino-connector/integration-test/src/test/resources/trino-ci-testset/trino-cascading-testsets/tpch/catalog_mysql_prepare.sql
@@ -114,3 +114,5 @@ insert into part select * from tpch.tiny.part;
insert into partsupp select * from tpch.tiny.partsupp;
insert into region select * from tpch.tiny.region;
insert into supplier select * from tpch.tiny.supplier;
+
+use gt_mysql2.gt_tpch;
\ No newline at end of file
diff --git a/trino-connector/integration-test/trino-test-tools/download_jar.sh
b/trino-connector/integration-test/trino-test-tools/download_jar.sh
index 9e1aefeee..36cba2bce 100755
--- a/trino-connector/integration-test/trino-test-tools/download_jar.sh
+++ b/trino-connector/integration-test/trino-test-tools/download_jar.sh
@@ -51,12 +51,14 @@ download_mysql_jar() {
download_jar "mysql-connector-java-8.0.26.jar" \
"https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.26/mysql-connector-java-8.0.26.jar"
\
"$GRAVITINO_SERVER_DIR/catalogs/jdbc-mysql/libs"
+  cp -rp $GRAVITINO_SERVER_DIR/catalogs/jdbc-mysql/libs/mysql-connector-java-8.0.26.jar $GRAVITINO_SERVER_DIR/catalogs/lakehouse-iceberg/libs
}
download_postgresql_jar() {
download_jar "postgresql-42.7.0.jar" \
"https://jdbc.postgresql.org/download/postgresql-42.7.0.jar" \
"$GRAVITINO_SERVER_DIR/catalogs/jdbc-postgresql/libs"
+  cp -rp $GRAVITINO_SERVER_DIR/catalogs/jdbc-postgresql/libs/postgresql-42.7.0.jar $GRAVITINO_SERVER_DIR/catalogs/lakehouse-iceberg/libs
}
download_iceberg_aws_bundle() {
diff --git
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/docker-compose.yaml
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/docker-compose.yaml
index dcc7a55e4..e38c92b3b 100644
---
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/docker-compose.yaml
+++
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/docker-compose.yaml
@@ -43,6 +43,41 @@ services:
timeout: 60s
retries: 5
+ hive:
+ image: apache/gravitino-playground:hive-2.7.3
+ container_name: trino-ci-hive
+ networks:
+ - trino-net
+ environment:
+ - HADOOP_USER_NAME=root
+ entrypoint: /bin/bash /tmp/hive/init.sh
+ volumes:
+ - ./init/hive:/tmp/hive
+ healthcheck:
+ test: [ "CMD", "/tmp/check-status.sh" ]
+ interval: 10s
+ timeout: 60s
+ retries: 5
+ start_period: 20s
+
+ postgresql:
+ image: postgres:16
+ container_name: trino-ci-postgresql
+ networks:
+ - trino-net
+ restart: always
+ environment:
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ volumes:
+ - ./init/postgres:/docker-entrypoint-initdb.d/
+ healthcheck:
+ test: [ "CMD-SHELL", "pg_isready -U postgres" ]
+ interval: 10s
+ timeout: 60s
+ retries: 5
+ start_period: 30s
+
trino-remote:
image: trinodb/trino:435
networks:
@@ -69,6 +104,10 @@ services:
depends_on:
mysql:
condition: service_healthy
+ hive:
+ condition: service_healthy
+ postgresql:
+ condition: service_healthy
trino-local:
image: trinodb/trino:435
@@ -97,6 +136,10 @@ services:
depends_on:
mysql:
condition: service_healthy
+ hive:
+ condition: service_healthy
+ postgresql:
+ condition: service_healthy
networks:
trino-net:
diff --git
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/hive/init.sh
old mode 100755
new mode 100644
similarity index 50%
copy from
trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
copy to
trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/hive/init.sh
index d7fb8bf68..6d0870917
---
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
+++
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/hive/init.sh
@@ -1,4 +1,3 @@
-#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
@@ -17,31 +16,5 @@
# specific language governing permissions and limitations
# under the License.
-#
-
-output=$(docker inspect --format='{{.Name}}:{{range
.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(docker ps -q) |grep
"/trino-ci-" | sed 's/\/trino-ci-//g')
-
-gravitino_uri=""
-mysql_uri=""
-trino_uri=""
-trino_remote_jdbc_uri=""
-
-while IFS= read -r line; do
- name=$(echo $line | cut -d':' -f1)
- ip=$(echo $line | cut -d':' -f2)
-
- case $name in
- trino-local)
- gravitino_uri="--gravitino_uri=http://$ip:8090"
- trino_uri="--trino_uri=http://$ip:8080"
- ;;
- trino-remote)
- trino_remote_jdbc_uri="jdbc:trino://$ip:8080"
- ;;
- mysql)
- mysql_uri="--mysql_uri=jdbc:mysql://$ip"
- ;;
- esac
-done <<< "$output"
-
-echo "$gravitino_uri $mysql_uri $trino_uri
--params=trino_remote_jdbc_uri,$trino_remote_jdbc_uri"
+sed -i 's|hdfs://localhost:9000|hdfs://hive:9000|g' /usr/local/hive/conf/hive-site.xml
+/bin/bash /usr/local/sbin/start.sh
\ No newline at end of file
diff --git
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/postgres/init.sql
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/postgres/init.sql
new file mode 100644
index 000000000..735fa6f67
--- /dev/null
+++
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/postgres/init.sql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+CREATE DATABASE db;
diff --git
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-local/init.sh
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-local/init.sh
index f2a29f5a1..641ddafeb 100644
---
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-local/init.sh
+++
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-local/init.sh
@@ -27,6 +27,7 @@ cp "$trino_conf_dir/config/jvm.config" /etc/trino/jvm.config
cp "$trino_conf_dir/config/log4j2.properties" /etc/trino/log4j2.properties
cp "$trino_conf_dir/config/catalog/gravitino.properties"
/etc/trino/catalog/gravitino.properties
cp "$trino_conf_dir/config/catalog/trino.properties"
/etc/trino/catalog/trino.properties
+cp /usr/lib/trino/plugin/mysql/mysql-connector-j-8.2.0.jar /usr/lib/trino/plugin/iceberg
#start the gravitino server
gravitino_server_dir=/tmp/gravitino-server
diff --git
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-remote/init.sh
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-remote/init.sh
index d521b0fbb..6b9549fce 100644
---
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-remote/init.sh
+++
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/init/trino-remote/init.sh
@@ -26,13 +26,13 @@ cp "$trino_conf_dir/config/config.properties"
/etc/trino/config.properties
cp "$trino_conf_dir/config/jvm.config" /etc/trino/jvm.config
cp "$trino_conf_dir/config/log4j2.properties" /etc/trino/log4j2.properties
cp "$trino_conf_dir/config/catalog/gravitino.properties"
/etc/trino/catalog/gravitino.properties
+cp /usr/lib/trino/plugin/mysql/mysql-connector-j-8.2.0.jar /usr/lib/trino/plugin/iceberg
#
# Update `gravitino.uri = http://GRAVITINO_HOST_IP:GRAVITINO_HOST_PORT` in the
`conf/catalog/gravitino.properties`
sed -i
"s/GRAVITINO_HOST_IP:GRAVITINO_HOST_PORT/${GRAVITINO_HOST_IP}:${GRAVITINO_HOST_PORT}/g"
/etc/trino/catalog/gravitino.properties
# Update `gravitino.metalake = GRAVITINO_METALAKE_NAME` in the
`conf/catalog/gravitino.properties`
sed -i "s/GRAVITINO_METALAKE_NAME/${GRAVITINO_METALAKE_NAME}/g"
/etc/trino/catalog/gravitino.properties
-
# Check the number of Gravitino connector plugins present in the Trino plugin
directory
num_of_gravitino_connector=$(ls /usr/lib/trino/plugin/gravitino | grep
gravitino-trino-connector-* | wc -l)
if [[ "${num_of_gravitino_connector}" -ne 1 ]]; then
diff --git
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
index d7fb8bf68..b3b8bc9a2 100755
---
a/trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
+++
b/trino-connector/integration-test/trino-test-tools/trino-cascading-env/inspect_ip.sh
@@ -25,6 +25,9 @@ gravitino_uri=""
mysql_uri=""
trino_uri=""
trino_remote_jdbc_uri=""
+hive_uri=""
+postgresql_uri=""
+hdfs_uri=""
while IFS= read -r line; do
name=$(echo $line | cut -d':' -f1)
@@ -41,7 +44,14 @@ while IFS= read -r line; do
mysql)
mysql_uri="--mysql_uri=jdbc:mysql://$ip"
;;
+ hive)
+ hive_uri="--hive_uri=thrift://$ip:9083"
+ hdfs_uri="--hdfs_uri=hdfs://$ip:9000"
+ ;;
+ postgresql)
+ postgresql_uri="--postgresql_uri=jdbc:postgresql://$ip:5432"
+ ;;
esac
done <<< "$output"
-echo "$gravitino_uri $mysql_uri $trino_uri --params=trino_remote_jdbc_uri,$trino_remote_jdbc_uri"
+echo "$gravitino_uri $mysql_uri $trino_uri $hive_uri $postgresql_uri $hdfs_uri --params=trino_remote_jdbc_uri,$trino_remote_jdbc_uri"