morningman commented on code in PR #59720:
URL: https://github.com/apache/doris/pull/59720#discussion_r2697548734


##########
regression-test/suites/external_table_p0/iceberg/test_iceberg_export_timestamp_tz.groovy:
##########
@@ -0,0 +1,168 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.nio.file.Paths
+
+suite("test_iceberg_export_timestamp_tz", "external,hive,external_docker") {
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable Hive test.")
+        return;
+    }
+
+    for (String hivePrefix : ["hive2"]) {
+        setHivePrefix(hivePrefix)
+        String hms_port = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String hdfs_port = context.config.otherConfigs.get(hivePrefix + "HdfsPort")
+        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+        // It's okay to use a random `hdfsUser`, but it cannot be empty.
+        def hdfsUserName = "doris"
+        def defaultFS = "hdfs://${externalEnvIp}:${hdfs_port}"
+        def outfile_path = "/user/doris/tmp_data"
+        def uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+        def outfile_to_HDFS = { format, export_table_name ->
+            // select ... into outfile ...
+            def uuid = UUID.randomUUID().toString()
+            outfile_path = "/user/doris/tmp_data/${uuid}"
+            uri = "${defaultFS}" + "${outfile_path}/exp_"
+
+            def res = sql """
+                SELECT * FROM ${export_table_name} t ORDER BY id
+                INTO OUTFILE "${uri}"
+                FORMAT AS ${format}
+                PROPERTIES (
+                    "fs.defaultFS"="${defaultFS}",
+                    "hadoop.username" = "${hdfsUserName}"
+                );
+            """
+            logger.info("outfile success path: " + res[0][3]);
+            return res[0][3]
+        }
+
+        try {
+            String catalog_name_with_export = "test_iceberg_timestamp_tz_with_mapping_export"
+            String db_name = "test_timestamp_tz"
+            String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+            String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+            sql """set time_zone = 'Asia/Shanghai';"""
+            
+            sql """drop catalog if exists ${catalog_name_with_export}"""
+            sql """
+            CREATE CATALOG ${catalog_name_with_export} PROPERTIES (
+                'type'='iceberg',
+                'iceberg.catalog.type'='rest',
+                'uri' = 'http://${externalEnvIp}:${rest_port}',
+                "s3.access_key" = "admin",
+                "s3.secret_key" = "password",
+                "s3.endpoint" = "http://${externalEnvIp}:${minio_port}";,
+                "s3.region" = "us-east-1",
+                "s3.path.style.access" = "true",
+                "s3.connection.ssl.enabled" = "false",
+                "enable.mapping.varbinary"="true",
+                "enable.mapping.timestamp_tz"="true"
+            );"""
+
+
+            sql """switch ${catalog_name_with_export}"""
+            sql """use ${db_name}"""
+            order_qt_select_desc_orc """ desc test_ice_timestamp_tz_orc; """
+            order_qt_select_desc_parquet """ desc test_ice_timestamp_tz_parquet; """
+            // TODO: it seems that writing timestamp_tz to parquet has some problem

Review Comment:
   What is the problem?



##########
docker/thirdparties/docker-compose/mysql/init/04-insert.sql:
##########
@@ -1212,6 +1212,15 @@ INSERT INTO doris_test.`test_cast` VALUES (2, '2', '2022-01-02', '2022-01-02 00:
 INSERT INTO test_varbinary_db.`test_varbinary` VALUES (1, X'48656C6C6F20576F726C64'), (2, X'48656C6C6F20576F726C6421');
 INSERT INTO test_varbinary_db.`test_varbinary_udf` VALUES (1, X'48656C6C6F20576F726C64'), (2, X'48656C6C6F20576F726C6421'), (3, NULL), (4, X'AB'), (5, X'ABCDEF');
 
+
+
+
+
+
+SET time_zone = '+08:00';

Review Comment:
   The same suggestion applies to the other JDBC cases.
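   
   A minimal sketch of what that could look like, assuming the other JDBC init scripts insert timestamp data the same way (the PostgreSQL path and table below are illustrative, not from this PR):
   
   ```sql
   -- e.g. docker-compose/postgresql/init/04-insert.sql (hypothetical)
   -- Pin the session time zone before inserting timestamp values, so the
   -- test data is deterministic regardless of the container's default zone.
   SET TIME ZONE 'Asia/Shanghai';
   -- INSERT INTO doris_test.some_timestamp_table VALUES (...);
   ```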



##########
docker/thirdparties/docker-compose/mysql/init/04-insert.sql:
##########
@@ -1212,6 +1212,15 @@ INSERT INTO doris_test.`test_cast` VALUES (2, '2', '2022-01-02', '2022-01-02 00:
 INSERT INTO test_varbinary_db.`test_varbinary` VALUES (1, X'48656C6C6F20576F726C64'), (2, X'48656C6C6F20576F726C6421');
 INSERT INTO test_varbinary_db.`test_varbinary_udf` VALUES (1, X'48656C6C6F20576F726C64'), (2, X'48656C6C6F20576F726C6421'), (3, NULL), (4, X'AB'), (5, X'ABCDEF');
 
+
+
+
+
+
+SET time_zone = '+08:00';

Review Comment:
   Do we need to set time_zone back to its original value,
   in case we add other `insert` statements after this one?
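   
   A sketch of the save-and-restore pattern (plain MySQL session variables, nothing Doris-specific assumed):
   
   ```sql
   -- Remember the current session time zone before overriding it.
   SET @saved_tz = @@session.time_zone;
   SET time_zone = '+08:00';
   
   -- ... timestamp-sensitive INSERT statements go here ...
   
   -- Restore the original zone so any later INSERTs are unaffected.
   SET time_zone = @saved_tz;
   ```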



##########
docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run10.sql:
##########
@@ -0,0 +1,65 @@
+use paimon;
+create database if not exists paimon_test_timestamp_tz;
+USE paimon_test_timestamp_tz;
+
+DROP TABLE IF EXISTS test_ice_timestamp_tz_orc;
+DROP TABLE IF EXISTS test_ice_timestamp_tz_parquet;
+
+SET spark.sql.session.timeZone = Asia/Shanghai;
+
+CREATE TABLE test_ice_timestamp_tz_orc (
+    id INT,
+    ts_tz TIMESTAMP_LTZ
+)
+USING paimon
+TBLPROPERTIES(
+    'file.format' = 'orc', 
+    'primary-key' = 'id'
+);
+
+CREATE TABLE test_ice_timestamp_tz_parquet (
+    id INT,
+    ts_tz TIMESTAMP_LTZ
+)
+USING paimon
+TBLPROPERTIES(
+    'file.format' = 'parquet',
+    'primary-key' = 'id'
+);
+
+INSERT INTO test_ice_timestamp_tz_orc VALUES (1, TIMESTAMP_LTZ '2025-01-01 00:00:00');
+INSERT INTO test_ice_timestamp_tz_orc VALUES (2, TIMESTAMP_LTZ '2025-06-01 12:34:56.789');
+INSERT INTO test_ice_timestamp_tz_orc VALUES (3, TIMESTAMP_LTZ '2025-12-31 23:59:59.999999');
+INSERT INTO test_ice_timestamp_tz_orc VALUES (4, NULL);
+
+
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (1, TIMESTAMP_LTZ '2025-01-01 00:00:00');
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (2, TIMESTAMP_LTZ '2025-06-01 12:34:56.789');
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (3, TIMESTAMP_LTZ '2025-12-31 23:59:59.999999');
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (4, NULL);
+
+SELECT * FROM test_ice_timestamp_tz_orc;

Review Comment:
   remove `select`



##########
docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/iceberg/run25.sql:
##########
@@ -0,0 +1,64 @@
+create database if not exists demo.test_timestamp_tz;
+USE demo.test_timestamp_tz;
+
+DROP TABLE IF EXISTS test_ice_timestamp_tz_orc;
+DROP TABLE IF EXISTS test_ice_timestamp_tz_parquet;
+
+SET spark.sql.session.timeZone = Asia/Shanghai;
+
+CREATE TABLE test_ice_timestamp_tz_orc (
+    id INT,
+    ts_tz TIMESTAMP_LTZ
+)
+USING iceberg
+TBLPROPERTIES(
+    'write.format.default' = 'orc',
+    'format-version' = '1'
+);
+
+CREATE TABLE test_ice_timestamp_tz_parquet (
+    id INT,
+    ts_tz TIMESTAMP_LTZ
+)
+USING iceberg
+TBLPROPERTIES(
+    'write.format.default' = 'parquet',
+    'format-version' = '1'
+);
+
+INSERT INTO test_ice_timestamp_tz_orc VALUES (1, TIMESTAMP_LTZ '2025-01-01 00:00:00');
+INSERT INTO test_ice_timestamp_tz_orc VALUES (2, TIMESTAMP_LTZ '2025-06-01 12:34:56.789');
+INSERT INTO test_ice_timestamp_tz_orc VALUES (3, TIMESTAMP_LTZ '2025-12-31 23:59:59.999999');
+INSERT INTO test_ice_timestamp_tz_orc VALUES (4, NULL);
+
+
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (1, TIMESTAMP_LTZ '2025-01-01 00:00:00');
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (2, TIMESTAMP_LTZ '2025-06-01 12:34:56.789');
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (3, TIMESTAMP_LTZ '2025-12-31 23:59:59.999999');
+INSERT INTO test_ice_timestamp_tz_parquet VALUES (4, NULL);
+
+SELECT * FROM test_ice_timestamp_tz_orc;

Review Comment:
   Why call `select` in this script?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

