davidzollo commented on code in PR #9743:
URL: https://github.com/apache/seatunnel/pull/9743#discussion_r2463810889


##########
seatunnel-connectors-v2/connector-hive/src/main/java/org/apache/seatunnel/connectors/seatunnel/hive/sink/HiveSaveModeHandler.java:
##########
@@ -0,0 +1,418 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.hive.sink;
+
+import org.apache.seatunnel.api.configuration.ReadonlyConfig;
+import org.apache.seatunnel.api.sink.DataSaveMode;
+import org.apache.seatunnel.api.sink.SaveModeHandler;
+import org.apache.seatunnel.api.sink.SchemaSaveMode;
+import org.apache.seatunnel.api.table.catalog.Catalog;
+import org.apache.seatunnel.api.table.catalog.CatalogTable;
+import org.apache.seatunnel.api.table.catalog.TablePath;
+import org.apache.seatunnel.api.table.catalog.TableSchema;
+import org.apache.seatunnel.connectors.seatunnel.hive.config.HiveOptions;
+import org.apache.seatunnel.connectors.seatunnel.hive.exception.HiveConnectorErrorCode;
+import org.apache.seatunnel.connectors.seatunnel.hive.exception.HiveConnectorException;
+import org.apache.seatunnel.connectors.seatunnel.hive.utils.HiveMetaStoreCatalog;
+import org.apache.seatunnel.connectors.seatunnel.hive.utils.HiveTableTemplateUtils;
+import org.apache.seatunnel.connectors.seatunnel.hive.utils.HiveTypeConvertor;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TException;
+
+import lombok.extern.slf4j.Slf4j;
+
+import java.util.ArrayList;
+import java.util.List;
+
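+/**
+ * {@link SaveModeHandler} for the Hive sink: applies the configured {@link SchemaSaveMode}
+ * (RECREATE_SCHEMA, CREATE_SCHEMA_WHEN_NOT_EXIST, ERROR_WHEN_SCHEMA_NOT_EXIST, IGNORE)
+ * against the Hive metastore; data save mode cleanup is delegated to the aggregated committer.
+ */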
+@Slf4j
+public class HiveSaveModeHandler implements SaveModeHandler, AutoCloseable {
+
+    private final ReadonlyConfig readonlyConfig;
+    private final CatalogTable catalogTable;
+    private final SchemaSaveMode schemaSaveMode;
+    private final TablePath tablePath;
+    private final String dbName;
+    private final String tableName;
+    private final TableSchema tableSchema;
+
+    private HiveMetaStoreCatalog hiveCatalog;
+
+    public HiveSaveModeHandler(
+            ReadonlyConfig readonlyConfig,
+            CatalogTable catalogTable,
+            SchemaSaveMode schemaSaveMode) {
+        this.readonlyConfig = readonlyConfig;
+        this.catalogTable = catalogTable;
+        this.schemaSaveMode = schemaSaveMode;
+        this.tablePath = TablePath.of(readonlyConfig.get(HiveOptions.TABLE_NAME));
+        this.dbName = tablePath.getDatabaseName();
+        this.tableName = tablePath.getTableName();
+        this.tableSchema = catalogTable.getTableSchema();
+    }
+
+    @Override
+    public void open() {
+        this.hiveCatalog = HiveMetaStoreCatalog.create(readonlyConfig);
+    }
+
+    @Override
+    public void handleSchemaSaveModeWithRestore() {
+        // For Hive, we use the same logic as handleSchemaSaveMode
+        handleSchemaSaveMode();
+    }
+
+    @Override
+    public TablePath getHandleTablePath() {
+        return tablePath;
+    }
+
+    @Override
+    public Catalog getHandleCatalog() {
+        return hiveCatalog;
+    }
+
+    @Override
+    public SchemaSaveMode getSchemaSaveMode() {
+        return schemaSaveMode;
+    }
+
+    @Override
+    public DataSaveMode getDataSaveMode() {
+        return readonlyConfig.get(HiveSinkOptions.DATA_SAVE_MODE);
+    }
+
+    @Override
+    public void close() throws Exception {
+        if (hiveCatalog != null) {
+            hiveCatalog.close();
+        }
+    }
+
+    @Override
+    public void handleSchemaSaveMode() {
+        try {
+            switch (schemaSaveMode) {
+                case RECREATE_SCHEMA:
+                    handleRecreateSchema();
+                    break;
+                case CREATE_SCHEMA_WHEN_NOT_EXIST:
+                    handleCreateSchemaWhenNotExist();
+                    break;
+                case ERROR_WHEN_SCHEMA_NOT_EXIST:
+                    handleErrorWhenSchemaNotExist();
+                    break;
+                case IGNORE:
+                    log.info(
+                            "Ignore schema save mode, skip schema handling for table {}.{}",
+                            dbName,
+                            tableName);
+                    break;
+                default:
+                    throw new HiveConnectorException(
+                            HiveConnectorErrorCode.CREATE_HIVE_TABLE_FAILED,
+                            "Unsupported schema save mode: " + schemaSaveMode);
+            }
+        } catch (Exception e) {
+            throw new HiveConnectorException(
+                    HiveConnectorErrorCode.CREATE_HIVE_TABLE_FAILED,
+                    "Failed to handle schema save mode: " + e.getMessage(),
+                    e);
+        }
+    }
+
+    @Override
+    public void handleDataSaveMode() {
+        // No-op: data cleanup is handled in AggregatedCommitter via overwrite or DROP_DATA
+    }
+
+    private void handleRecreateSchema() throws TException {
+        log.info("Recreate schema mode: dropping and recreating table {}.{}", dbName, tableName);
+
+        // Do NOT create database automatically. Ensure database exists first.
+        if (!hiveCatalog.databaseExists(dbName)) {
+            throw new HiveConnectorException(
+                    HiveConnectorErrorCode.CREATE_HIVE_TABLE_FAILED,
+                    "Database " + dbName + " does not exist. Please create it manually.");
+        }
+
+        // Drop table if exists
+        if (hiveCatalog.tableExists(dbName, tableName)) {
+            hiveCatalog.dropTable(dbName, tableName);
+            log.info("Dropped existing table {}.{}", dbName, tableName);
+        }
+
+        // Create table using template
+        createTable();
+    }
+
+    private void handleCreateSchemaWhenNotExist() throws TException {
+        log.info("Create schema when not exist mode for table {}.{}", dbName, tableName);
+
+        if (!hiveCatalog.databaseExists(dbName)) {
+            throw new HiveConnectorException(
+                    HiveConnectorErrorCode.CREATE_HIVE_TABLE_FAILED,
+                    "Database " + dbName + " does not exist. Please create it manually.");
+        }
+
+        if (!hiveCatalog.tableExists(dbName, tableName)) {
+            createTable();
+        } else {
+            log.info("Table {}.{} already exists, skipping creation", dbName, tableName);
+        }
+    }
+
+    private void handleErrorWhenSchemaNotExist() throws TException {
+        log.info("Error when schema not exist mode: checking table {}.{}", dbName, tableName);
+
+        // Check if database exists
+        if (!hiveCatalog.databaseExists(dbName)) {
+            throw new HiveConnectorException(
+                    HiveConnectorErrorCode.CREATE_HIVE_TABLE_FAILED,
+                    "Database " + dbName + " does not exist");
+        }
+
+        // Check if table exists
+        if (!hiveCatalog.tableExists(dbName, tableName)) {
+            throw new HiveConnectorException(
+                    HiveConnectorErrorCode.CREATE_HIVE_TABLE_FAILED,
+                    "Table " + dbName + "." + tableName + " does not exist");
+        }
+    }
+
+    private void createTable() throws TException {
+        log.info("Creating table {}.{} using template-based approach", dbName, tableName);
+        Table table = buildTableFromTemplate();
+        hiveCatalog.createTableFromTemplate(table);
+        log.info("Successfully created table {}.{}", dbName, tableName);
+    }
+
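+    /** Reads partition fields from the save mode create template, if one is configured. */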
+    private List<String> extractPartitionFieldsFromConfig() {
+        if (readonlyConfig.getOptional(HiveSinkOptions.SAVE_MODE_CREATE_TEMPLATE).isPresent()) {
+            String template = readonlyConfig.get(HiveSinkOptions.SAVE_MODE_CREATE_TEMPLATE);
+            return HiveTableTemplateUtils.extractPartitionFieldsFromTemplate(template);
+        }
+        return new ArrayList<>();
+    }
+
+    private Table buildTableFromTemplate() {

Review Comment:
   @yzeng1618 I reviewed this PR and it looks fine, but to be honest, I share 
this concern: it is not easy to maintain. I believe that one day you will 
become a committer and PMC member, and you will also need to consider how 
hard the project is to maintain.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
