RocMarshal commented on code in PR #123:
URL: https://github.com/apache/flink-connector-jdbc/pull/123#discussion_r1675007647


##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/converter/AbstractJdbcRowConverter.java:
##########
@@ -18,252 +18,17 @@
 
 package org.apache.flink.connector.jdbc.converter;
 
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.connector.jdbc.utils.JdbcTypeUtil;
-import org.apache.flink.table.data.DecimalData;
-import org.apache.flink.table.data.GenericRowData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.data.StringData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.types.logical.DecimalType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.connector.jdbc.core.database.dialect.AbstractDialectConverter;
 import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.TimestampType;
-
-import java.io.Serializable;
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.sql.Date;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.LocalTime;
-
-import static org.apache.flink.util.Preconditions.checkNotNull;
-
-/** Base class for all converters that convert between JDBC object and Flink internal object. */
-public abstract class AbstractJdbcRowConverter implements JdbcRowConverter {
-
-    protected final RowType rowType;
-    protected final JdbcDeserializationConverter[] toInternalConverters;
-    protected final JdbcSerializationConverter[] toExternalConverters;
-    protected final LogicalType[] fieldTypes;
-
-    public abstract String converterName();
 
+/**
+ * Base class for all converters that convert between JDBC object and Flink internal object.
+ *
+ * @deprecated use AbstractDialectConverter

Review Comment:
   ```suggestion
    * @deprecated Use {@link AbstractDialectConverter}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/catalog/AbstractJdbcCatalog.java:
##########
@@ -18,551 +18,22 @@
 
 package org.apache.flink.connector.jdbc.catalog;
 
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableFactory;
-import org.apache.flink.table.api.Schema;
-import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.catalog.AbstractCatalog;
-import org.apache.flink.table.catalog.CatalogBaseTable;
-import org.apache.flink.table.catalog.CatalogDatabase;
-import org.apache.flink.table.catalog.CatalogDatabaseImpl;
-import org.apache.flink.table.catalog.CatalogFunction;
-import org.apache.flink.table.catalog.CatalogPartition;
-import org.apache.flink.table.catalog.CatalogPartitionSpec;
-import org.apache.flink.table.catalog.CatalogTable;
-import org.apache.flink.table.catalog.ObjectPath;
-import org.apache.flink.table.catalog.UniqueConstraint;
-import org.apache.flink.table.catalog.exceptions.CatalogException;
-import org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException;
-import org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException;
-import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;
-import org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException;
-import org.apache.flink.table.catalog.exceptions.FunctionNotExistException;
-import org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException;
-import org.apache.flink.table.catalog.exceptions.PartitionNotExistException;
-import org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException;
-import org.apache.flink.table.catalog.exceptions.TableAlreadyExistException;
-import org.apache.flink.table.catalog.exceptions.TableNotExistException;
-import org.apache.flink.table.catalog.exceptions.TableNotPartitionedException;
-import org.apache.flink.table.catalog.exceptions.TablePartitionedException;
-import org.apache.flink.table.catalog.stats.CatalogColumnStatistics;
-import org.apache.flink.table.catalog.stats.CatalogTableStatistics;
-import org.apache.flink.table.expressions.Expression;
-import org.apache.flink.table.factories.Factory;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.util.Preconditions;
-import org.apache.flink.util.StringUtils;
-import org.apache.flink.util.TemporaryClassLoaderContext;
-
-import org.apache.commons.compress.utils.Lists;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Properties;
-import java.util.function.Predicate;
-
-import static org.apache.flink.connector.jdbc.JdbcConnectionOptions.PASSWORD_KEY;
-import static org.apache.flink.connector.jdbc.JdbcConnectionOptions.USER_KEY;
-import static org.apache.flink.connector.jdbc.JdbcConnectionOptions.getBriefAuthProperties;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.PASSWORD;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.TABLE_NAME;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.URL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.USERNAME;
-import static org.apache.flink.connector.jdbc.table.JdbcDynamicTableFactory.IDENTIFIER;
-import static org.apache.flink.table.factories.FactoryUtil.CONNECTOR;
-import static org.apache.flink.util.Preconditions.checkArgument;
-import static org.apache.flink.util.Preconditions.checkNotNull;
-
-/** Abstract catalog for any JDBC catalogs. */
-public abstract class AbstractJdbcCatalog extends AbstractCatalog {
-
-    private static final Logger LOG = LoggerFactory.getLogger(AbstractJdbcCatalog.class);
-
-    protected final ClassLoader userClassLoader;
-    protected final String baseUrl;
-    protected final String defaultUrl;
-    protected final Properties connectionProperties;
+/**
+ * Abstract catalog for any JDBC catalogs.
+ *
+ * @deprecated use org.apache.flink.connector.jdbc.core.table.catalog.AbstractJdbcCatalog

Review Comment:
   ```suggestion
    * @deprecated Use {@link org.apache.flink.connector.jdbc.core.database.catalog.AbstractJdbcCatalog}.
   ```



##########
flink-connector-jdbc-core/src/main/java/org/apache/flink/connector/jdbc/internal/options/InternalJdbcConnectionOptions.java:
##########
@@ -179,7 +179,7 @@ public Builder setDBUrl(String dbURL) {
 
         /**
         * optional, Handle the SQL dialect of jdbc driver. If not set, it will be infer by {@link
-         * JdbcDialectLoader#load} from DB url.
+         * JdbcFactoryLoader#load} from DB url.

Review Comment:
   ```suggestion
            * JdbcFactoryLoader#load(String, ClassLoader)} from DB url.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/AbstractDialect.java:
##########
@@ -41,228 +28,11 @@
  *
 * <p>Implementors should be careful to check the default SQL statements are performant for their
  * specific dialect and override them if necessary.
+ *
+ * @deprecated use org.apache.flink.connector.jdbc.core.table.dialect.AbstractDialect

Review Comment:
   ```suggestion
    * @deprecated Use {@link org.apache.flink.connector.jdbc.core.database.dialect.AbstractDialect}.
   ```
   How about using the Javadoc {@link} tag to point to the new target class?
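   For illustration, a minimal sketch of the `@Deprecated` + `{@link}` pattern being suggested across this PR (the class name below is hypothetical and not part of this diff):
   ```java
   package org.apache.flink.connector.jdbc.dialect;

   import org.apache.flink.connector.jdbc.core.database.dialect.AbstractDialect;

   /**
    * Base class for JDBC dialect implementations.
    *
    * @deprecated Use {@link AbstractDialect}.
    */
   @Deprecated
   public abstract class LegacyDialectBase {
       // Hypothetical shell: only the Javadoc/@Deprecated pattern matters here;
       // the real behavior lives in the new core class.
   }
   ```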



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/JdbcDialect.java:
##########
@@ -19,139 +19,16 @@
 package org.apache.flink.connector.jdbc.dialect;
 
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.connector.jdbc.converter.JdbcRowConverter;
-import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.io.Serializable;
-import java.util.Optional;
+import org.apache.flink.connector.jdbc.core.database.JdbcFactory;
 
 /**
 * Represents a dialect of SQL implemented by a particular JDBC system. Dialects should be immutable
  * and stateless.
  *
- * @see JdbcDialectFactory
+ * @see JdbcFactory
+ * @deprecated use org.apache.flink.connector.jdbc.core.table.dialect.JdbcDialect

Review Comment:
   ```suggestion
    * @deprecated Use {@link org.apache.flink.connector.jdbc.core.database.dialect.JdbcDialect}.
   ```



##########
flink-connector-jdbc-core/src/main/java/org/apache/flink/connector/jdbc/core/database/catalog/JdbcCatalogTypeMapper.java:
##########
@@ -16,22 +16,19 @@
  * limitations under the License.
  */
 
-package org.apache.flink.connector.jdbc.databases.db2.dialect;
+package org.apache.flink.connector.jdbc.core.database.catalog;
 
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+import org.apache.flink.annotation.PublicEvolving;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.types.DataType;
 
-/** Factory for {@link Db2Dialect}. */
-@Internal
-public class Db2DialectFactory implements JdbcDialectFactory {
-    @Override
-    public boolean acceptsURL(String url) {
-        return url.startsWith("jdbc:db2:");
-    }
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
 
-    @Override
-    public JdbcDialect create() {
-        return new Db2Dialect();
-    }
+/** Separate the jdbc meta-information type to flink table type into the interface. */
+@PublicEvolving
+public interface JdbcCatalogTypeMapper {

Review Comment:
   How about renaming it to 'JdbcTypeMapper'?
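   For clarity, a sketch of the renamed interface, assuming it keeps the single `mapping` method of the old `JdbcDialectTypeMapper` (the interface body is cut off in this hunk):
   ```java
   package org.apache.flink.connector.jdbc.core.database.catalog;

   import org.apache.flink.annotation.PublicEvolving;
   import org.apache.flink.table.catalog.ObjectPath;
   import org.apache.flink.table.types.DataType;

   import java.sql.ResultSetMetaData;
   import java.sql.SQLException;

   /** Maps JDBC meta-information types of a table to Flink table types. */
   @PublicEvolving
   public interface JdbcTypeMapper {

       // Assumed to be the same contract as the former JdbcDialectTypeMapper#mapping.
       DataType mapping(ObjectPath tablePath, ResultSetMetaData metadata, int colIndex)
               throws SQLException;
   }
   ```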



##########
flink-connector-jdbc-core/src/main/java/org/apache/flink/connector/jdbc/core/datastream/Jdbc.java:
##########
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.connector.jdbc.core.datastream;
+
+import org.apache.flink.annotation.PublicEvolving;
+import org.apache.flink.connector.jdbc.core.datastream.sink.JdbcSink;
+import org.apache.flink.connector.jdbc.core.datastream.sink.JdbcSinkBuilder;
+import org.apache.flink.connector.jdbc.core.datastream.source.JdbcSource;
+import org.apache.flink.connector.jdbc.core.datastream.source.JdbcSourceBuilder;
+
+/** Facade to create JDBC stream sources and sinks. */

Review Comment:
   
   The repo lacks documentation pages describing the new JDBC sink V2 and its current usage here.
   Would you like to supplement them via https://issues.apache.org/jira/browse/FLINK-35811?
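   Until those docs land, a rough usage sketch for reference; every builder method name below is an assumption carried over from the deprecated `org.apache.flink.connector.jdbc.source.JdbcSourceBuilder`, since the new `core.datastream` API is not documented in this PR:
   ```java
   import org.apache.flink.api.common.typeinfo.TypeInformation;
   import org.apache.flink.connector.jdbc.core.datastream.source.JdbcSource;

   public class JdbcSourceUsageSketch {
       public static JdbcSource<String> buildSource() {
           // Hypothetical: assumes the new builder mirrors the old setter names.
           return JdbcSource.<String>builder()
                   .setDBUrl("jdbc:derby:memory:example")
                   .setSql("SELECT name FROM users")
                   .setResultExtractor(resultSet -> resultSet.getString(1))
                   .setTypeInformation(TypeInformation.of(String.class))
                   .build();
       }
   }
   ```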
   



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/JdbcDialectTypeMapper.java:
##########
@@ -18,15 +18,12 @@
 
 package org.apache.flink.connector.jdbc.dialect;
 
-import org.apache.flink.table.catalog.ObjectPath;
-import org.apache.flink.table.types.DataType;
+import org.apache.flink.connector.jdbc.core.database.catalog.JdbcCatalogTypeMapper;
 
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-
-/** Separate the jdbc meta-information type to flink table type into the interface. */
-public interface JdbcDialectTypeMapper {
-
-    DataType mapping(ObjectPath tablePath, ResultSetMetaData metadata, int colIndex)
-            throws SQLException;
-}
+/**
+ * Separate the jdbc meta-information type to flink table type into the interface.
+ *
+ * @deprecated use JdbcCatalogTypeMapper

Review Comment:
   ```suggestion
    * @deprecated Use {@link JdbcCatalogTypeMapper}.
   ```
   
   Maybe it should use the renamed name mentioned above~



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/source/JdbcSourceBuilder.java:
##########
@@ -94,8 +89,11 @@
  * @see PreparedStatement
  * @see DriverManager
  * @see JdbcSource
+ * @deprecated please use {@link
+ *     org.apache.flink.connector.jdbc.core.datastream.source.JdbcSourceBuilder}

Review Comment:
   ```suggestion
    * @deprecated Please use {@link
    *     org.apache.flink.connector.jdbc.core.datastream.source.JdbcSourceBuilder}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/source/JdbcSource.java:
##########
@@ -50,8 +50,13 @@
 import java.util.ArrayList;
 import java.util.Objects;
 
-/** JDBC source. */
+/**
+ * JDBC source.
+ *
+ * @deprecated please use {@link org.apache.flink.connector.jdbc.core.datastream.source.JdbcSource}

Review Comment:
   ```suggestion
    * @deprecated Please use {@link org.apache.flink.connector.jdbc.core.datastream.source.JdbcSource}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/JdbcDialectFactory.java:
##########
@@ -18,41 +18,34 @@
 
 package org.apache.flink.connector.jdbc.dialect;
 
-import org.apache.flink.annotation.PublicEvolving;
+import org.apache.flink.connector.jdbc.core.database.JdbcFactory;
 import org.apache.flink.util.StringUtils;
 
 /**
- * A factory to create a specific {@link JdbcDialect}. This factory is used with Java's Service
- * Provider Interfaces (SPI) for discovering.
+ * A factory to create a specific {@link org.apache.flink.connector.jdbc.dialect.JdbcDialect}. This
+ * factory is used with Java's Service Provider Interfaces (SPI) for discovering.
  *
  * <p>Classes that implement this interface can be added to the
- * "META_INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory" file of a JAR file
- * in the current classpath to be found.
+ * "META_INF/services/org.apache.flink.connector.jdbc.JdbcDialectFactory" file of a JAR file in the
+ * current classpath to be found.
  *
- * @see JdbcDialect
+ * @see org.apache.flink.connector.jdbc.dialect.JdbcDialect
+ * @deprecated use JdbcFactory

Review Comment:
   ```suggestion
    * @deprecated Use {@link JdbcFactory}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/source/reader/extractor/ResultExtractor.java:
##########
@@ -19,39 +19,19 @@
 package org.apache.flink.connector.jdbc.source.reader.extractor;
 
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.types.Row;
 
-import java.io.Serializable;
 import java.sql.ResultSet;
-import java.sql.SQLException;
 
 /**
  * The Extractor to extract the data from {@link ResultSet}.
  *
  * @param <T> The target data type.
+ * @deprecated please use {@link

Review Comment:
   ```suggestion
    * @deprecated Please use {@link
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/converter/JdbcRowConverter.java:
##########
@@ -19,34 +19,15 @@
 package org.apache.flink.connector.jdbc.converter;
 
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
+import org.apache.flink.connector.jdbc.core.database.dialect.JdbcDialectConverter;
 import org.apache.flink.table.data.RowData;
 
-import java.io.Serializable;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
 /**
 * Converter that is responsible to convert between JDBC object and Flink SQL internal data
  * structure {@link RowData}.
+ *
+ * @deprecated use JdbcDialectConverter

Review Comment:
   ```suggestion
    * @deprecated Use {@link JdbcDialectConverter}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/catalog/JdbcCatalog.java:
##########
@@ -33,7 +34,12 @@
 
 import static org.apache.flink.connector.jdbc.JdbcConnectionOptions.getBriefAuthProperties;
 
-/** Catalogs for relational databases via JDBC. */
+/**
+ * Catalogs for relational databases via JDBC.
+ *
+ * @deprecated user org.apache.flink.connector.jdbc.core.table.catalog.JdbcCatalog

Review Comment:
   ```suggestion
    * @deprecated Use {@link org.apache.flink.connector.jdbc.core.database.catalog.JdbcCatalog}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/catalog/factory/JdbcCatalogFactory.java:
##########
@@ -30,18 +31,26 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import static org.apache.flink.connector.jdbc.catalog.factory.JdbcCatalogFactoryOptions.BASE_URL;
-import static org.apache.flink.connector.jdbc.catalog.factory.JdbcCatalogFactoryOptions.COMPATIBLE_MODE;
-import static org.apache.flink.connector.jdbc.catalog.factory.JdbcCatalogFactoryOptions.DEFAULT_DATABASE;
-import static org.apache.flink.connector.jdbc.catalog.factory.JdbcCatalogFactoryOptions.PASSWORD;
-import static org.apache.flink.connector.jdbc.catalog.factory.JdbcCatalogFactoryOptions.USERNAME;
+import static org.apache.flink.connector.jdbc.core.database.catalog.factory.JdbcCatalogFactoryOptions.BASE_URL;
+import static org.apache.flink.connector.jdbc.core.database.catalog.factory.JdbcCatalogFactoryOptions.COMPATIBLE_MODE;
+import static org.apache.flink.connector.jdbc.core.database.catalog.factory.JdbcCatalogFactoryOptions.DEFAULT_DATABASE;
+import static org.apache.flink.connector.jdbc.core.database.catalog.factory.JdbcCatalogFactoryOptions.PASSWORD;
+import static org.apache.flink.connector.jdbc.core.database.catalog.factory.JdbcCatalogFactoryOptions.USERNAME;
 import static org.apache.flink.connector.jdbc.utils.JdbcUtils.getConnectionProperties;
 import static org.apache.flink.table.factories.FactoryUtil.PROPERTY_VERSION;
 
-/** Factory for {@link JdbcCatalog}. */
+/**
+ * Factory for {@link JdbcCatalog}.
+ *
+ * @deprecated use org.apache.flink.connector.jdbc.core.table.catalog.factory.JdbcCatalogFactory

Review Comment:
   ```suggestion
    * @deprecated Use {@link org.apache.flink.connector.jdbc.core.database.catalog.factory.JdbcCatalogFactory}.
   ```



##########
flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/table/JdbcConnectorOptions.java:
##########
@@ -19,165 +19,13 @@
 package org.apache.flink.connector.jdbc.table;
 
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.configuration.ConfigOption;
-import org.apache.flink.configuration.ConfigOptions;
-import org.apache.flink.table.connector.source.lookup.LookupOptions;
-import org.apache.flink.table.factories.FactoryUtil;
 
-import java.time.Duration;
-
-/** Options for the JDBC connector. */
+/**
+ * Options for the JDBC connector.
+ *
+ * @deprecated please use {@link org.apache.flink.connector.jdbc.core.table.JdbcConnectorOptions}

Review Comment:
   ```suggestion
    * @deprecated Please use {@link org.apache.flink.connector.jdbc.core.table.JdbcConnectorOptions}.
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
