docete commented on a change in pull request #10745: [FLINK-15445][connectors/jdbc] JDBC Table Source didn't work for Type…
URL: https://github.com/apache/flink/pull/10745#discussion_r377585237
 
 

 ##########
 File path: flink-connectors/flink-jdbc/src/main/java/org/apache/flink/api/java/io/jdbc/dialect/JDBCDialects.java
 ##########
 @@ -46,10 +59,84 @@
                return Optional.empty();
        }
 
-       private static class DerbyDialect implements JDBCDialect {
+       private abstract static class AbstractDialect implements JDBCDialect {
+
+               @Override
+               public void validate(TableSchema schema) throws ValidationException {
+                       for (int i = 0; i < schema.getFieldCount(); i++) {
+                               DataType dt = schema.getFieldDataType(i).get();
+                               String fieldName = schema.getFieldName(i).get();
+
+                               // TODO: We can't convert VARBINARY(n) data type to
+                               //  PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO in LegacyTypeInfoDataTypeConverter
+                               //  when n is smaller than Integer.MAX_VALUE
+                               if (unsupportedTypes().contains(dt.getLogicalType().getTypeRoot()) ||
+                                               (!(dt.getLogicalType() instanceof LegacyTypeInformationType) &&
+                                               (VARBINARY == dt.getLogicalType().getTypeRoot()
 
 Review comment:
   Yes, I think so. Will update soon.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to