zjureel commented on code in PR #423:
URL: https://github.com/apache/flink-table-store/pull/423#discussion_r1061186421
##########
flink-table-store-core/src/main/java/org/apache/flink/table/store/file/schema/SchemaEvolutionUtil.java:
##########
@@ -304,4 +307,56 @@ private static int indexOf(DataField dataField, LinkedHashMap<Integer, DataField
         throw new IllegalArgumentException(
                 String.format("Can't find data field %s", dataField.name()));
     }
+
+    /**
+     * Create the converter mapping from table fields to underlying data fields. For example, the
+     * table and data fields are as follows:
+     *
+     * <ul>
+     *   <li>table fields: 1->c INT, 6->b STRING, 3->a BIGINT
+     *   <li>data fields: 1->a BIGINT, 3->c DOUBLE
+     * </ul>
+     *
+     * <p>We can get the column types (1->a BIGINT), (3->c DOUBLE) from data fields for (1->c INT)
+     * and (3->a BIGINT) in table fields through the index mapping [0, -1, 1], then compare the
+     * data types and create the converter mapping.
+     *
+     * <p>TODO: support nested index mapping when nested schema evolution is supported.
+     *
+     * @param tableFields the fields of the table
+     * @param dataFields the fields of the underlying data
+     * @param indexMapping the index mapping from table fields to data fields
+     * @return the converter mapping
+     */
+    public static CastExecutor<?, ?>[] createConvertMapping(
+            List<DataField> tableFields, List<DataField> dataFields, int[] indexMapping) {
+        CastExecutor<?, ?>[] converterMapping = new CastExecutor<?, ?>[tableFields.size()];
+        boolean castExist = false;
+        for (int i = 0; i < tableFields.size(); i++) {
+            int dataIndex = indexMapping == null ? i : indexMapping[i];
+            if (dataIndex < 0) {
+                converterMapping[i] = null;
+            } else {
+                DataField tableField = tableFields.get(i);
+                DataField dataField = dataFields.get(dataIndex);
+                if (dataField.type().equals(tableField.type())) {
Review Comment:
Done
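For readers following the javadoc example above, here is a minimal, self-contained sketch of how the index mapping [0, -1, 1] resolves table fields to underlying data fields. It uses plain strings instead of the actual DataField/CastExecutor classes, so all names here are illustrative only, not the project's API:

```java
import java.util.Arrays;
import java.util.List;

public class IndexMappingExample {
    public static void main(String[] args) {
        // Table fields from the javadoc example: 1->c INT, 6->b STRING, 3->a BIGINT (id -> name type)
        List<String> tableFields = Arrays.asList("1->c INT", "6->b STRING", "3->a BIGINT");
        // Underlying data fields: 1->a BIGINT, 3->c DOUBLE
        List<String> dataFields = Arrays.asList("1->a BIGINT", "3->c DOUBLE");

        // Index mapping from table fields to data fields; -1 means the column
        // does not exist in the underlying data.
        int[] indexMapping = {0, -1, 1};

        for (int i = 0; i < tableFields.size(); i++) {
            int dataIndex = indexMapping[i];
            if (dataIndex < 0) {
                System.out.println(tableFields.get(i) + " -> no underlying column (null entry)");
            } else {
                // The real code then compares the two data types and, if they differ,
                // creates a cast for the conversion (e.g. BIGINT -> INT).
                System.out.println(tableFields.get(i) + " reads " + dataFields.get(dataIndex));
            }
        }
    }
}
```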
##########
flink-table-store-core/src/main/java/org/apache/flink/table/store/file/schema/SchemaEvolutionUtil.java:
##########
@@ -304,4 +307,56 @@ private static int indexOf(DataField dataField, LinkedHashMap<Integer, DataField
         throw new IllegalArgumentException(
                 String.format("Can't find data field %s", dataField.name()));
     }
+
+    /**
+     * Create the converter mapping from table fields to underlying data fields. For example, the
+     * table and data fields are as follows:
+     *
+     * <ul>
+     *   <li>table fields: 1->c INT, 6->b STRING, 3->a BIGINT
+     *   <li>data fields: 1->a BIGINT, 3->c DOUBLE
+     * </ul>
+     *
+     * <p>We can get the column types (1->a BIGINT), (3->c DOUBLE) from data fields for (1->c INT)
+     * and (3->a BIGINT) in table fields through the index mapping [0, -1, 1], then compare the
+     * data types and create the converter mapping.
+     *
+     * <p>TODO: support nested index mapping when nested schema evolution is supported.
+     *
+     * @param tableFields the fields of the table
+     * @param dataFields the fields of the underlying data
+     * @param indexMapping the index mapping from table fields to data fields
+     * @return the converter mapping
+     */
+    public static CastExecutor<?, ?>[] createConvertMapping(
+            List<DataField> tableFields, List<DataField> dataFields, int[] indexMapping) {
+        CastExecutor<?, ?>[] converterMapping = new CastExecutor<?, ?>[tableFields.size()];
+        boolean castExist = false;
+        for (int i = 0; i < tableFields.size(); i++) {
+            int dataIndex = indexMapping == null ? i : indexMapping[i];
+            if (dataIndex < 0) {
+                converterMapping[i] = null;
+            } else {
+                DataField tableField = tableFields.get(i);
+                DataField dataField = dataFields.get(dataIndex);
+                if (dataField.type().equals(tableField.type())) {
+                    converterMapping[i] = null;
Review Comment:
Done
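To illustrate the branch under discussion (a null entry when the data type already equals the table type, a converter otherwise), below is a hedged sketch that uses java.util.function.Function as a simplified stand-in for CastExecutor; the field ids, types, and conversions are taken from the javadoc example, everything else is hypothetical:

```java
import java.util.function.Function;

public class ConvertMappingSketch {
    public static void main(String[] args) {
        // One entry per table field; null means no conversion is needed
        // because the underlying data type already matches the table type.
        Function<?, ?>[] converterMapping = new Function<?, ?>[3];

        // 1->c INT is read from 1->a BIGINT: types differ, so a BIGINT -> INT cast is needed.
        converterMapping[0] = (Function<Long, Integer>) Long::intValue;
        // 6->b STRING has no underlying column, so there is nothing to convert.
        converterMapping[1] = null;
        // 3->a BIGINT is read from 3->c DOUBLE: types differ, so a DOUBLE -> BIGINT cast is needed.
        converterMapping[2] = (Function<Double, Long>) Double::longValue;

        @SuppressWarnings("unchecked")
        Function<Long, Integer> toInt = (Function<Long, Integer>) converterMapping[0];
        System.out.println(toInt.apply(42L)); // prints 42
    }
}
```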