This is an automated email from the ASF dual-hosted git repository.
lincoln pushed a commit to branch release-1.17
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/release-1.17 by this push:
new 7c2631b8ba8 [FLINK-32249][table-planner] Use proper toString conversion when constructing column comments for CatalogTable
7c2631b8ba8 is described below
commit 7c2631b8ba8c935be03c91fd44b7aa42937a9698
Author: lincoln lee <[email protected]>
AuthorDate: Tue Jun 6 08:52:14 2023 +0800
[FLINK-32249][table-planner] Use proper toString conversion when constructing column comments for CatalogTable
This closes #22715
---
.../planner/operations/SqlCreateTableConverter.java | 4 +++-
.../operations/SqlDdlToOperationConverterTest.java | 21 ++++++++++++++-------
2 files changed, 17 insertions(+), 8 deletions(-)
diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlCreateTableConverter.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlCreateTableConverter.java
index 5d878341577..1340669d105 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlCreateTableConverter.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlCreateTableConverter.java
@@ -39,6 +39,7 @@ import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.ddl.CreateTableOperation;
import org.apache.flink.table.planner.calcite.FlinkCalciteSqlValidator;
import org.apache.flink.table.planner.calcite.FlinkPlannerImpl;
+import org.apache.flink.table.planner.utils.OperationConverterUtils;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
@@ -165,7 +166,8 @@ class SqlCreateTableConverter {
col -> col.getName().getSimple(),
col ->
StringUtils.strip(
-                                                col.getComment().get().toString(), "'")));
+                                                OperationConverterUtils.getComment(col),
+                                                "'")));
TableSchema mergedSchema =
mergeTableLikeUtil.mergeTables(
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/operations/SqlDdlToOperationConverterTest.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/operations/SqlDdlToOperationConverterTest.java
index d5d097f9f78..0c9927238b0 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/operations/SqlDdlToOperationConverterTest.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/operations/SqlDdlToOperationConverterTest.java
@@ -32,11 +32,13 @@ import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.CatalogFunction;
import org.apache.flink.table.catalog.CatalogFunctionImpl;
import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ContextResolvedTable;
import org.apache.flink.table.catalog.FunctionLanguage;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.TableChange;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;
import org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException;
@@ -218,12 +220,12 @@ public class SqlDdlToOperationConverterTest extends SqlToOperationConverterTestBase
public void testCreateTableWithPrimaryKey() {
final String sql =
"CREATE TABLE tbl1 (\n"
- + " a bigint,\n"
+ + " a bigint comment '测试utf8中文',\n"
+ " b varchar, \n"
+ " c int, \n"
- + " d varchar, \n"
+ + " d varchar comment _utf8'测试_utf8中文', \n"
+ " constraint ct1 primary key(a, b) not enforced\n"
- + ") with (\n"
+ + ") comment '测试中文 table comment' with (\n"
+ " 'connector' = 'kafka', \n"
+ " 'kafka.topic' = 'log.test'\n"
+ ")\n";
@@ -249,6 +251,12 @@ public class SqlDdlToOperationConverterTest extends SqlToOperationConverterTestBase
DataTypes.INT(),
DataTypes.STRING()
});
+ // verify chinese characters both in table comment and column comments
+ assertThat(catalogTable.getComment()).isEqualTo("测试中文 table comment");
+ assertThat(catalogTable).isInstanceOf(ResolvedCatalogTable.class);
+        List<Column> cols = ((ResolvedCatalogTable) catalogTable).getResolvedSchema().getColumns();
+ assertThat(cols.get(0).getComment().get()).isEqualTo("测试utf8中文");
+ assertThat(cols.get(3).getComment().get()).isEqualTo("测试_utf8中文");
}
@Test
@@ -1326,19 +1334,18 @@ public class SqlDdlToOperationConverterTest extends SqlToOperationConverterTestBase
// add a single column
Operation operation =
-                parse(
-                        "alter table if exists tb1 add h double not null comment 'h is double not null'");
+                parse("alter table if exists tb1 add h double not null comment 'utf 测试中文'");
assertThat(operation.asSummaryString())
.isEqualTo(
"ALTER TABLE IF EXISTS cat1.db1.tb1\n"
-                                + "  ADD `h` DOUBLE NOT NULL COMMENT 'h is double not null' ");
+                                + "  ADD `h` DOUBLE NOT NULL COMMENT 'utf 测试中文' ");
assertAlterTableSchema(
operation,
tableIdentifier,
Schema.newBuilder()
.fromSchema(originalSchema)
.column("h", DataTypes.DOUBLE().notNull())
- .withComment("h is double not null")
+ .withComment("utf 测试中文")
.build());
// add multiple columns with pk, computed/metadata column