twalthr commented on code in PR #19490:
URL: https://github.com/apache/flink/pull/19490#discussion_r852734551
##########
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentTest.scala:
##########
@@ -215,6 +215,87 @@ class TableEnvironmentTest {
TableTestUtil.replaceNodeIdInOperator(TableTestUtil.replaceStreamNodeId(actual)))
}
+ @Test
+ def testCreateTableWithMultipleColumnsFromSameMetadataKey(): Unit = {
+ assertThatThrownBy(() =>
+ tableEnv.executeSql(
+ """
+ |CREATE TABLE source (
+ | a INT METADATA,
+ | b INT METADATA FROM 'a'
+ |) WITH (
+ | 'connector' = 'COLLECTION'
+ |)
+ |""".stripMargin)).satisfies(
+ FlinkAssertions.anyCauseMatches(
+ classOf[ValidationException],
+ "The column `a` and `b` in the table are both from the same metadata
key 'a'. " +
+ "Please specify one of the columns as the metadata column and use
the computed column" +
+ " syntax to specify the others."))
+ }
+
+ @Test
+ def testCreateTableLikeWithMultipleColumnsFromSameMetadataKey(): Unit = {
+ tableEnv.executeSql(
+ """
+ |CREATE TABLE source (
+ | a INT METADATA
+ |) WITH (
+ | 'connector' = 'COLLECTION'
+ |)
+ |""".stripMargin)
+ assertThatThrownBy(() =>
+ tableEnv.executeSql(
+ """
+ |CREATE TABLE like_source (
+ | b INT METADATA FROM 'a'
+ |)
+ |WITH (
+ | 'connector' = 'COLLECTION'
+ |) LIKE source (
+ | INCLUDING METADATA
+ |)
+ |""".stripMargin
+ )).satisfies(FlinkAssertions.anyCauseMatches(
+ "The column `a` and `b` in the table are both from the same metadata key
'a'. " +
+ "Please specify one of the columns as the metadata column and use the
computed column" +
+ " syntax to specify the others."
+ ))
+ }
+
+ @Test
+ def testCreateTableLikeExcludeColumnsFromSameMetadataKey(): Unit = {
+ tableEnv.executeSql(
+ """
+ |CREATE TABLE source (
+ | a INT METADATA
+ |) WITH (
+ | 'connector' = 'COLLECTION'
+ |)
+ |""".stripMargin)
+ tableEnv.executeSql(
+ """
+ |CREATE TABLE like_source (
+ | b INT METADATA FROM 'a'
+ |)
+ |WITH (
+ | 'connector' = 'COLLECTION'
+ |) LIKE source (
+ | EXCLUDING METADATA
+ |)
+ |""".stripMargin
+ )
+ assertEquals(
Review Comment:
I'm not sure testing this is still necessary — shouldn't this already be
covered by existing tests?
##########
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/connectors/DynamicSourceUtils.java:
##########
@@ -168,26 +165,40 @@ public static void prepareDynamicSource(
// TODO: isUpsertSource(), isSourceChangeEventsDuplicate()
/**
- * Returns a list of required metadata keys. Ordered by the iteration
order of {@link
+ * Returns a list of required metadata columns. Ordered by the iteration
order of {@link
* SupportsReadingMetadata#listReadableMetadata()}.
*
* <p>This method assumes that source and schema have been validated via
{@link
* #prepareDynamicSource(String, ResolvedCatalogTable, DynamicTableSource,
boolean,
* ReadableConfig)}.
*/
- public static List<String> createRequiredMetadataKeys(
+ public static List<MetadataColumn> createRequiredMetadataColumns(
ResolvedSchema schema, DynamicTableSource source) {
final List<MetadataColumn> metadataColumns =
extractMetadataColumns(schema);
- final Set<String> requiredMetadataKeys =
- metadataColumns.stream()
- .map(c -> c.getMetadataKey().orElse(c.getName()))
- .collect(Collectors.toSet());
+ Map<String, MetadataColumn> metadataKeysToMetadataColumns = new
HashMap<>();
+
+ for (MetadataColumn column : metadataColumns) {
+ String metadataKey =
column.getMetadataKey().orElse(column.getName());
+ if (metadataKeysToMetadataColumns.containsKey(metadataKey)) {
+ throw new ValidationException(
Review Comment:
We don't need this check here; `DefaultSchemaResolver` should be the only
location for this validation.
##########
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentTest.scala:
##########
@@ -215,6 +215,87 @@ class TableEnvironmentTest {
TableTestUtil.replaceNodeIdInOperator(TableTestUtil.replaceStreamNodeId(actual)))
}
+ @Test
+ def testCreateTableWithMultipleColumnsFromSameMetadataKey(): Unit = {
Review Comment:
`TableEnvironmentTest` is meant for testing code located in the
`TableEnvironment` implementation. Could you move these tests to
`org.apache.flink.table.planner.plan.batch.sql.TableScanTest`? Most
DDL-related tests, such as `testDDLWithComputedColumn`, are currently
located there.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]