This is an automated email from the ASF dual-hosted git repository.

snuyanzin pushed a commit to branch release-2.2
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-2.2 by this push:
     new 81b1469bc93 [FLINK-38950][table] `SqlNodeConvertUtils` should use validated query output
81b1469bc93 is described below

commit 81b1469bc93dd589904b1e91ff48967033f9f387
Author: Sebastien Pereira <[email protected]>
AuthorDate: Thu Jan 22 00:42:27 2026 +0100

    [FLINK-38950][table] `SqlNodeConvertUtils` should use validated query output
    
    ---------
    
    Co-authored-by: Sebastien Pereira <[email protected]>
    Co-authored-by: Sergey Nuyanzin <[email protected]>
---
 .../operations/converters/SqlNodeConvertUtils.java |  6 +++--
 .../table/planner/catalog/CatalogViewITCase.scala  | 30 ++++++++++++++++++++++
 2 files changed, 34 insertions(+), 2 deletions(-)

diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlNodeConvertUtils.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlNodeConvertUtils.java
index db9140a084a..57ad2c281da 100644
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlNodeConvertUtils.java
+++ b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlNodeConvertUtils.java
@@ -77,15 +77,17 @@ class SqlNodeConvertUtils {
         // This bug is fixed in CALCITE-3877 of Calcite 1.23.0.
         String originalQuery = context.toQuotedSqlString(query);
         SqlNode validateQuery = context.getSqlValidator().validate(query);
+        // FLINK-38950: SqlValidator.validate() mutates its input parameter. Always use the
+        // returned validateQuery instead of the mutated query for all subsequent operations.
 
         // Check name is unique.
         // Don't rely on the calcite because if the field names are duplicate, calcite will add
         // index to identify the duplicate names.
         SqlValidatorNamespace validatedNamespace =
                 context.getSqlValidator().getNamespace(validateQuery);
-        validateDuplicatedColumnNames(query, viewFields, validatedNamespace);
+        validateDuplicatedColumnNames(validateQuery, viewFields, validatedNamespace);
 
-        String expandedQuery = context.toQuotedSqlString(query);
+        String expandedQuery = context.toQuotedSqlString(validateQuery);
 
         PlannerQueryOperation operation = toQueryOperation(validateQuery, context);
         ResolvedSchema schema = operation.getResolvedSchema();
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/catalog/CatalogViewITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/catalog/CatalogViewITCase.scala
index 061d2053833..0d4560eba95 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/catalog/CatalogViewITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/catalog/CatalogViewITCase.scala
@@ -317,6 +317,7 @@ class CatalogViewITCase(isStreamingMode: Boolean) extends TableITCaseBase {
           .column("a", DataTypes.INT())
           .column("b", DataTypes.STRING())
           .column("c", DataTypes.INT())
+          .column("ts", DataTypes.TIMESTAMP())
           .build())
       .sink()
       .build()
@@ -521,6 +522,35 @@ class CatalogViewITCase(isStreamingMode: Boolean) extends TableITCaseBase {
     )
   }
 
+  @TestTemplate
+  def testShowCreateViewWithTumbleWindow(): Unit = {
+    // FLINK-38950: Verify window TVF with ORDER BY generates correct expanded SQL
+    // without duplicating the ORDER BY clause in the catalog view definition.
+    tableEnv.createTable("t1", buildTableDescriptor())
+    val viewWithInnerJoinDDL: String =
+      s"""CREATE VIEW tumbleWindowViewWithOrderBy AS
+         |SELECT a
+         |FROM TUMBLE(TABLE t1, DESCRIPTOR(ts), INTERVAL '1' MINUTE)
+         |ORDER BY ts""".stripMargin
+    tableEnv.executeSql(viewWithInnerJoinDDL)
+    val showCreateInnerJoinViewResult: util.List[Row] = CollectionUtil.iteratorToList(
+      tableEnv
+        .executeSql("SHOW CREATE VIEW tumbleWindowViewWithOrderBy")
+        .collect()
+    )
+    assertThatList(showCreateInnerJoinViewResult).containsExactly(
+      Row.of(
+        s"""CREATE VIEW 
`default_catalog`.`default_database`.`tumbleWindowViewWithOrderBy` (
+           |  `a`
+           |)
+           |AS SELECT `EXPR$$0`.`a`
+           |FROM TABLE(TUMBLE((SELECT `t1`.`a`, `t1`.`b`, `t1`.`c`, `t1`.`ts`
+           |FROM `default_catalog`.`default_database`.`t1` AS `t1`), DESCRIPTOR(`ts`), INTERVAL '1' MINUTE)) AS `EXPR$$0`
+           |ORDER BY `EXPR$$0`.`ts`
+           |""".stripMargin
+      )
+    )
+  }
 }
 
 object CatalogViewITCase {
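
Editor's note on the pattern behind this fix: the new comment in SqlNodeConvertUtils explains that Calcite's SqlValidator.validate() mutates the node it is given while also returning a rewritten node, so every later step should read from the returned node. The minimal sketch below illustrates that caller-side pattern in isolation; the Node and Validator classes here are invented stand-ins for illustration only, not Flink or Calcite APIs.

    // Illustrative sketch only (not Flink/Calcite code): a validator that both
    // mutates its argument and returns a rewritten node, showing why callers
    // should thread the returned object through all subsequent steps.
    public final class ValidatedOutputSketch {

        /** Stand-in for a SQL parse tree node. */
        static final class Node {
            String sql;
            Node(String sql) { this.sql = sql; }
        }

        /** Stand-in validator: rewrites its input in place and returns an expanded copy. */
        static final class Validator {
            Node validate(Node input) {
                input.sql = input.sql + " /* rewritten during validation */";
                return new Node(input.sql + " /* fully expanded */");
            }
        }

        public static void main(String[] args) {
            Node query = new Node("SELECT a FROM t1");
            Node validated = new Validator().validate(query);

            // Correct: derive column checks and the expanded SQL from `validated`.
            System.out.println("use this:  " + validated.sql);

            // Hazard the commit guards against: `query` no longer matches the
            // statement originally passed in, so reusing it is misleading.
            System.out.println("not this:  " + query.sql);
        }
    }

In the patch above, the same shift happens with validateQuery: both the duplicate-column check and the expanded-query serialization via toQuotedSqlString() now read from the validated node rather than the mutated input.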
