This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 2893f3f597ea3574636d0376bd7823b547fd2217
Author: JingsongLi <lzljs3620...@aliyun.com>
AuthorDate: Wed Jul 17 20:20:22 2019 +0800

    [FLINK-13287][table-planner] Support Reinterpret cast call in blink planner
---
 .../expressions/PlannerExpressionConverter.scala   |  8 ++++
 .../flink/table/expressions/Reinterpret.scala      | 45 ++++++++++++++++++++++
 2 files changed, 53 insertions(+)

diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/PlannerExpressionConverter.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/PlannerExpressionConverter.scala
index 208cad9..8b5dada 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/PlannerExpressionConverter.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/PlannerExpressionConverter.scala
@@ -56,6 +56,14 @@ class PlannerExpressionConverter private extends ApiExpressionVisitor[PlannerExp
           fromDataTypeToLegacyInfo(
             children(1).asInstanceOf[TypeLiteralExpression].getOutputDataType))
 
+      case REINTERPRET_CAST =>
+        assert(children.size == 3)
+        Reinterpret(
+          children.head.accept(this),
+          fromDataTypeToLegacyInfo(
+            children(1).asInstanceOf[TypeLiteralExpression].getOutputDataType),
+          getValue[Boolean](children(2).accept(this)))
+
       case WINDOW_START =>
         assert(children.size == 1)
         val windowReference = translateWindowReference(children.head)
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/Reinterpret.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/Reinterpret.scala
new file mode 100644
index 0000000..530fd3c
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/expressions/Reinterpret.scala
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.expressions
+
+import org.apache.flink.api.common.typeinfo.TypeInformation
+import org.apache.flink.table.types.TypeInfoLogicalTypeConverter.fromTypeInfoToLogicalType
+import org.apache.flink.table.typeutils.TypeCoercion
+import org.apache.flink.table.validate._
+
+case class Reinterpret(child: PlannerExpression, resultType: TypeInformation[_],
+                       checkOverflow: Boolean) extends UnaryExpression {
+
+  override def toString = s"$child.reinterpret($resultType)"
+
+  override private[flink] def makeCopy(anyRefs: Array[AnyRef]): this.type = {
+    val child: PlannerExpression = anyRefs.head.asInstanceOf[PlannerExpression]
+    copy(child, resultType).asInstanceOf[this.type]
+  }
+
+  override private[flink] def validateInput(): ValidationResult = {
+    if (TypeCoercion.canReinterpret(
+      fromTypeInfoToLogicalType(child.resultType), fromTypeInfoToLogicalType(resultType))) {
+      ValidationSuccess
+    } else {
+      ValidationFailure(s"Unsupported reinterpret from ${child.resultType} to $resultType")
+    }
+  }
+}
+
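For context: the validateInput check in the new Reinterpret expression delegates to
TypeCoercion.canReinterpret, which only allows relabeling a value's type when the
underlying representation stays the same. A minimal standalone Scala sketch of that
idea (the SimpleType names and allowed pairs below are hypothetical, not Flink's
actual TypeCoercion rules):

object ReinterpretSketch {
  sealed trait SimpleType
  case object LongType extends SimpleType
  case object TimestampType extends SimpleType
  case object StringType extends SimpleType

  // A reinterpret cast keeps the stored value and only relabels its type,
  // so it is restricted to types sharing the same physical representation.
  def canReinterpret(from: SimpleType, to: SimpleType): Boolean = (from, to) match {
    case _ if from == to                                        => true
    case (LongType, TimestampType) | (TimestampType, LongType)  => true
    case _                                                      => false
  }

  def main(args: Array[String]): Unit = {
    println(canReinterpret(LongType, TimestampType)) // true
    println(canReinterpret(StringType, LongType))    // false
  }
}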
