[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239688529 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala ## @@ -999,6 +1006,121 @@ class Table( new OverWindowedTable(this, overWindows.toArray) } + /** +* Performs a flatMap operation with an user-defined table function. +* +* Scala Example: +* {{{ +* class MyFlatMapFunction extends TableFunction[Row] { +* def eval(str : String) { +* if (str.contains("#")) { +* val splits = str.split("#") +* collect(Row.of(splits(0), splits(1))) +* } +* } +* +* def getResultType(signature: Array[Class[_]]): TypeInformation[_] = +* Types.ROW(Types.STRING, Types.STRING) +* } +* +* val func = new MyFlatMapFunction +* table.flatMap(func('c)).as('a, 'b) +* }}} +* +* Java Example: +* {{{ +* class MyFlatMapFunction extends TableFunction { +* public void eval(String str) { +* if (str.contains("#")) { +* String[] splits = str.split("#"); +* collect(Row.of(splits[0], splits[1])); +* } +* } +* +* public TypeInformation getResultType(Class[] signature) { +* return Types.ROW(Types.STRING, Types.STRING); +* } +* } +* +* TableFunction func = new MyFlatMapFunction(); +* tableEnv.registerFunction("func", func); +* table.flatMap("func(c)").as("a, b"); +* }}} +*/ + def flatMap(tableFunction: Expression): Table = { +unwrap(tableFunction, tableEnv) match { + case _: TableFunctionCall => + case _ => throw new ValidationException("Only TableFunction can be used in flatMap.") +} + +// rename output fields names to avoid ambiguous name +val originalCall = UserDefinedFunctionUtils.createLogicalFunctionCall(tableEnv, tableFunction) +val originalOutputFieldNames = originalCall.output.map(_.name) +val usedFieldNames: mutable.HashSet[String] = + logicalPlan.output.foldLeft(mutable.HashSet[String]()) { (s, o) => s += o.name } + +val newOutputFieldNames = originalOutputFieldNames.zipWithIndex.map 
{ case (name, index) => + val newName = if (usedFieldNames.contains(name)) { +index + "_" + usedFieldNames.mkString("_") + "_" + name Review comment: OK. Make sense. This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239308521 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala ## @@ -795,7 +795,7 @@ object UserDefinedFunctionUtils { "define table function followed by some Alias.") } -val functionCall: LogicalTableFunctionCall = unwrap(ExpressionParser.parseExpression(udtf)) +val functionCall: LogicalTableFunctionCall = unwrap(udtfExpr) Review comment: Done. This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239308496 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/call.scala ## @@ -328,7 +340,9 @@ case class TableFunctionCall( * @return this table function call */ private[flink] def as(aliasList: Option[Seq[String]]): TableFunctionCall = { -this.aliases = aliasList +if (aliasList.isDefined) { Review comment: Agree. Done. This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239308474 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/call.scala ## @@ -320,6 +320,18 @@ case class TableFunctionCall( override private[flink] def children: Seq[Expression] = parameters + def as(fields: Symbol*): TableFunctionCall = { +this.aliases = Some(fields.map(_.name)) +this + } + + def as(fields: String): TableFunctionCall = { +val fieldExprs = ExpressionParser Review comment: Make sense. Done. This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239302128 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala ## @@ -999,6 +1001,131 @@ class Table( new OverWindowedTable(this, overWindows.toArray) } + /** +* Performs a flatMap operation with an user-defined table function. +* +* Scala Example: +* {{{ +* class MyFlatMapFunction extends TableFunction[Row] { +* def eval(str : String) { +* if (str.contains("#")) { +* val splits = str.split("#") +* collect(Row.of(splits(0), splits(1))) +* } +* } +* +* def getResultType(signature: Array[Class[_]]): TypeInformation[_] = +* Types.ROW(Types.STRING, Types.STRING) +* } +* +* val func = new MyFlatMapFunction() Review comment: Done This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239301814 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala ## @@ -21,7 +21,7 @@ import org.apache.calcite.rel.RelNode import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.operators.join.JoinType import org.apache.flink.table.calcite.{FlinkRelBuilder, FlinkTypeFactory} -import org.apache.flink.table.expressions.{Alias, Asc, Expression, ExpressionParser, Ordering, ResolvedFieldReference, UnresolvedAlias, UnresolvedFieldReference, WindowProperty} +import org.apache.flink.table.expressions.{Alias, Asc, Expression, ExpressionParser, Ordering, ResolvedFieldReference, TableFunctionCall, UnresolvedAlias, UnresolvedFieldReference, WindowProperty} Review comment: Done This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239318443 ## File path: flink-libraries/flink-table/src/test/scala/org/apache/flink/table/runtime/batch/table/FlatMapITCase.scala ## @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.table.runtime.batch.table + +import org.apache.flink.api.scala._ +import org.apache.flink.table.api.TableEnvironment +import org.apache.flink.table.api.scala._ +import org.apache.flink.table.runtime.utils.TableProgramsClusterTestBase +import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode +import org.apache.flink.table.utils.TableFunc2 +import org.apache.flink.test.util.MultipleProgramsTestBase.TestExecutionMode +import org.apache.flink.test.util.TestBaseUtils +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.Parameterized + +import scala.collection.JavaConverters._ +import scala.collection.mutable + +@RunWith(classOf[Parameterized]) +class FlatMapITCase( +mode: TestExecutionMode, +configMode: TableConfigMode) + extends TableProgramsClusterTestBase(mode, configMode) { + + private def testData(env: ExecutionEnvironment): DataSet[(Int, Long, String)] = { +val data = new mutable.MutableList[(Int, Long, String)] +data.+=((1, 1L, "Jack#22")) +data.+=((2, 2L, "John#333")) +data.+=((3, 2L, "Anna#")) +data.+=((4, 3L, "nosharp#5")) +env.fromCollection(data) + } + + @Test + def testFlatMap(): Unit = { +val env = ExecutionEnvironment.getExecutionEnvironment +val tEnv = TableEnvironment.getTableEnvironment(env) + +val func2 = new TableFunc2 +val results = testData(env).toTable(tEnv, 'a, 'b, 'c) + .flatMap(func2('c)) Review comment: Agree. Done. This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239308968 ## File path: flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/stream/table/validation/FlatMapValidationTest.scala ## @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.table.api.stream.table.validation + +import org.apache.flink.api.scala._ +import org.apache.flink.table.api.ValidationException +import org.apache.flink.table.api.scala._ +import org.apache.flink.table.expressions.utils.Func15 +import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvg +import org.apache.flink.table.utils.{TableFunc0, TableTestBase} +import org.junit.Test + +class FlatMapValidationTest extends TableTestBase { + + @Test(expected = classOf[ValidationException]) + def testInvalidMapFunctionTypeAggregation(): Unit = { +val util = streamTestUtil() +util.addTable[(Int)]( + "MyTable", 'int) + .flatMap('int.sum) // do not support AggregateFunction as input Review comment: Done This is an automated message from the Apache Git Service. 
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239302052 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala ## @@ -79,7 +80,8 @@ class Table( * @param udtfCall A String expression of the TableFunction call. */ def this(tableEnv: TableEnvironment, udtfCall: String) { -this(tableEnv, UserDefinedFunctionUtils.createLogicalFunctionCall(tableEnv, udtfCall)) +this(tableEnv, UserDefinedFunctionUtils.createLogicalFunctionCall( + tableEnv, ExpressionParser.parseExpression(udtfCall))) Review comment: Done This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239307495 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala ## @@ -999,6 +1001,131 @@ class Table( new OverWindowedTable(this, overWindows.toArray) } + /** +* Performs a flatMap operation with an user-defined table function. +* +* Scala Example: +* {{{ +* class MyFlatMapFunction extends TableFunction[Row] { +* def eval(str : String) { +* if (str.contains("#")) { +* val splits = str.split("#") +* collect(Row.of(splits(0), splits(1))) +* } +* } +* +* def getResultType(signature: Array[Class[_]]): TypeInformation[_] = +* Types.ROW(Types.STRING, Types.STRING) +* } +* +* val func = new MyFlatMapFunction() +* table.flatMap(func('c)).as('a, 'b) +* }}} +* +* Java Example: +* {{{ +* class MyFlatMapFunction extends TableFunction { +* public void eval(String str) { +* if (str.contains("#")) { +* String[] splits = str.split("#"); +* collect(Row.of(splits[0], splits[1])); +* } +* } +* +* public TypeInformation getResultType(Class[] signature) { +* return Types.ROW(Types.STRING, Types.STRING); +* } +* } +* +* TableFunction func = new MyFlatMapFunction(); +* tableEnv.registerFunction("func", func); +* table.flatMap("func(c)").as("a, b"); +* }}} +*/ + def flatMap(tableFunction: Expression): Table = { +unwrap(tableFunction, tableEnv) match { + case _: TableFunctionCall => + case _ => throw new ValidationException("Only TableFunction can be used in flatMap.") +} + +// rename output fields names to avoid ambiguous name +val originalCall = UserDefinedFunctionUtils.createLogicalFunctionCall(tableEnv, tableFunction) +val originalOutputFieldNames = originalCall.output.map(_.name) +val usedFieldNames: mutable.HashSet[String] = mutable.HashSet() +logicalPlan.output.map(_.name).foreach(usedFieldNames.add) + +var i: Int = 0 +def findNewName(n: String): String = { + val newName = 
n + "_" + i + i += 1 + if (usedFieldNames.contains(newName)) { +findNewName(n) + } else { +usedFieldNames.add(newName) +newName + } +} + +val newOutputFieldNames = originalOutputFieldNames.map(n => + if (usedFieldNames.contains(n)) { +findNewName(n) + } else { +n + } +) Review comment: Good suggestion. Done. This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239307765 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/call.scala ## @@ -320,6 +320,18 @@ case class TableFunctionCall( override private[flink] def children: Seq[Expression] = parameters + def as(fields: Symbol*): TableFunctionCall = { Review comment: Done This is an automated message from the Apache Git Service. To respond to the message, please log on GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services
[GitHub] dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API
dianfu commented on a change in pull request #7196: [FLINK-10974] [table] Add support for flatMap to table API URL: https://github.com/apache/flink/pull/7196#discussion_r239302508 ## File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala ## @@ -999,6 +1001,131 @@ class Table( new OverWindowedTable(this, overWindows.toArray) } + /** +* Performs a flatMap operation with an user-defined table function. +* +* Scala Example: +* {{{ +* class MyFlatMapFunction extends TableFunction[Row] { +* def eval(str : String) { +* if (str.contains("#")) { +* val splits = str.split("#") +* collect(Row.of(splits(0), splits(1))) +* } +* } +* +* def getResultType(signature: Array[Class[_]]): TypeInformation[_] = +* Types.ROW(Types.STRING, Types.STRING) +* } +* +* val func = new MyFlatMapFunction() +* table.flatMap(func('c)).as('a, 'b) +* }}} +* +* Java Example: +* {{{ +* class MyFlatMapFunction extends TableFunction { +* public void eval(String str) { +* if (str.contains("#")) { +* String[] splits = str.split("#"); +* collect(Row.of(splits[0], splits[1])); +* } +* } +* +* public TypeInformation getResultType(Class[] signature) { +* return Types.ROW(Types.STRING, Types.STRING); +* } +* } +* +* TableFunction func = new MyFlatMapFunction(); +* tableEnv.registerFunction("func", func); +* table.flatMap("func(c)").as("a, b"); +* }}} +*/ + def flatMap(tableFunction: Expression): Table = { +unwrap(tableFunction, tableEnv) match { + case _: TableFunctionCall => + case _ => throw new ValidationException("Only TableFunction can be used in flatMap.") +} + +// rename output fields names to avoid ambiguous name +val originalCall = UserDefinedFunctionUtils.createLogicalFunctionCall(tableEnv, tableFunction) +val originalOutputFieldNames = originalCall.output.map(_.name) +val usedFieldNames: mutable.HashSet[String] = mutable.HashSet() +logicalPlan.output.map(_.name).foreach(usedFieldNames.add) Review comment: Good suggestion, done. 
This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org With regards, Apache Git Services