hequn8128 commented on a change in pull request #9370: [FLINK-13594][python]
Improve the 'from_element' method of flink python api to apply to blink planner
URL: https://github.com/apache/flink/pull/9370#discussion_r310960019
##########
File path:
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/util/python/PythonTableUtils.scala
##########
@@ -24,61 +24,28 @@ import java.time.{LocalDate, LocalDateTime, LocalTime}
import java.util.TimeZone
import java.util.function.BiConsumer
-import org.apache.flink.api.common.functions.MapFunction
+import org.apache.flink.api.common.ExecutionConfig
+import org.apache.flink.api.common.io.InputFormat
import org.apache.flink.api.common.typeinfo.{BasicArrayTypeInfo, BasicTypeInfo, PrimitiveArrayTypeInfo, TypeInformation}
-import org.apache.flink.api.java.DataSet
+import org.apache.flink.api.java.io.CollectionInputFormat
import org.apache.flink.api.java.typeutils.{MapTypeInfo, ObjectArrayTypeInfo, RowTypeInfo}
-import org.apache.flink.streaming.api.datastream.DataStream
-import org.apache.flink.table.api.java.{BatchTableEnvironment, StreamTableEnvironment}
-import org.apache.flink.table.api.{Table, Types}
+import org.apache.flink.core.io.InputSplit
+import org.apache.flink.table.api.{TableSchema, Types}
+import org.apache.flink.table.sources.InputFormatTableSource
import org.apache.flink.types.Row
object PythonTableUtils {
- /**
- * Converts the given [[DataStream]] into a [[Table]].
- *
- * The schema of the [[Table]] is derived from the specified schemaString.
- *
- * @param tableEnv The table environment.
- * @param dataStream The [[DataStream]] to be converted.
- * @param dataType The type information of the table.
- * @return The converted [[Table]].
- */
- def fromDataStream(
- tableEnv: StreamTableEnvironment,
- dataStream: DataStream[Array[Object]],
- dataType: TypeInformation[Row]): Table = {
- val convertedDataStream = dataStream.map(
- new MapFunction[Array[Object], Row] {
- override def map(value: Array[Object]): Row =
- convertTo(dataType).apply(value).asInstanceOf[Row]
- }).returns(dataType.asInstanceOf[TypeInformation[Row]])
-
- tableEnv.fromDataStream(convertedDataStream)
- }
-
- /**
- * Converts the given [[DataSet]] into a [[Table]].
- *
- * The schema of the [[Table]] is derived from the specified schemaString.
- *
- * @param tableEnv The table environment.
- * @param dataSet The [[DataSet]] to be converted.
- * @param dataType The type information of the table.
- * @return The converted [[Table]].
- */
- def fromDataSet(
- tableEnv: BatchTableEnvironment,
- dataSet: DataSet[Array[Object]],
- dataType: TypeInformation[Row]): Table = {
- val convertedDataSet = dataSet.map(
- new MapFunction[Array[Object], Row] {
- override def map(value: Array[Object]): Row =
- convertTo(dataType).apply(value).asInstanceOf[Row]
- }).returns(dataType.asInstanceOf[TypeInformation[Row]])
-
- tableEnv.fromDataSet(convertedDataSet)
+ def getInputFormat(
Review comment:
As the method returns a `CollectionInputFormat`, how about renaming the method
and changing its signature to `getCollectionInputFormat(xxx): CollectionInputFormat[Row]`?
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services