jonvex commented on code in PR #13503:
URL: https://github.com/apache/hudi/pull/13503#discussion_r2240770535


##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/streaming/HoodieStreamSourceV1.scala:
##########
@@ -158,21 +173,31 @@ class HoodieStreamSourceV1(sqlContext: SQLContext,
           DataSourceReadOptions.END_COMMIT.key -> endOffset.offsetCommitTime,
           INCREMENTAL_READ_HANDLE_HOLLOW_COMMIT.key -> 
hollowCommitHandlingMode.name
         )
-
-        val rdd = tableType match {
-          case HoodieTableType.COPY_ON_WRITE =>
-            val serDe = sparkAdapter.createSparkRowSerDe(schema)
-            new IncrementalRelationV1(sqlContext, incParams, Some(schema), 
metaClient)
-              .buildScan()
-              .map(serDe.serializeRow)
-          case HoodieTableType.MERGE_ON_READ =>
-            val requiredColumns = schema.fields.map(_.name)
-            new MergeOnReadIncrementalRelationV1(sqlContext, incParams, 
metaClient, Some(schema))
-              .buildScan(requiredColumns, Array.empty[Filter])
-              .asInstanceOf[RDD[InternalRow]]
-          case _ => throw new IllegalArgumentException(s"UnSupport tableType: 
$tableType")
+        if (useNewParquetFileFormat) {
+          val relation = if (tableType == HoodieTableType.COPY_ON_WRITE) {
+            new 
HoodieCopyOnWriteIncrementalHadoopFsRelationFactoryV1(sqlContext, metaClient, 
incParams, Option(schema), false)
+              .build()
+          } else {
+            new 
HoodieMergeOnReadIncrementalHadoopFsRelationFactoryV1(sqlContext, metaClient, 
incParams, Option(schema), false)
+              .build()
+          }
+          
FileFormatUtilsForFileGroupReader.createStreamingDataFrame(sqlContext, 
relation, schema)
+        } else {
+          val rdd = tableType match {
+            case HoodieTableType.COPY_ON_WRITE =>
+              val serDe = sparkAdapter.createSparkRowSerDe(schema)
+              new IncrementalRelationV1(sqlContext, incParams, Some(schema), 
metaClient)
+                .buildScan()
+                .map(serDe.serializeRow)
+            case HoodieTableType.MERGE_ON_READ =>
+              val requiredColumns = schema.fields.map(_.name)
+              new MergeOnReadIncrementalRelationV1(sqlContext, incParams, 
metaClient, Some(schema))
+                .buildScan(requiredColumns, Array.empty[Filter])
+                .asInstanceOf[RDD[InternalRow]]
+            case _ => throw new IllegalArgumentException(s"UnSupport 
tableType: $tableType")
+          }
+          sqlContext.internalCreateDataFrame(rdd, schema, isStreaming = true)

Review Comment:
   Added tests.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to