Github user gatorsmile commented on a diff in the pull request:
https://github.com/apache/spark/pull/20397#discussion_r163921087
--- Diff:
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceRDD.scala
---
@@ -22,24 +22,24 @@ import scala.reflect.ClassTag
import org.apache.spark.{InterruptibleIterator, Partition, SparkContext,
TaskContext}
import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.sources.v2.reader.ReadTask
+import org.apache.spark.sql.sources.v2.reader.DataReaderFactory
-class DataSourceRDDPartition[T : ClassTag](val index: Int, val readTask: ReadTask[T])
+class DataSourceRDDPartition[T : ClassTag](val index: Int, val readerFactory: DataReaderFactory[T])
  extends Partition with Serializable
class DataSourceRDD[T: ClassTag](
sc: SparkContext,
-    @transient private val readTasks: java.util.List[ReadTask[T]])
+    @transient private val readerFactories: java.util.List[DataReaderFactory[T]])
extends RDD[T](sc, Nil) {
override protected def getPartitions: Array[Partition] = {
- readTasks.asScala.zipWithIndex.map {
+ readerFactories.asScala.zipWithIndex.map {
case (readTask, index) => new DataSourceRDDPartition(index, readTask)
--- End diff ---
readTask -> readerFactory (rename the match-case variable on the highlighted line as well, for consistency with the renamed `readerFactories` list)
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]