Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22009#discussion_r211644387
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/RateControlMicroBatchReadSupport.scala ---
    @@ -17,21 +17,15 @@
     
     package org.apache.spark.sql.execution.streaming.sources
     
    -import org.apache.spark.sql.catalyst.InternalRow
    -import org.apache.spark.sql.sources.v2.writer.{DataSourceWriter, DataWriterFactory, WriterCommitMessage}
    -import org.apache.spark.sql.sources.v2.writer.streaming.StreamWriter
    +import org.apache.spark.sql.sources.v2.reader.streaming.{MicroBatchReadSupport, Offset}
     
    -/**
    - * A [[DataSourceWriter]] used to hook V2 stream writers into a microbatch plan. It implements
    - * the non-streaming interface, forwarding the batch ID determined at construction to a wrapped
    - * streaming writer.
    - */
    -class MicroBatchWriter(batchId: Long, val writer: StreamWriter) extends DataSourceWriter {
    -  override def commit(messages: Array[WriterCommitMessage]): Unit = {
    -    writer.commit(batchId, messages)
    -  }
    +// A special `MicroBatchReadSupport` that can get latestOffset with a start offset.
    +trait RateControlMicroBatchReadSupport extends MicroBatchReadSupport {
    --- End diff ---
    
    this is under `org.apache.spark.sql.execution`, so it's an internal API.
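
    For context, a rough sketch of what such a trait could look like against
    the Spark 2.4-era data source v2 API. The diff above only shows the trait
    declaration, so the body below (the overridden no-argument method and the
    `latestOffset(start: Offset)` signature) is an assumption inferred from
    the code comment, not necessarily the PR's exact contents:

        package org.apache.spark.sql.execution.streaming.sources

        import org.apache.spark.sql.sources.v2.reader.streaming.{MicroBatchReadSupport, Offset}

        // A special `MicroBatchReadSupport` that can get latestOffset with a start offset.
        trait RateControlMicroBatchReadSupport extends MicroBatchReadSupport {

          // Assumed behavior: the inherited no-argument variant is disabled,
          // since a rate-controlled source needs the previous end offset to
          // decide how much data to admit into the next batch.
          override def latestOffset(): Offset = {
            throw new IllegalStateException(
              "latestOffset(start: Offset) should be called instead of this method")
          }

          // Hypothetical signature: returns the latest available offset given
          // the start offset of the next batch, letting implementations apply
          // rate limits relative to `start`.
          def latestOffset(start: Offset): Offset
        }

    Because the trait lives under `org.apache.spark.sql.execution`, it is not
    part of the public API surface, which is the point of the comment above.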


---
