GitHub user rdblue commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22190#discussion_r212121224
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/MicroBatchWriteSupport.scala ---
    @@ -18,27 +18,38 @@
     package org.apache.spark.sql.execution.streaming.sources
     
     import org.apache.spark.sql.catalyst.InternalRow
    -import org.apache.spark.sql.sources.v2.writer.{BatchWriteSupport, DataWriter, DataWriterFactory, WriterCommitMessage}
    -import org.apache.spark.sql.sources.v2.writer.streaming.{StreamingDataWriterFactory, StreamingWriteSupport}
    +import org.apache.spark.sql.sources.v2.DataSourceOptions
    +import org.apache.spark.sql.sources.v2.writer.{BatchWriteSupport, DataWriter, DataWriterFactory, WriteConfig, WriterCommitMessage}
    +import org.apache.spark.sql.sources.v2.writer.streaming.{StreamingDataWriterFactory, StreamingWriteConfig, StreamingWriteSupport}
    +import org.apache.spark.sql.types.StructType
     
     /**
      * A [[BatchWriteSupport]] used to hook V2 stream writers into a microbatch plan. It implements
      * the non-streaming interface, forwarding the epoch ID determined at construction to a wrapped
      * streaming write support.
      */
    -class MicroBatchWritSupport(eppchId: Long, val writeSupport: StreamingWriteSupport)
    +class MicroBatchWriteSupport(eppchId: Long, val writeSupport: StreamingWriteSupport)
    --- End diff ---
    
    This fixes a typo in the class name: `MicroBatchWritSupport` is renamed to `MicroBatchWriteSupport`.
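    
    For context, the renamed class is a thin adapter: it implements the batch write interface and forwards each call to the wrapped streaming write support together with the epoch ID fixed at construction. A minimal, self-contained sketch of that pattern (using simplified, hypothetical trait names rather than Spark's actual DataSourceV2 interfaces) could look like this:
    
    ```scala
    // Hypothetical batch-side contract: commit/abort a set of writer messages.
    trait SimpleBatchWrite {
      def commit(messages: Seq[String]): Unit
      def abort(messages: Seq[String]): Unit
    }
    
    // Hypothetical streaming-side contract: the same operations, keyed by epoch.
    trait SimpleStreamingWrite {
      def commit(epochId: Long, messages: Seq[String]): Unit
      def abort(epochId: Long, messages: Seq[String]): Unit
    }
    
    // The adapter captures the epoch ID at construction and forwards every
    // batch-style call to the wrapped streaming write with that epoch ID.
    class MicroBatchWriteAdapter(epochId: Long, streaming: SimpleStreamingWrite)
        extends SimpleBatchWrite {
      override def commit(messages: Seq[String]): Unit = streaming.commit(epochId, messages)
      override def abort(messages: Seq[String]): Unit = streaming.abort(epochId, messages)
    }
    ```
    
    Constructing one adapter per micro-batch keeps the streaming write support unaware of the batch-style call sites; only the adapter carries the epoch.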

