Github user lw-lin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/13595#discussion_r66589007
  
    --- Diff: 
sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataFrameReaderWriterSuite.scala
 ---
    @@ -371,66 +371,80 @@ class DataFrameReaderWriterSuite extends StreamTest 
with BeforeAndAfter {
     
       private def newTextInput = Utils.createTempDir(namePrefix = 
"text").getCanonicalPath
     
    -  test("check trigger() can only be called on continuous queries") {
    +  test("check trigger() can only be called on streaming 
Datasets/DataFrames") {
         val df = spark.read.text(newTextInput)
         val w = df.write.option("checkpointLocation", newMetadataDir)
         val e = intercept[AnalysisException](w.trigger(ProcessingTime("10 
seconds")))
    -    assert(e.getMessage == "trigger() can only be called on continuous 
queries;")
    +    assert(e.getMessage == "trigger() can only be called on streaming 
Datasets/DataFrames;")
       }
     
    -  test("check queryName() can only be called on continuous queries") {
    +  test("check queryName() can only be called on streaming 
Datasets/DataFrames") {
         val df = spark.read.text(newTextInput)
         val w = df.write.option("checkpointLocation", newMetadataDir)
         val e = intercept[AnalysisException](w.queryName("queryName"))
    -    assert(e.getMessage == "queryName() can only be called on continuous 
queries;")
    +    assert(e.getMessage == "queryName() can only be called on streaming 
Datasets/DataFrames;")
       }
     
    -  test("check startStream() can only be called on continuous queries") {
    +  test("check startStream() can only be called on streaming 
Datasets/DataFrames") {
         val df = spark.read.text(newTextInput)
         val w = df.write.option("checkpointLocation", newMetadataDir)
         val e = intercept[AnalysisException](w.startStream())
    -    assert(e.getMessage == "startStream() can only be called on continuous 
queries;")
    +    assert(e.getMessage == "startStream() can only be called on streaming 
Datasets/DataFrames;")
       }
     
    -  test("check startStream(path) can only be called on continuous queries") 
{
    +  test("check startStream(path) can only be called on streaming 
Datasets/DataFrames") {
         val df = spark.read.text(newTextInput)
         val w = df.write.option("checkpointLocation", newMetadataDir)
         val e = intercept[AnalysisException](w.startStream("non_exist_path"))
    -    assert(e.getMessage == "startStream() can only be called on continuous 
queries;")
    +    assert(e.getMessage == "startStream() can only be called on streaming 
Datasets/DataFrames;")
       }
     
    -  test("check mode(SaveMode) can only be called on non-continuous 
queries") {
    +  test("check foreach() can only be called on streaming 
Datasets/DataFrames") {
    +    val df = spark.read.text(newTextInput)
    +    val w = df.write.option("checkpointLocation", newMetadataDir)
    +    val foreachWriter = new ForeachWriter[String] {
    +      override def open(partitionId: Long, version: Long): Boolean = false
    +      override def process(value: String): Unit = {}
    +      override def close(errorOrNull: Throwable): Unit = {}
    +    }
    +    val e = intercept[AnalysisException](w.foreach(foreachWriter))
    +    Seq("foreach()", "streaming Datasets/DataFrames").foreach { s =>
    +      assert(e.getMessage.toLowerCase.contains(s.toLowerCase))
    +    }
    +  }
    +
    --- End diff --
    
    Here we add a new test that verifies foreach() can only be called on streaming 
Datasets/DataFrames.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to