This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2f9e5d5cde0 [SPARK-42569][CONNECT] Throw unsupported exceptions for non-supported API
2f9e5d5cde0 is described below

commit 2f9e5d5cde07de7b7f386a9af10643eb66f4df84
Author: Rui Wang <rui.w...@databricks.com>
AuthorDate: Sat Feb 25 14:07:10 2023 -0400

    [SPARK-42569][CONNECT] Throw unsupported exceptions for non-supported API
    
    ### What changes were proposed in this pull request?
    
    Match https://github.com/apache/spark/blob/6a2433070e60ad02c69ae45706a49cdd0b88a082/python/pyspark/sql/connect/dataframe.py#L1500 to throw unsupported exceptions in the Scala client.
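
    A minimal sketch of the caller-visible behavior (hypothetical, not part of this patch; `spark` is assumed to be a Spark Connect SparkSession):

    ```scala
    // Assumes `spark` is a Spark Connect SparkSession (hypothetical setup).
    val df = spark.range(10)
    try {
      df.checkpoint() // stubbed by this patch to throw
    } catch {
      case e: UnsupportedOperationException =>
        println(e.getMessage) // "checkpoint is not implemented."
    }
    ```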
    
    ### Why are the changes needed?
    
    Better indication that an API is not supported yet.
    
    ### Does this PR introduce _any_ user-facing change?
    
    NO
    
    ### How was this patch tested?
    
    N/A
    
    Closes #40164 from amaliujia/unsupported_op.
    
    Authored-by: Rui Wang <rui.w...@databricks.com>
    Signed-off-by: Herman van Hovell <her...@databricks.com>
---
 .../main/scala/org/apache/spark/sql/Dataset.scala  | 54 ++++++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
index b8481cbe158..87dce7146b3 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2555,6 +2555,60 @@ class Dataset[T] private[sql] (val sparkSession: SparkSession, private[sql] val
     new DataFrameWriterV2[T](table, this)
   }
 
+  def unpersist(blocking: Boolean): this.type = {
+    throw new UnsupportedOperationException("unpersist() is not implemented.")
+  }
+
+  def unpersist(): this.type = unpersist(blocking = false)
+
+  def cache(): this.type = {
+    throw new UnsupportedOperationException("cache() is not implemented.")
+  }
+
+  def withWatermark(eventTime: String, delayThreshold: String): Dataset[T] = {
 +    throw new UnsupportedOperationException("withWatermark is not implemented.")
+  }
+
+  def observe(name: String, expr: Column, exprs: Column*): Dataset[T] = {
+    throw new UnsupportedOperationException("observe is not implemented.")
+  }
+
+  def foreach(f: T => Unit): Unit = {
+    throw new UnsupportedOperationException("foreach is not implemented.")
+  }
+
 +  def foreachPartition(f: Iterator[T] => Unit): Unit = {
 +    throw new UnsupportedOperationException("foreachPartition is not implemented.")
 +  }
+
+  def checkpoint(): Dataset[T] = {
+    throw new UnsupportedOperationException("checkpoint is not implemented.")
+  }
+
+  def checkpoint(eager: Boolean): Dataset[T] = {
+    throw new UnsupportedOperationException("checkpoint is not implemented.")
+  }
+
+  def localCheckpoint(): Dataset[T] = {
 +    throw new UnsupportedOperationException("localCheckpoint is not implemented.")
+  }
+
+  def localCheckpoint(eager: Boolean): Dataset[T] = {
 +    throw new UnsupportedOperationException("localCheckpoint is not implemented.")
+  }
+
+  def sameSemantics(other: Dataset[T]): Boolean = {
 +    throw new UnsupportedOperationException("sameSemantics is not implemented.")
+  }
+
+  def semanticHash(): Int = {
+    throw new UnsupportedOperationException("semanticHash is not implemented.")
+  }
+
+  def toJSON: Dataset[String] = {
+    throw new UnsupportedOperationException("toJSON is not implemented.")
+  }
+
   private[sql] def analyze: proto.AnalyzePlanResponse = {
     sparkSession.analyze(plan, proto.Explain.ExplainMode.SIMPLE)
   }
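
A hypothetical usage sketch (not part of this commit): callers targeting both the classic and Connect clients could guard the stubbed APIs rather than catch the exception at every call site, e.g.

```scala
import scala.util.Try
import org.apache.spark.sql.Dataset

// Sketch with a hypothetical helper: fall back to the uncached Dataset
// where cache() is unsupported, as on the Connect client after this patch.
def cacheIfSupported[T](ds: Dataset[T]): Dataset[T] =
  Try(ds.cache()).getOrElse(ds)
```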


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
