This is an automated email from the ASF dual-hosted git repository.

kabhwan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8e7587a1f29 [SPARK-40466][SS] Improve the error message when DSv2 is 
disabled while DSv1 is not available
8e7587a1f29 is described below

commit 8e7587a1f296e1ee1ec008a97f0823f68569b14b
Author: Huanli Wang <[email protected]>
AuthorDate: Tue Sep 20 08:21:18 2022 +0900

    [SPARK-40466][SS] Improve the error message when DSv2 is disabled while 
DSv1 is not available
    
    …le DSv1 is not available.
    
    ### What changes were proposed in this pull request?
    
    Improve the error message when DSv2 is disabled while its fallback DSv1 is 
not available.
    
    ### Why are the changes needed?
    
    Improve the user experience. When users get UnsupportedOperationException for the 
disabled DSv2, they are able to know which config to modify to enable the V2 
source.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, error message.
    
    ### How was this patch tested?
    
    N/A, just the message change
    
    Closes #37917 from huanliwang-db/SPARK-40466.
    
    Authored-by: Huanli Wang <[email protected]>
    Signed-off-by: Jungtaek Lim <[email protected]>
---
 .../apache/spark/sql/errors/QueryExecutionErrors.scala    | 15 ++++++++++++---
 .../sql/execution/streaming/MicroBatchExecution.scala     |  3 ++-
 2 files changed, 14 insertions(+), 4 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 2515d43cf82..bc778abc985 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1599,9 +1599,18 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
       s"$commitProtocol does not support adding files with an absolute path")
   }
 
-  def microBatchUnsupportedByDataSourceError(srcName: String): Throwable = {
-    new UnsupportedOperationException(
-      s"Data source $srcName does not support microbatch processing.")
+  def microBatchUnsupportedByDataSourceError(
+      srcName: String,
+      disabledSources: String,
+      table: Table): Throwable = {
+    new UnsupportedOperationException(s"""
+         |Data source $srcName does not support microbatch processing.
+         |
+         |Either the data source is disabled at
+         |SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS.key (The 
disabled sources
+         |are [$disabledSources]) or the table $table does not have 
MICRO_BATCH_READ
+         |capability. Meanwhile, the fallback, data source v1, is not 
available."
+       """.stripMargin)
   }
 
   def cannotExecuteStreamingRelationExecError(): Throwable = {
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/MicroBatchExecution.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/MicroBatchExecution.scala
index 12427ece236..153bc82f892 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/MicroBatchExecution.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/MicroBatchExecution.scala
@@ -106,7 +106,8 @@ class MicroBatchExecution(
             StreamingDataSourceV2Relation(output, scan, stream, catalog, 
identifier)
           })
         } else if (v1.isEmpty) {
-          throw 
QueryExecutionErrors.microBatchUnsupportedByDataSourceError(srcName)
+          throw QueryExecutionErrors.microBatchUnsupportedByDataSourceError(
+            srcName, 
sparkSession.sqlContext.conf.disabledV2StreamingMicroBatchReaders, table)
         } else {
           v2ToExecutionRelationMap.getOrElseUpdate(s, {
             // Materialize source to avoid creating it in every batch


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to