aokolnychyi commented on a change in pull request #2132:
URL: https://github.com/apache/iceberg/pull/2132#discussion_r562258531



##########
File path: 
spark3-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DynamicFileFilterExec.scala
##########
@@ -67,21 +66,21 @@ case class DynamicFileFilterExec(
   }
 }
 
-case class DynamicFileFilterWithCountCheckExec(
+case class DynamicFileFilterWithCardinalityCheckExec(
     scanExec: SparkPlan,
     fileFilterExec: SparkPlan,
     @transient filterable: SupportsFileFilter,
-    filesAccumulator: SetAccumulator[String],
-    @transient targetTableName: String)
-  extends DynamicFileFilterExecBase(scanExec, fileFilterExec, filterable)  {
+    filesAccumulator: SetAccumulator[String])
+  extends DynamicFileFilterExecBase(scanExec, fileFilterExec)  {
 
   override protected def doPrepare(): Unit = {
     val rows = fileFilterExec.executeCollect()
-    if (rows.size > 0) {
-      val msg =
-        s"""The same row of target table `$targetTableName` was identified 
more than
-           | once for an update, delete or insert operation of the MERGE 
statement.""".stripMargin
-      throw new SparkException(msg)
+    if (rows.length > 0) {
+      throw new SparkException(
+        "The ON search condition of the MERGE statement matched a single row 
from " +

Review comment:
       The error message wording is borrowed from DB2.

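For context, this check fires when the MERGE's ON condition matches a single target row against multiple source rows. A minimal sketch of a query that should trip the new SparkException (table and column names below are hypothetical, not from the PR):

```scala
// Hypothetical Iceberg tables: two source rows share the same join key,
// so the single matching target row would be modified more than once.
spark.sql("CREATE TABLE db.target (id INT, value STRING) USING iceberg")
spark.sql("CREATE TABLE db.source (id INT, value STRING) USING iceberg")
spark.sql("INSERT INTO db.target VALUES (1, 'a')")
spark.sql("INSERT INTO db.source VALUES (1, 'b'), (1, 'c')")

// The ON condition matches target row id = 1 against two source rows, so the
// cardinality check behind DynamicFileFilterWithCardinalityCheckExec is
// expected to fail the query with the error message shown in the diff.
spark.sql(
  """MERGE INTO db.target t
    |USING db.source s
    |ON t.id = s.id
    |WHEN MATCHED THEN UPDATE SET t.value = s.value
    |""".stripMargin)
```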





