This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 424839745a94 [SPARK-49695][SQL] Postgres fix xor push-down
424839745a94 is described below

commit 424839745a94cd31461639d8bc98927bb3518735
Author: Andrej Gobeljić <[email protected]>
AuthorDate: Wed Dec 4 20:31:19 2024 +0100

    [SPARK-49695][SQL] Postgres fix xor push-down
    
    ### What changes were proposed in this pull request?
    This PR fixes the pushdown of the ^ operator (XOR operator) for Postgres. 
Postgres uses this character as the exponentiation operator, rather than bitwise XOR.
    
    The fix consists of overriding the SQLExpressionBuilder to replace the '^' 
character with '#'.
    
    ### Why are the changes needed?
    Without this fix, the pushed-down ^ is executed as exponentiation by Postgres, so the query result is incorrect.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. The user will now have a proper translation of the ^ operator.
    
    ### How was this patch tested?
    New test case added to PostgresIntegrationSuite verifying that the XOR filter is pushed down and returns the correct row.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #48144 from andrej-db/SPARK-49695-PostgresXOR.
    
    Lead-authored-by: Andrej Gobeljić <[email protected]>
    Co-authored-by: andrej-db <[email protected]>
    Co-authored-by: andrej-gobeljic_data <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 .../apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala    | 10 ++++++++++
 .../main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala |  4 ++++
 2 files changed, 14 insertions(+)

diff --git 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
index ef52aebd723a..eaf2a07ed459 100644
--- 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
+++ 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresIntegrationSuite.scala
@@ -22,6 +22,7 @@ import java.sql.Connection
 import org.apache.spark.{SparkConf, SparkSQLException}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
+import org.apache.spark.sql.execution.FilterExec
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.jdbc.PostgresDatabaseOnDocker
 import org.apache.spark.sql.types._
@@ -243,6 +244,15 @@ class PostgresIntegrationSuite extends 
DockerJDBCIntegrationV2Suite with V2JDBCT
     }
   }
 
+  test("SPARK-49695: Postgres fix xor push-down") {
+    val df = spark.sql(s"select dept, name from $catalogName.employee where 
dept ^ 6 = 0")
+    val rows = df.collect()
+    assert(!df.queryExecution.sparkPlan.exists(_.isInstanceOf[FilterExec]))
+    assert(rows.length == 1)
+    assert(rows(0).getInt(0) === 6)
+    assert(rows(0).getString(1) === "jen")
+  }
+
   override def testDatetime(tbl: String): Unit = {
     val df1 = sql(s"SELECT name FROM $tbl WHERE " +
       "dayofyear(date1) > 100 AND dayofmonth(date1) > 10 ")
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
index bce9c6704278..c1b79f801741 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
@@ -310,6 +310,10 @@ private case class PostgresDialect()
         case _ => super.visitExtract(field, source)
       }
     }
+
+    override def visitBinaryArithmetic(name: String, l: String, r: String): 
String = {
+      l + " " + name.replace('^', '#') + " " + r
+    }
   }
 
   override def compileExpression(expr: Expression): Option[String] = {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to