cloud-fan commented on code in PR #50921:
URL: https://github.com/apache/spark/pull/50921#discussion_r2108703627


##########
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCScanBuilder.scala:
##########
@@ -121,6 +126,128 @@ case class JDBCScanBuilder(
     }
   }
 
+  override def isRightSideCompatibleForJoin(other: SupportsPushDownJoin): 
Boolean = {
+    other.isInstanceOf[JDBCScanBuilder] &&
+      jdbcOptions.url == other.asInstanceOf[JDBCScanBuilder].jdbcOptions.url
+  };
+
+  override def pushJoin(
+    other: SupportsPushDownJoin,
+    joinType: JoinType,
+    condition: Optional[Predicate],
+    leftRequiredSchema: StructType,
+    rightRequiredSchema: StructType
+  ): Boolean = {
+    if (!jdbcOptions.pushDownJoin || !dialect.supportsJoin) return false
+
+    val leftNodeSQLQuery = buildSQLQuery()
+    val rightNodeSQLQuery = other.asInstanceOf[JDBCScanBuilder].buildSQLQuery()
+
+    val leftSideQualifier = JoinOutputAliasIterator.get
+    val rightSideQualifier = JoinOutputAliasIterator.get
+
+    val leftProjections: Seq[JoinColumn] = leftRequiredSchema.fields.map { e =>
+      new JoinColumn(Array(leftSideQualifier), e.name, true)
+    }.toSeq
+    val rightProjections: Seq[JoinColumn] = rightRequiredSchema.fields.map { e 
=>
+      new JoinColumn(Array(rightSideQualifier), e.name, false)
+    }.toSeq
+
+    var aliasedLeftSchema = StructType(Seq())
+    var aliasedRightSchema = StructType(Seq())
+    val outputAliasPrefix = JoinOutputAliasIterator.get
+
+    val aliasedOutput = (leftProjections ++ rightProjections)
+      .zipWithIndex
+      .map { case (proj, i) =>
+        val name = s"${outputAliasPrefix}_col_$i"
+        val output = FieldReference(name)
+        if (i < leftProjections.length) {
+          val field = leftRequiredSchema.fields(i)
+          aliasedLeftSchema =
+            aliasedLeftSchema.add(name, field.dataType, field.nullable, 
field.metadata)
+        } else {
+          val field = rightRequiredSchema.fields(i - 
leftRequiredSchema.fields.length)
+          aliasedRightSchema =
+            aliasedRightSchema.add(name, field.dataType, field.nullable, 
field.metadata)
+        }
+
+        s"""${dialect.compileExpression(proj).get} AS 
${dialect.compileExpression(output).get}"""
+      }.mkString(",")
+
+    val compiledJoinType = dialect.compileJoinType(joinType)
+    if (!compiledJoinType.isDefined) return false
+
+    val conditionString = condition.toScala match {
+      case Some(cond) =>
+        qualifyCondition(cond, leftSideQualifier, rightSideQualifier)
+        s"ON ${dialect.compileExpression(cond).get}"

Review Comment:
   I think it's safer to pass the generated subquery aliases into the 
`compileExpression` function (or to add a new `compileJoinCondition` function) 
so that the aliases are respected when generating SQL for `JoinColumn`. That 
would be better than making `JoinColumn` mutable.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to