itholic commented on code in PR #45377:
URL: https://github.com/apache/spark/pull/45377#discussion_r1559135258


##########
sql/api/src/main/scala/org/apache/spark/sql/catalyst/trees/QueryContexts.scala:
##########
@@ -134,7 +134,9 @@ case class SQLQueryContext(
   override def callSite: String = throw SparkUnsupportedOperationException()
 }
 
-case class DataFrameQueryContext(stackTrace: Seq[StackTraceElement]) extends QueryContext {
+case class DataFrameQueryContext(
+    stackTrace: Seq[StackTraceElement],
+    pysparkLoggingInfo: Option[(String, String)]) extends QueryContext {

Review Comment:
   Review applied. Thanks!



##########
sql/core/src/main/scala/org/apache/spark/sql/Column.scala:
##########
@@ -171,6 +171,26 @@ class Column(val expr: Expression) extends Logging {
     Column.fn(name, this, lit(other))
   }
 
+  /**
+   * A version of the `fn` method specifically designed for binary operations in PySpark
+   * that require logging information.
+   * This method is used when the operation involves another Column.
+   *
+   * @param name               The name of the operation to be performed.
+   * @param other              The value to be used in the operation, which will be converted to a
+   *                           Column if not already one.
+   * @param pysparkLoggingInfo A list containing logging information such as the fragment and
+   *                           call site from PySpark.
+   * @return A Column resulting from the operation.
+   */
+  private def fn(
+      name: String, other: Any, pysparkLoggingInfo: java.util.ArrayList[String]): Column = {

Review Comment:
   Updated!



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to