viirya commented on code in PR #155:
URL: https://github.com/apache/arrow-datafusion-comet/pull/155#discussion_r1511534716


##########
spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala:
##########
@@ -330,26 +333,44 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
     }
   }
 
-  def exprToProto(expr: Expression, input: Seq[Attribute]): Option[Expr] = {
+  def exprToProto(
+      expr: Expression,
+      input: Seq[Attribute],
+      binding: Boolean = true): Option[Expr] = {
     val conf = SQLConf.get
     val newExpr =
      DecimalPrecision.promote(conf.decimalOperationsAllowPrecisionLoss, expr, !conf.ansiEnabled)
-    exprToProtoInternal(newExpr, input)
+    exprToProtoInternal(newExpr, input, binding)
   }
 
-  def exprToProtoInternal(expr: Expression, inputs: Seq[Attribute]): Option[Expr] = {
+  /**
+   * Convert a Spark expression to protobuf.
+   *
+   * @param expr
+   *   The input expression
+   * @param inputs
+   *   The input attributes
+   * @param binding
+   *   Whether to bind the expression to the input attributes
+   * @return
+   *   The protobuf representation of the expression, or None if the expression is not supported
+   */
+  def exprToProtoInternal(
+      expr: Expression,
+      inputs: Seq[Attribute],
+      binding: Boolean): Option[Expr] = {
     SQLConf.get
     expr match {
       case a @ Alias(_, _) =>
-        exprToProtoInternal(a.child, inputs)
+        exprToProtoInternal(a.child, inputs, binding)

Review Comment:
   Okay
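
   For context, a minimal sketch (not the Comet implementation) of what the new `binding` flag typically controls, assuming Spark's `BindReferences` API; `maybeBind` is a hypothetical helper used only for illustration:

   ```scala
   import org.apache.spark.sql.catalyst.expressions._

   // Hypothetical helper: when `binding` is true, attribute references in
   // `expr` are resolved to positional BoundReferences against `inputs`
   // before translation; when false, the expression is left unbound.
   def maybeBind(expr: Expression, inputs: Seq[Attribute], binding: Boolean): Expression =
     if (binding) {
       // Replaces each AttributeReference with a BoundReference holding its
       // ordinal in `inputs`; throws if an attribute cannot be found.
       BindReferences.bindReference(expr, inputs, allowFailures = false)
     } else {
       // Leaves attributes as-is, keeping their names and exprIds.
       expr
     }
   ```

   Presumably the serde consults this flag when it reaches an attribute, choosing between an ordinal-based and a name-based protobuf representation.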


