HuwCampbell commented on code in PR #36441:
URL: https://github.com/apache/spark/pull/36441#discussion_r973814236
##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/HigherOrderFunctionsSuite.scala:
##########
@@ -839,6 +841,52 @@ class HigherOrderFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper
Seq(1d, 2d, Double.NaN, null))
}
+ test("SPARK-39081: compatibility of combo of HigherOrderFunctions" +
+ " with other Expression subclasses") {
+ // Dummy example given in JIRA, this is to test a compile time issue only, has no real usage
+ case class MyExploder(
+ arrays: Expression, // Array[AnyDataType]
+ asOfDate: Expression, // LambdaFunction[AnyDataType -> TimestampType]
+ extractor: Expression) // TimestampType
+ extends HigherOrderFunction with Generator with TimeZoneAwareExpression {
+
+ override def children: Seq[Expression] = Seq[Expression](arrays, asOfDate)
+
+ override def withNewChildrenInternal(newChildren: IndexedSeq[Expression]): Expression = null
+
+ override def eval(input: InternalRow): TraversableOnce[InternalRow] = null
+
+ override def nodePatternsInternal(): Seq[TreePattern] = Seq()
+
+ override def elementSchema: StructType = null
+
+ override def arguments: Seq[Expression] =
+ Seq(arrays, asOfDate)
+
+ override def argumentTypes: Seq[AbstractDataType] =
+ Seq(ArrayType, TimestampType)
+
+ override def doGenCode(ctx: CodegenContext, exp: ExprCode): ExprCode = null
+
+ override def functions: Seq[Expression] =
+ Seq(extractor)
+
+ override def functionTypes: Seq[TimestampType] =
+ Seq(TimestampType)
+
+ override def bind(f: (Expression,
+ Seq[(DataType, Boolean)]) => LambdaFunction): HigherOrderFunction = null
+
+ override def timeZoneId: Option[String] = None
+
+ override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = null
+ }
+
+ /* Should not get the error - value nodePatterns
+ ... cannot override final member, or conflict in nodePatterns between two types
Review Comment:
Yeah, I think you're pretty much dead on here: the correctness of the
optimiser will be up to the user to enforce, as they will need to ensure that
all node patterns are accounted for.
But — and it's a big one — it makes it possible to compose these interfaces,
even with the extra challenges... which is just _not_ possible as of
Spark 3.2.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]