MaxGekk commented on a change in pull request #31932:
URL: https://github.com/apache/spark/pull/31932#discussion_r604385968
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
##########
@@ -428,10 +430,12 @@ case class InsertAction(
override def children: Seq[Expression] = condition.toSeq ++ assignments
}
-case class Assignment(key: Expression, value: Expression) extends Expression with Unevaluable {
+case class Assignment(key: Expression, value: Expression) extends Expression
+ with Unevaluable with BinaryLike[Expression] {
Review comment:
2 space indent
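As an aside, a minimal, self-contained sketch of the 2-space continuation indent being asked for here (the trait and class names below are placeholders, not code from this PR):
```scala
// Placeholder names; only the whitespace convention matters.
trait Bar
trait Baz

case class Foo(x: Int) extends AnyRef
  with Bar with Baz {          // continuation line indented by 2 spaces
  def doubled: Int = x * 2
}
```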
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/collect.scala
##########
@@ -33,12 +34,11 @@ import org.apache.spark.sql.types._
* We have to store all the collected elements in memory, and so notice that too many elements
* can cause GC paused and eventually OutOfMemory Errors.
*/
-abstract class Collect[T <: Growable[Any] with Iterable[Any]] extends TypedImperativeAggregate[T] {
+abstract class Collect[T <: Growable[Any] with Iterable[Any]] extends TypedImperativeAggregate[T]
+ with UnaryLike[Expression] {
Review comment:
2 space indent
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/bitwiseAggregates.scala
##########
@@ -18,16 +18,16 @@
package org.apache.spark.sql.catalyst.expressions.aggregate
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, BinaryArithmetic, BitwiseAnd, BitwiseOr, BitwiseXor, ExpectsInputTypes, Expression, ExpressionDescription, If, IsNull, Literal}
+import org.apache.spark.sql.catalyst.trees.UnaryLike
import org.apache.spark.sql.types.{AbstractDataType, DataType, IntegralType}
-abstract class BitAggregate extends DeclarativeAggregate with ExpectsInputTypes {
+abstract class BitAggregate extends DeclarativeAggregate with ExpectsInputTypes
+ with UnaryLike[Expression] {
Review comment:
2 space indent
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/PartitionTransforms.scala
##########
@@ -32,7 +33,8 @@ import org.apache.spark.sql.types.{DataType, IntegerType}
* df.writeTo("catalog.db.table").partitionedBy($"category", days($"timestamp")).create()
* }}}
*/
-abstract class PartitionTransformExpression extends Expression with Unevaluable {
+abstract class PartitionTransformExpression extends Expression with Unevaluable
+ with UnaryLike[Expression] {
Review comment:
nit: 2 spaces indentation, see https://github.com/databricks/scala-style-guide#spacing-and-indentation
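For what it's worth, the class-declaration convention in the linked guide looks roughly like this (illustrative names, not Spark code): 4-space indentation for wrapped constructor parameters, 2-space indentation for the extends/with continuation lines.
```scala
trait FooInterface
trait Logging

class Foo(
    val param1: String,        // 4-space indent for constructor parameters
    val param2: String)
  extends FooInterface         // 2-space indent for extends/with lines
  with Logging {

  def describe: String = s"$param1 / $param2"
}
```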
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountIf.scala
##########
@@ -34,10 +35,12 @@ import org.apache.spark.sql.types.{AbstractDataType, BooleanType, DataType, Long
""",
group = "agg_funcs",
since = "3.0.0")
-case class CountIf(predicate: Expression) extends UnevaluableAggregate with ImplicitCastInputTypes {
+case class CountIf(predicate: Expression) extends UnevaluableAggregate with ImplicitCastInputTypes
+ with UnaryLike[Expression] {
Review comment:
2 space indent
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala
##########
@@ -37,9 +38,8 @@ import org.apache.spark.sql.types._
""",
group = "agg_funcs",
since = "1.0.0")
-case class Sum(child: Expression) extends DeclarativeAggregate with ImplicitCastInputTypes {
-
- override def children: Seq[Expression] = child :: Nil
+case class Sum(child: Expression) extends DeclarativeAggregate with ImplicitCastInputTypes
+ with UnaryLike[Expression] {
Review comment:
2 space indent
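For context on the removed `override def children` line above: once the UnaryLike-style trait is mixed in, it supplies `children` from the single `child`, which is why the explicit override can be dropped. A simplified, self-contained sketch of that pattern (this is not Spark's actual TreeNode/UnaryLike code; the names are illustrative):
```scala
abstract class Node {
  def children: Seq[Node]
}

// Derives `children` from a single `child`, so concrete nodes no longer override it themselves.
trait UnaryNodeLike extends Node {
  def child: Node
  override def children: Seq[Node] = child :: Nil
}

case class Leaf(value: Int) extends Node {
  override def children: Seq[Node] = Nil
}

// No explicit `children` override needed here; it comes from UnaryNodeLike.
case class Negate(child: Node) extends Node with UnaryNodeLike

// Negate(Leaf(1)).children == Seq(Leaf(1))
```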
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
##########
@@ -734,10 +741,12 @@ case class NthValue(input: Expression, offset: Expression, ignoreNulls: Boolean)
since = "2.0.0",
group = "window_funcs")
// scalastyle:on line.size.limit line.contains.tab
-case class NTile(buckets: Expression) extends RowNumberLike with SizeBasedWindowFunction {
+case class NTile(buckets: Expression) extends RowNumberLike with SizeBasedWindowFunction
+ with UnaryLike[Expression] {
Review comment:
2 space indent
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala
##########
@@ -34,12 +35,11 @@ import org.apache.spark.sql.types._
""",
group = "agg_funcs",
since = "1.0.0")
-case class Average(child: Expression) extends DeclarativeAggregate with ImplicitCastInputTypes {
+case class Average(child: Expression) extends DeclarativeAggregate with ImplicitCastInputTypes
+ with UnaryLike[Expression] {
Review comment:
2 space indent
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
##########
@@ -101,11 +102,11 @@ case class Rollup(groupByExprs: Seq[Expression]) extends GroupingSet {}
since = "2.0.0",
group = "agg_funcs")
// scalastyle:on line.size.limit line.contains.tab
-case class Grouping(child: Expression) extends Expression with Unevaluable {
+case class Grouping(child: Expression) extends Expression with Unevaluable
+ with UnaryLike[Expression] {
Review comment:
2 space indent
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.