dongjoon-hyun commented on a change in pull request #32764:
URL: https://github.com/apache/spark/pull/32764#discussion_r645141345
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
##########
@@ -466,6 +526,60 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
override def produceResult(state: (Long, Long)): Long = state._1 / state._2
}
+ object UnboundDecimalAverage extends UnboundFunction {
+ override def name(): String = "favg"
+
+ override def bind(inputType: StructType): BoundFunction = {
+ if (inputType.fields.length > 1) {
+ throw new UnsupportedOperationException("Too many arguments")
+ }
+
+ // put interval type here for testing purposes
+ inputType.fields(0).dataType match {
+ case _: NumericType | _: DayTimeIntervalType => DecimalAverage
+ case dataType =>
+ throw new UnsupportedOperationException(s"Unsupported input type: $dataType")
+ }
+ }
+
+ override def description(): String =
+ """iavg: produces an average using decimal division, ignoring nulls
+ | iavg(integral) -> decimal
+ | iavg(float) -> decimal
+ | iavg(decimal) -> decimal""".stripMargin
+ }
+
+ object DecimalAverage extends AggregateFunction[(Decimal, Int), Decimal] {
+ val PRECISION: Int = 15
+ val SCALE: Int = 5
Review comment:
Sorry, but may I ask where these values (precision 15, scale 5) came from? I
might be missing the context, but they look like non-conventional values in the
Apache Spark codebase. Were they chosen intentionally for some purpose?
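
For reference, a minimal sketch of the conventional precision/scale constants
already defined in `org.apache.spark.sql.types.DecimalType` (just an
illustration I'm adding for comparison, not code from this PR):

    import org.apache.spark.sql.types.DecimalType

    // Conventional defaults in the Spark codebase:
    //   DecimalType.MAX_PRECISION  = 38
    //   DecimalType.SYSTEM_DEFAULT = DecimalType(38, 18)
    //   DecimalType.USER_DEFAULT   = DecimalType(10, 0)
    val systemDefault: DecimalType = DecimalType.SYSTEM_DEFAULT // precision 38, scale 18
    val userDefault: DecimalType = DecimalType.USER_DEFAULT     // precision 10, scale 0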
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]