dongjoon-hyun commented on a change in pull request #28341:
URL: https://github.com/apache/spark/pull/28341#discussion_r415223266
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
##########
@@ -2649,4 +2652,75 @@ class DataFrameSuite extends QueryTest with
SharedSQLContext {
}
assert(e.getMessage.contains("Table or view not found:"))
}
+
+ test("SPARK-31552: array encoder with different types") {
+ // primitives
+ val booleans = Array(true, false)
+ checkAnswer(Seq(booleans).toDF(), Row(booleans))
+
+ val bytes = Array(1.toByte, 2.toByte)
+ checkAnswer(Seq(bytes).toDF(), Row(bytes))
+ val shorts = Array(1.toShort, 2.toShort)
+ checkAnswer(Seq(shorts).toDF(), Row(shorts))
+ val ints = Array(1, 2)
+ checkAnswer(Seq(ints).toDF(), Row(ints))
+ val longs = Array(1L, 2L)
+ checkAnswer(Seq(longs).toDF(), Row(longs))
+
+ val floats = Array(1.0F, 2.0F)
+ checkAnswer(Seq(floats).toDF(), Row(floats))
+ val doubles = Array(1.0D, 2.0D)
+ checkAnswer(Seq(doubles).toDF(), Row(doubles))
+
+ val strings = Array("2020-04-24", "2020-04-25")
+ checkAnswer(Seq(strings).toDF(), Row(strings))
+
+ // tuples
+ val decOne = Decimal(1, 38, 18)
+ val decTwo = Decimal(2, 38, 18)
+ val tuple1 = (1, 2.2, "3.33", decOne, Date.valueOf("2012-11-22"))
+ val tuple2 = (2, 3.3, "4.44", decTwo, Date.valueOf("2022-11-22"))
+ checkAnswer(Seq(Array(tuple1, tuple2)).toDF(), Seq(Seq(tuple1,
tuple2)).toDF())
+
+ // case classes
+ val gbks = Array(GroupByKey(1, 2), GroupByKey(4, 5))
+ checkAnswer(Seq(gbks).toDF(), Row(Array(Row(1, 2), Row(4, 5))))
+
+ // We can move this implicit def to `SQLImplicits` when we eventually make
fully
+ // support for array encoder like Seq and Set
+ // For now cases below, decimal/datetime/interval/binary/nested types, etc,
+ // are not supported by array
+ implicit def newArrayEncoder[T <: Array[_] : TypeTag]: Encoder[T] =
ExpressionEncoder()
+
+ // decimals
+ val decSpark = Array(decOne, decTwo)
+ val decScala = decSpark.map(_.toBigDecimal)
+ val decJava = decSpark.map(_.toJavaBigDecimal)
+ checkAnswer(Seq(decSpark).toDF(), Row(decJava))
+ checkAnswer(Seq(decScala).toDF(), Row(decJava))
+ checkAnswer(Seq(decJava).toDF(), Row(decJava))
+
+ // datetimes
+ val dates = strings.map(Date.valueOf)
+ checkAnswer(Seq(dates).toDF(), Row(dates))
+
+ val timestamps =
+ Array(Timestamp.valueOf("2020-04-24 12:34:56"),
Timestamp.valueOf("2020-04-24 11:22:33"))
+ checkAnswer(Seq(timestamps).toDF(), Row(timestamps))
+
+ // binary
+ val bins = Array(Array(1.toByte), Array(2.toByte), Array(3.toByte),
Array(4.toByte))
+
+ val binsRes = Seq(bins).toDF().collect().head.get(0)
+ .asInstanceOf[mutable.WrappedArray.ofRef[Array[Byte]]].array
Review comment:
```scala
- .asInstanceOf[mutable.WrappedArray.ofRef[Array[Byte]]].array
+ .asInstanceOf[WrappedArray[Array[Byte]]]
```
Also, please use `import scala.collection.mutable.WrappedArray` instead of
`import scala.collection.mutable`. It seems that this PR doesn't add
any other usage of a `mutable` class.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]