HyukjinKwon commented on a change in pull request #26907: [SPARK-30267][SQL]
Avro arrays can be of any List
URL: https://github.com/apache/spark/pull/26907#discussion_r358764878
##########
File path:
external/avro/src/test/scala/org/apache/spark/sql/avro/AvroCatalystDataConversionSuite.scala
##########
@@ -127,6 +127,25 @@ class AvroCatalystDataConversionSuite extends
SparkFunSuite
}
}
+ {
+ val seed = scala.util.Random.nextLong()
+ val rand = new scala.util.Random(seed)
+ val schema = StructType(
+ StructField("a",
+ ArrayType(
+ RandomDataGenerator.randomNestedSchema(rand, 10, testingTypes)
+ , containsNull = false)
+ , nullable = false) :: Nil
+ )
+
+ test(s"array of nested schema ${schema.catalogString} with seed $seed") {
+ val data = RandomDataGenerator.randomRow(rand, schema)
+ val converter = CatalystTypeConverters.createToCatalystConverter(schema)
+ val input = Literal.create(converter(data), schema)
+ roundTripTest(input)
+ }
+ }
Review comment:
You can do it in a bit neater way:
```scala
test(s"array of nested schema with seed") {
val seed = scala.util.Random.nextLong()
val rand = new scala.util.Random(seed)
val schema = StructType(
StructField("a",
ArrayType(
RandomDataGenerator.randomNestedSchema(rand, 10, testingTypes),
containsNull = false),
nullable = false) :: Nil
)
withClue(s"Nested schema: $schema\nseed: $seed") {
val data = RandomDataGenerator.randomRow(rand, schema)
val converter =
CatalystTypeConverters.createToCatalystConverter(schema)
val input = Literal.create(converter(data), schema)
roundTripTest(input)
}
}
```
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]