gengliangwang commented on a change in pull request #30224:
URL: https://github.com/apache/spark/pull/30224#discussion_r515960190
##########
File path: external/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala
##########
@@ -201,4 +201,40 @@ class AvroFunctionsSuite extends QueryTest with SharedSparkSession {
Map("avroSchema" -> evolvedAvroSchema).asJava)),
expected)
}
+
+ test("roundtrip in to_avro and from_avro - struct with nullable Avro
schema") {
+ val df = spark.range(10).select(struct('id,
'id.cast("string").as("str")).as("struct"))
+ val avroTypeStruct = s"""
+ |{
Review comment:
Use two-space indentation here.
##########
File path: external/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala
##########
@@ -201,4 +201,40 @@ class AvroFunctionsSuite extends QueryTest with SharedSparkSession {
Map("avroSchema" -> evolvedAvroSchema).asJava)),
expected)
}
+
+ test("roundtrip in to_avro and from_avro - struct with nullable Avro
schema") {
+ val df = spark.range(10).select(struct('id,
'id.cast("string").as("str")).as("struct"))
+ val avroTypeStruct = s"""
+ |{
+ | "type": "record",
+ | "name": "struct",
+ | "fields": [
+ | {"name": "id", "type": "long"},
+ | {"name": "str", "type": ["null", "string"]}
+ | ]
+ |}
+ """.stripMargin
+ val avroStructDF = df.select(functions.to_avro('struct, avroTypeStruct).as("avro"))
+ checkAnswer(avroStructDF.select(
+ functions.from_avro('avro, avroTypeStruct)), df)
+ }
+
+ test("to_avro with invalid nullable Avro schema") {
+ val df = spark.range(10).select(struct('id, 'id.cast("string").as("str")).as("struct"))
+ for (invalidAvroType <- Seq("""["null", "int", "long"]""", """["int", "long"]""")) {
+ val avroTypeStruct = s"""
+ |{
Review comment:
ditto
##########
File path: external/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala
##########
@@ -201,4 +201,40 @@ class AvroFunctionsSuite extends QueryTest with SharedSparkSession {
Map("avroSchema" -> evolvedAvroSchema).asJava)),
expected)
}
+
+ test("roundtrip in to_avro and from_avro - struct with nullable Avro
schema") {
+ val df = spark.range(10).select(struct('id,
'id.cast("string").as("str")).as("struct"))
+ val avroTypeStruct = s"""
+ |{
+ | "type": "record",
+ | "name": "struct",
+ | "fields": [
+ | {"name": "id", "type": "long"},
+ | {"name": "str", "type": ["null", "string"]}
+ | ]
+ |}
+ """.stripMargin
+ val avroStructDF = df.select(functions.to_avro('struct, avroTypeStruct).as("avro"))
+ checkAnswer(avroStructDF.select(
+ functions.from_avro('avro, avroTypeStruct)), df)
+ }
+
+ test("to_avro with invalid nullable Avro schema") {
+ val df = spark.range(10).select(struct('id, 'id.cast("string").as("str")).as("struct"))
+ for (invalidAvroType <- Seq("""["null", "int", "long"]""", """["int", "long"]""")) {
+ val avroTypeStruct = s"""
+ |{
+ | "type": "record",
+ | "name": "struct",
+ | "fields": [
+ | {"name": "id", "type": $invalidAvroType},
+ | {"name": "str", "type": ["null", "string"]}
+ | ]
+ |}
+ """.stripMargin
+ intercept[SparkException] {
Review comment:
let's verify the error message as well
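For illustration, a minimal sketch of what that could look like, assuming the test's existing `df` and `avroTypeStruct`; the action that triggers the failure and the message fragment are placeholders, not taken from this PR:

    val e = intercept[SparkException] {
      df.select(functions.to_avro('struct, avroTypeStruct).as("avro")).collect()
    }
    // Placeholder fragment; substitute the actual error text for the unsupported union.
    // Depending on where the failure surfaces, the text may live on e.getCause instead.
    assert(e.getMessage.contains("expected error message fragment"))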
##########
File path: external/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala
##########
@@ -208,3 +208,5 @@ object SchemaConverters {
private[avro] class IncompatibleSchemaException(
msg: String, ex: Throwable = null) extends Exception(msg, ex)
+
+private[avro] class InvalidAvroTypeException(msg: String) extends Exception(msg)
Review comment:
It seems that we can reuse `IncompatibleSchemaException`, or rename it to `UnsupportedAvroTypeException`.
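A rough sketch of both options (the message wording here is illustrative, not from the PR):

    // Option 1: drop the new class and throw the existing exception at the call site.
    throw new IncompatibleSchemaException(
      "Unsupported Avro UNION type: only a union of null and one non-null type is supported")

    // Option 2: rename it, keeping the same shape as IncompatibleSchemaException.
    private[avro] class UnsupportedAvroTypeException(
        msg: String, ex: Throwable = null) extends Exception(msg, ex)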
##########
File path: external/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
##########
@@ -1015,6 +1015,69 @@ abstract class AvroSuite
}
}
+ test("support user provided nullable avro schema " +
+ "for non-nullable catalyst schema without any null record") {
+ withTempPath { tempDir =>
+ val catalystSchema =
+ StructType(Seq(
+ StructField("Age", IntegerType, nullable = false),
+ StructField("Name", StringType, nullable = false)))
+
+ val avroSchema =
+ """
+ |{
+ | "type" : "record",
+ | "name" : "test_schema",
+ | "fields" : [
+ | {"name": "Age", "type": ["null", "int"]},
+ | {"name": "Name", "type": ["null", "string"]}
+ | ]
+ |}
+ """.stripMargin
+
+ val df = spark.createDataFrame(
+ spark.sparkContext.parallelize(Seq(Row(2, "Aurora"))), catalystSchema)
+
+ val tempSavePath = s"$tempDir/save/${UUID.randomUUID()}"
Review comment:
We can just write to `tempDir`
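For example, something along these lines, reusing the test's `df` and `avroSchema` (a sketch only; `withTempPath` hands the block a path that does not exist yet, so we can save straight to it):

    df.write.format("avro").option("avroSchema", avroSchema).save(tempDir.getPath)
    checkAnswer(
      spark.read.format("avro").option("avroSchema", avroSchema).load(tempDir.getPath),
      df)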
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]