maropu commented on a change in pull request #28239: [SPARK-31467][SQL][TEST] Refactor the sql tests to prevent TableAlreadyExistsException
URL: https://github.com/apache/spark/pull/28239#discussion_r410865751
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
##########
@@ -833,171 +861,182 @@ abstract class JsonSuite extends QueryTest with SharedSparkSession with TestJson
   }
 
   test("Applying schemas") {
-    val dir = Utils.createTempDir()
-    dir.delete()
-    val path = dir.getCanonicalPath
-    primitiveFieldAndType.map(record => record.replaceAll("\n", " ")).write.text(path)
+    withTempView("jsonTable1", "jsonTable2") {
+      val dir = Utils.createTempDir()
+      dir.delete()
+      val path = dir.getCanonicalPath
+      primitiveFieldAndType.map(record => record.replaceAll("\n", " ")).write.text(path)
 
-    val schema = StructType(
-      StructField("bigInteger", DecimalType.SYSTEM_DEFAULT, true) ::
-      StructField("boolean", BooleanType, true) ::
-      StructField("double", DoubleType, true) ::
-      StructField("integer", IntegerType, true) ::
-      StructField("long", LongType, true) ::
-      StructField("null", StringType, true) ::
-      StructField("string", StringType, true) :: Nil)
+      val schema = StructType(
+        StructField("bigInteger", DecimalType.SYSTEM_DEFAULT, true) ::
+        StructField("boolean", BooleanType, true) ::
+        StructField("double", DoubleType, true) ::
+        StructField("integer", IntegerType, true) ::
+        StructField("long", LongType, true) ::
+        StructField("null", StringType, true) ::
+        StructField("string", StringType, true) :: Nil)
 
-    val jsonDF1 = spark.read.schema(schema).json(path)
+      val jsonDF1 = spark.read.schema(schema).json(path)
 
-    assert(schema === jsonDF1.schema)
+      assert(schema === jsonDF1.schema)
 
-    jsonDF1.createOrReplaceTempView("jsonTable1")
+      jsonDF1.createOrReplaceTempView("jsonTable1")
 
-    checkAnswer(
-      sql("select * from jsonTable1"),
-      Row(new java.math.BigDecimal("92233720368547758070"),
-        true,
-        1.7976931348623157,
-        10,
-        21474836470L,
-        null,
-        "this is a simple string.")
-    )
+      checkAnswer(
+        sql("select * from jsonTable1"),
+        Row(new java.math.BigDecimal("92233720368547758070"),
+          true,
+          1.7976931348623157,
+          10,
+          21474836470L,
+          null,
+          "this is a simple string.")
+      )
 
-    val jsonDF2 = spark.read.schema(schema).json(primitiveFieldAndType)
+      val jsonDF2 = spark.read.schema(schema).json(primitiveFieldAndType)
 
-    assert(schema === jsonDF2.schema)
+      assert(schema === jsonDF2.schema)
 
-    jsonDF2.createOrReplaceTempView("jsonTable2")
+      jsonDF2.createOrReplaceTempView("jsonTable2")
 
-    checkAnswer(
-      sql("select * from jsonTable2"),
-      Row(new java.math.BigDecimal("92233720368547758070"),
-        true,
-        1.7976931348623157,
-        10,
-        21474836470L,
-        null,
-        "this is a simple string.")
-    )
+      checkAnswer(
+        sql("select * from jsonTable2"),
+        Row(new java.math.BigDecimal("92233720368547758070"),
+          true,
+          1.7976931348623157,
+          10,
+          21474836470L,
+          null,
+          "this is a simple string.")
+      )
+    }
   }
test("Applying schemas with MapType") {
- val schemaWithSimpleMap = StructType(
- StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
- val jsonWithSimpleMap =
spark.read.schema(schemaWithSimpleMap).json(mapType1)
-
- jsonWithSimpleMap.createOrReplaceTempView("jsonWithSimpleMap")
+ withTempView("jsonWithSimpleMap", "jsonWithComplexMap") {
+ val schemaWithSimpleMap = StructType(
+ StructField("map", MapType(StringType, IntegerType, true), false) ::
Nil)
+ val jsonWithSimpleMap =
spark.read.schema(schemaWithSimpleMap).json(mapType1)
- checkAnswer(
- sql("select `map` from jsonWithSimpleMap"),
- Row(Map("a" -> 1)) ::
- Row(Map("b" -> 2)) ::
- Row(Map("c" -> 3)) ::
- Row(Map("c" -> 1, "d" -> 4)) ::
- Row(Map("e" -> null)) :: Nil
- )
+ jsonWithSimpleMap.createOrReplaceTempView("jsonWithSimpleMap")
- withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
checkAnswer(
- sql("select `map`['c'] from jsonWithSimpleMap"),
- Row(null) ::
- Row(null) ::
- Row(3) ::
- Row(1) ::
- Row(null) :: Nil
+ sql("select `map` from jsonWithSimpleMap"),
+ Row(Map("a" -> 1)) ::
+ Row(Map("b" -> 2)) ::
+ Row(Map("c" -> 3)) ::
+ Row(Map("c" -> 1, "d" -> 4)) ::
+ Row(Map("e" -> null)) :: Nil
)
- }
- val innerStruct = StructType(
- StructField("field1", ArrayType(IntegerType, true), true) ::
- StructField("field2", IntegerType, true) :: Nil)
- val schemaWithComplexMap = StructType(
- StructField("map", MapType(StringType, innerStruct, true), false) :: Nil)
+ withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
+ checkAnswer(
+ sql("select `map`['c'] from jsonWithSimpleMap"),
+ Row(null) ::
+ Row(null) ::
+ Row(3) ::
+ Row(1) ::
+ Row(null) :: Nil
+ )
+ }
- val jsonWithComplexMap =
spark.read.schema(schemaWithComplexMap).json(mapType2)
+ val innerStruct = StructType(
+ StructField("field1", ArrayType(IntegerType, true), true) ::
+ StructField("field2", IntegerType, true) :: Nil)
+ val schemaWithComplexMap = StructType(
+ StructField("map", MapType(StringType, innerStruct, true), false) ::
Nil)
- jsonWithComplexMap.createOrReplaceTempView("jsonWithComplexMap")
+ val jsonWithComplexMap =
spark.read.schema(schemaWithComplexMap).json(mapType2)
- checkAnswer(
- sql("select `map` from jsonWithComplexMap"),
- Row(Map("a" -> Row(Seq(1, 2, 3, null), null))) ::
- Row(Map("b" -> Row(null, 2))) ::
- Row(Map("c" -> Row(Seq(), 4))) ::
- Row(Map("c" -> Row(null, 3), "d" -> Row(Seq(null), null))) ::
- Row(Map("e" -> null)) ::
- Row(Map("f" -> Row(null, null))) :: Nil
- )
+ jsonWithComplexMap.createOrReplaceTempView("jsonWithComplexMap")
- withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
checkAnswer(
- sql("select `map`['a'].field1, `map`['c'].field2 from
jsonWithComplexMap"),
- Row(Seq(1, 2, 3, null), null) ::
- Row(null, null) ::
- Row(null, 4) ::
- Row(null, 3) ::
- Row(null, null) ::
- Row(null, null) :: Nil
+ sql("select `map` from jsonWithComplexMap"),
+ Row(Map("a" -> Row(Seq(1, 2, 3, null), null))) ::
+ Row(Map("b" -> Row(null, 2))) ::
+ Row(Map("c" -> Row(Seq(), 4))) ::
+ Row(Map("c" -> Row(null, 3), "d" -> Row(Seq(null), null))) ::
+ Row(Map("e" -> null)) ::
+ Row(Map("f" -> Row(null, null))) :: Nil
)
+
+ withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
+ checkAnswer(
+ sql("select `map`['a'].field1, `map`['c'].field2 from
jsonWithComplexMap"),
+ Row(Seq(1, 2, 3, null), null) ::
+ Row(null, null) ::
+ Row(null, 4) ::
+ Row(null, 3) ::
+ Row(null, null) ::
+ Row(null, null) :: Nil
+ )
+ }
}
}
test("SPARK-2096 Correctly parse dot notations") {
- val jsonDF = spark.read.json(complexFieldAndType2)
- jsonDF.createOrReplaceTempView("jsonTable")
+ withTempView("jsonTable") {
+ val jsonDF = spark.read.json(complexFieldAndType2)
+ jsonDF.createOrReplaceTempView("jsonTable")
- checkAnswer(
- sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from
jsonTable"),
- Row(true, "str1")
- )
- checkAnswer(
- sql(
- """
- |select complexArrayOfStruct[0].field1[1].inner2[0],
complexArrayOfStruct[1].field2[0][1]
- |from jsonTable
+ checkAnswer(
+ sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from
jsonTable"),
+ Row(true, "str1")
+ )
+ checkAnswer(
+ sql(
+ """
+ |select complexArrayOfStruct[0].field1[1].inner2[0],
Review comment:
nit: wrong indents.
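
   For example, something like this (illustrative only: the continuation line is carried over from the removed version above, and the alignment just follows the two-space continuation style used elsewhere in this suite):

       checkAnswer(
         sql(
           """
             |select complexArrayOfStruct[0].field1[1].inner2[0],
             |  complexArrayOfStruct[1].field2[0][1]
             |from jsonTable
           """.stripMargin),
         ...
       )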