HyukjinKwon commented on code in PR #46408:
URL: https://github.com/apache/spark/pull/46408#discussion_r1594851819
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
Review Comment:
```suggestion
expectedInexactData = Seq("""{"white":"space"}"""),
```
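For context on these suggestions: in Scala the `s` interpolator only has an effect when the literal actually contains a `${...}` substitution, so plain triple-quoted literals can drop the prefix. A minimal standalone sketch (names are illustrative, not from the PR):

```scala
// The s prefix is only needed when something is actually interpolated.
val noInterpolation   = """{"white": "space"}"""                 // plain literal, no s needed
val granularFloat     = "-999.99999999999999999999999999999999995"
val withInterpolation = s"""{"data": {"v": ${granularFloat}}}""" // ${...} requires the s prefix
```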
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
Review Comment:
```suggestion
"""{"data": ["white", "space"]}""",
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
Review Comment:
```suggestion
expectedExactData = Seq("""{"white": "space"}""")
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
Review Comment:
```suggestion
"""{"data": {"white": "space"}}""",
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
+ expectedInexactData = Seq(s"""["white","space"]"""),
Review Comment:
```suggestion
expectedInexactData = Seq("""["white","space"]"""),
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
+ expectedInexactData = Seq(s"""["white","space"]"""),
+ expectedExactData = Seq(s"""["white", "space"]""")
+ )
+ val granularFloat = "-999.99999999999999999999999999999999995"
+ extractData(
+ s"""{"data": {"v": ${granularFloat}}}""",
+ expectedInexactData = Seq(s"""{"v":-1000.0}"""),
+ expectedExactData = Seq(s"""{"v": ${granularFloat}}""")
+ )
+ // In multiLine, we fall back to the inexact method:
+ extractData(
+ s"""{"data": {"white":\n"space"}}""",
Review Comment:
```suggestion
"""{"data": {"white":\n"space"}}""",
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
+ expectedInexactData = Seq(s"""["white","space"]"""),
+ expectedExactData = Seq(s"""["white", "space"]""")
+ )
+ val granularFloat = "-999.99999999999999999999999999999999995"
+ extractData(
+ s"""{"data": {"v": ${granularFloat}}}""",
+ expectedInexactData = Seq(s"""{"v":-1000.0}"""),
+ expectedExactData = Seq(s"""{"v": ${granularFloat}}""")
+ )
+ // In multiLine, we fall back to the inexact method:
+ extractData(
+ s"""{"data": {"white":\n"space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
Review Comment:
```suggestion
expectedInexactData = Seq("""{"white":"space"}"""),
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
+ expectedInexactData = Seq(s"""["white","space"]"""),
+ expectedExactData = Seq(s"""["white", "space"]""")
Review Comment:
```suggestion
expectedExactData = Seq("""["white", "space"]""")
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
+ expectedInexactData = Seq(s"""["white","space"]"""),
+ expectedExactData = Seq(s"""["white", "space"]""")
+ )
+ val granularFloat = "-999.99999999999999999999999999999999995"
+ extractData(
+ s"""{"data": {"v": ${granularFloat}}}""",
+ expectedInexactData = Seq(s"""{"v":-1000.0}"""),
+ expectedExactData = Seq(s"""{"v": ${granularFloat}}""")
+ )
+ // In multiLine, we fall back to the inexact method:
+ extractData(
+ s"""{"data": {"white":\n"space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white":"space"}"""),
Review Comment:
```suggestion
expectedExactData = Seq("""{"white":"space"}"""),
```
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala:
##########
@@ -3865,6 +3865,65 @@ abstract class JsonSuite
}
}
}
+
+ test("SPARK-48148: values are unchanged when read as string") {
+ withTempPath { path =>
+ def extractData(
+ jsonString: String,
+ expectedInexactData: Seq[String],
+ expectedExactData: Seq[String],
+ multiLine: Boolean = false): Unit = {
+ Seq(jsonString).toDF()
+ .repartition(1)
+ .write
+ .mode("overwrite")
+ .text(path.getAbsolutePath)
+
+ withClue("Exact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "true") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedExactData.map(d => Row(d)))
+ }
+ }
+
+ withClue("Inexact string parsing") {
+ withSQLConf(SQLConf.JSON_EXACT_STRING_PARSING.key -> "false") {
+ val df = spark.read
+ .schema("data STRING")
+ .option("multiLine", multiLine.toString)
+ .json(path.getAbsolutePath)
+ checkAnswer(df, expectedInexactData.map(d => Row(d)))
+ }
+ }
+ }
+ extractData(
+ s"""{"data": {"white": "space"}}""",
+ expectedInexactData = Seq(s"""{"white":"space"}"""),
+ expectedExactData = Seq(s"""{"white": "space"}""")
+ )
+ extractData(
+ s"""{"data": ["white", "space"]}""",
+ expectedInexactData = Seq(s"""["white","space"]"""),
+ expectedExactData = Seq(s"""["white", "space"]""")
+ )
+ val granularFloat = "-999.99999999999999999999999999999999995"
+ extractData(
+ s"""{"data": {"v": ${granularFloat}}}""",
+ expectedInexactData = Seq(s"""{"v":-1000.0}"""),
Review Comment:
```suggestion
expectedInexactData = Seq("""{"v":-1000.0}"""),
```
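A side note on the expected inexact value here: `-1000.0` is what the numeric token becomes if it is round-tripped through a `Double`, since the extra digits are far below double precision. A minimal sketch, assuming that is effectively what the inexact path does (plain Scala, no Spark required):

```scala
// Round-tripping the high-precision literal through Double collapses it to -1000.0,
// matching the expected inexact output {"v":-1000.0} in the test.
val granularFloat = "-999.99999999999999999999999999999999995"
val roundTripped  = granularFloat.toDouble.toString
assert(roundTripped == "-1000.0")
```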
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]