[ 
https://issues.apache.org/jira/browse/SPARK-40678?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Cédric Chantepie updated SPARK-40678:
-------------------------------------
    Description: 
In Spark 3.2 (Scala 2.13), values with {{ArrayType}} are no longer properly 
supported with JSON; e.g.

{noformat}
import org.apache.spark.sql.SparkSession

case class KeyValue(key: String, value: Array[Byte])

val spark = 
SparkSession.builder().master("local[1]").appName("test").getOrCreate()

import spark.implicits._

val df = Seq(Array(KeyValue("foo", "bar".getBytes))).toDF()

df.foreach(r => println(r.json))
{noformat}

Expected:

{noformat}
[{foo, bar}]
{noformat}

Encountered:

{noformat}
java.lang.IllegalArgumentException: Failed to convert value 
ArraySeq([foo,[B@dcdb68f]) (class of class 
scala.collection.mutable.ArraySeq$ofRef}) with the type of 
ArrayType(Seq(StructField(key,StringType,false), 
StructField(value,BinaryType,false)),true) to JSON.
        at org.apache.spark.sql.Row.toJson$1(Row.scala:604)
        at org.apache.spark.sql.Row.jsonValue(Row.scala:613)
        at org.apache.spark.sql.Row.jsonValue$(Row.scala:552)
        at 
org.apache.spark.sql.catalyst.expressions.GenericRow.jsonValue(rows.scala:166)
        at org.apache.spark.sql.Row.json(Row.scala:535)
        at org.apache.spark.sql.Row.json$(Row.scala:535)
        at 
org.apache.spark.sql.catalyst.expressions.GenericRow.json(rows.scala:166)
{noformat}

  was:
In Spark 3.2 (Scala 2.13), values with {{ArrayType}} are no longer properly 
supported; e.g.

{noformat}
import org.apache.spark.sql.SparkSession

case class KeyValue(key: String, value: Array[Byte])

val spark = 
SparkSession.builder().master("local[1]").appName("test").getOrCreate()

import spark.implicits._

val df = Seq(Array(KeyValue("foo", "bar".getBytes))).toDF()

df.foreach(r => println(r.json))
{noformat}

Expected:

{noformat}
[{foo, bar}]
{noformat}

Encountered:

{noformat}
java.lang.IllegalArgumentException: Failed to convert value 
ArraySeq([foo,[B@dcdb68f]) (class of class 
scala.collection.mutable.ArraySeq$ofRef}) with the type of 
ArrayType(Seq(StructField(key,StringType,false), 
StructField(value,BinaryType,false)),true) to JSON.
        at org.apache.spark.sql.Row.toJson$1(Row.scala:604)
        at org.apache.spark.sql.Row.jsonValue(Row.scala:613)
        at org.apache.spark.sql.Row.jsonValue$(Row.scala:552)
        at 
org.apache.spark.sql.catalyst.expressions.GenericRow.jsonValue(rows.scala:166)
        at org.apache.spark.sql.Row.json(Row.scala:535)
        at org.apache.spark.sql.Row.json$(Row.scala:535)
        at 
org.apache.spark.sql.catalyst.expressions.GenericRow.json(rows.scala:166)
{noformat}


> JSON conversion of ArrayType is not properly supported in Spark 3.2/2.13
> ------------------------------------------------------------------------
>
>                 Key: SPARK-40678
>                 URL: https://issues.apache.org/jira/browse/SPARK-40678
>             Project: Spark
>          Issue Type: Bug
>          Components: Input/Output
>    Affects Versions: 3.2.0
>            Reporter: Cédric Chantepie
>            Priority: Major
>
> In Spark 3.2 (Scala 2.13), values with {{ArrayType}} are no longer properly 
> supported with JSON; e.g.
> {noformat}
> import org.apache.spark.sql.SparkSession
> case class KeyValue(key: String, value: Array[Byte])
> val spark = 
> SparkSession.builder().master("local[1]").appName("test").getOrCreate()
> import spark.implicits._
> val df = Seq(Array(KeyValue("foo", "bar".getBytes))).toDF()
> df.foreach(r => println(r.json))
> {noformat}
> Expected:
> {noformat}
> [{foo, bar}]
> {noformat}
> Encountered:
> {noformat}
> java.lang.IllegalArgumentException: Failed to convert value 
> ArraySeq([foo,[B@dcdb68f]) (class of class 
> scala.collection.mutable.ArraySeq$ofRef}) with the type of 
> ArrayType(Seq(StructField(key,StringType,false), 
> StructField(value,BinaryType,false)),true) to JSON.
>       at org.apache.spark.sql.Row.toJson$1(Row.scala:604)
>       at org.apache.spark.sql.Row.jsonValue(Row.scala:613)
>       at org.apache.spark.sql.Row.jsonValue$(Row.scala:552)
>       at 
> org.apache.spark.sql.catalyst.expressions.GenericRow.jsonValue(rows.scala:166)
>       at org.apache.spark.sql.Row.json(Row.scala:535)
>       at org.apache.spark.sql.Row.json$(Row.scala:535)
>       at 
> org.apache.spark.sql.catalyst.expressions.GenericRow.json(rows.scala:166)
> {noformat}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to