dengziming commented on code in PR #42939:
URL: https://github.com/apache/spark/pull/42939#discussion_r1329790053


##########
common/utils/src/main/resources/error/error-classes.json:
##########
@@ -332,6 +332,11 @@
     ],
     "sqlState" : "22003"
   },
+  "CLASS_UNSUPPORTED_BY_MAP_OBJECTS" : {
+    "message" : [
+      "class `<cls>` is not supported by `MapObjects` as resulting collection."

Review Comment:
   Nice improvement, done.
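
   For context, a minimal sketch of how a helper raising this error class could look (the method name and its placement are assumptions for illustration, not the PR's exact code; only the `cls` parameter key is fixed by the message template above):

   ```scala
   import org.apache.spark.SparkRuntimeException

   // Hypothetical helper: the messageParameters key "cls" must match the
   // <cls> placeholder declared for CLASS_UNSUPPORTED_BY_MAP_OBJECTS in
   // error-classes.json.
   def classUnsupportedByMapObjectsError(cls: Class[_]): SparkRuntimeException = {
     new SparkRuntimeException(
       errorClass = "CLASS_UNSUPPORTED_BY_MAP_OBJECTS",
       messageParameters = Map("cls" -> cls.getName))
   }
   ```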



##########
sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala:
##########
@@ -2561,6 +2563,40 @@ class DatasetSuite extends QueryTest
 
     checkDataset(ds.filter(f(col("_1"))), Tuple1(ValueClass(2)))
   }
+
+  test("CLASS_UNSUPPORTED_BY_MAP_OBJECTS when creating dataset") {
+    withSQLConf(
+      // Set CODEGEN_FACTORY_MODE to NO_CODEGEN to reproduce CLASS_UNSUPPORTED_BY_MAP_OBJECTS
+      SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.NO_CODEGEN.toString) {
+      // Create our own encoder to override the default encoder from spark.implicits._
+      implicit val im: ExpressionEncoder[Array[Int]] = ExpressionEncoder(
+        AgnosticEncoders.IterableEncoder(
+          ClassTag(classOf[Array[Int]]), BoxedIntEncoder, false, false))
+
+      val df = spark.createDataset(Seq(Array(1)))
+      val exception = intercept[org.apache.spark.SparkRuntimeException] {
+        df.collect()
+      }
+      val expressions = im.resolveAndBind(df.queryExecution.logical.output,
+        spark.sessionState.analyzer)
+        .createDeserializer().expressions
+
+      // Expression decoding error
+      checkError(
+        exception = exception,
+        errorClass = "_LEGACY_ERROR_TEMP_2151",

Review Comment:
   Yes, I created SPARK-45213 for this.
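
   For reference, the assertion truncated above would plausibly continue along these lines (a hedged sketch only: the parameter names `e` and `expressions` are assumptions based on the `_LEGACY_ERROR_TEMP_2151` message template, not verified against the PR):

   ```scala
   // Sketch only: "e" would carry the decoding failure message and
   // "expressions" the bound deserializer expressions; the exact strings
   // depend on the runtime error.
   checkError(
     exception = exception,
     errorClass = "_LEGACY_ERROR_TEMP_2151",
     parameters = Map(
       "e" -> exception.getCause.toString,
       "expressions" -> expressions.map(
         _.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")))
   ```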



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

