AngersZhuuuu edited a comment on pull request #34308:
URL: https://github.com/apache/spark/pull/34308#issuecomment-962774932
> So what does the new error message look like? Can you provide a stacktrace?

Full stack trace:
```
java.lang.UnsupportedOperationException: Decoding to Int is not supported when reading column `_1` by PlainIntegerDictionary while reading file /Users/yi.zhu/Documents/project/Angerszhuuuu/spark/target/tmp/spark-fb0360ae-32ce-4504-93c5-214167e7258a/part-00001-c8610250-45a7-4b80-ae56-6e7e0ee295bd-c000.snappy.parquet
  at org.apache.spark.sql.errors.QueryExecutionErrors$.unsupportedParquetDictionaryDecodingError(QueryExecutionErrors.scala:603)
  at org.apache.spark.sql.errors.QueryExecutionErrors.unsupportedParquetDictionaryDecodingError(QueryExecutionErrors.scala)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetDictionary.decodeToInt(ParquetDictionary.java:48)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetIOSuite.$anonfun$new$234(ParquetIOSuite.scala:1106)
  at scala.runtime.java8.JFunction0$mcI$sp.apply(JFunction0$mcI$sp.java:23)
  at org.scalatest.Assertions.intercept(Assertions.scala:749)
  at org.scalatest.Assertions.intercept$(Assertions.scala:746)
  at org.scalatest.funsuite.AnyFunSuite.intercept(AnyFunSuite.scala:1563)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetIOSuite.$anonfun$new$233(ParquetIOSuite.scala:1105)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetIOSuite.$anonfun$new$233$adapted(ParquetIOSuite.scala:1093)
  at org.apache.spark.sql.execution.datasources.FileBasedDataSourceTest.$anonfun$withDataSourceFile$1(FileBasedDataSourceTest.scala:70)
  at org.apache.spark.sql.execution.datasources.FileBasedDataSourceTest.$anonfun$withDataSourceFile$1$adapted(FileBasedDataSourceTest.scala:68)
  at org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath(SQLHelper.scala:69)
  at org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath$(SQLHelper.scala:66)
  at org.apache.spark.sql.QueryTest.withTempPath(QueryTest.scala:34)
  at org.apache.spark.sql.execution.datasources.FileBasedDataSourceTest.withDataSourceFile(FileBasedDataSourceTest.scala:68)
  at org.apache.spark.sql.execution.datasources.FileBasedDataSourceTest.withDataSourceFile$(FileBasedDataSourceTest.scala:65)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetIOSuite.withDataSourceFile(ParquetIOSuite.scala:59)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetTest.withParquetFile(ParquetTest.scala:65)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetTest.withParquetFile$(ParquetTest.scala:63)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetIOSuite.withParquetFile(ParquetIOSuite.scala:59)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetIOSuite.$anonfun$new$229(ParquetIOSuite.scala:1093)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
  at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
  at org.scalatest.Transformer.apply(Transformer.scala:22)
  at org.scalatest.Transformer.apply(Transformer.scala:20)
  at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
  at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:190)
  at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
  at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
  at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:62)
  at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
  at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
  at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:62)
  at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
  at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
  at scala.collection.immutable.List.foreach(List.scala:431)
  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
  at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
  at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
  at org.scalatest.Suite.run(Suite.scala:1112)
  at org.scalatest.Suite.run$(Suite.scala:1094)
  at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
  at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
  at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
  at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
  at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
  at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:62)
  at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
  at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
  at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
  at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:62)
  at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
  at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
  at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
  at java.util.concurrent.FutureTask.run(FutureTask.java:266)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.UnsupportedOperationException: org.apache.parquet.column.values.dictionary.PlainValuesDictionary$PlainIntegerDictionary
  at org.apache.parquet.column.Dictionary.decodeToLong(Dictionary.java:49)
  at org.apache.spark.sql.execution.datasources.parquet.ParquetDictionary.decodeToInt(ParquetDictionary.java:43)
  ... 64 more
```
> Also, I think we should include the column name in the error message if possible, otherwise it might be difficult to debug when you have 300 dictionary encoded columns.

Done.
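For reference, a minimal ScalaTest sketch of the kind of assertion this enables, checking that the column name shows up in the message. The `failingDecodeToInt` helper below is a hypothetical stand-in that just reproduces the message shape from the stack trace above; it is not the actual `ParquetDictionary`/`ParquetIOSuite` code.

```scala
import org.scalatest.funsuite.AnyFunSuite

class DictionaryDecodingErrorMessageSketch extends AnyFunSuite {

  // Hypothetical stand-in: throws an exception shaped like the one above,
  // so the assertion pattern can be shown without Spark on the classpath.
  private def failingDecodeToInt(column: String): Int =
    throw new UnsupportedOperationException(
      s"Decoding to Int is not supported when reading column `$column` " +
        "by PlainIntegerDictionary")

  test("dictionary decoding error names the offending column") {
    val e = intercept[UnsupportedOperationException] {
      failingDecodeToInt("_1")
    }
    // The column name in the message is what makes the failure traceable
    // when many dictionary-encoded columns are being read.
    assert(e.getMessage.contains("column `_1`"))
  }
}
```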