[ 
https://issues.apache.org/jira/browse/SPARK-38714?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17564619#comment-17564619
 ] 

Pablo Langa Blanco commented on SPARK-38714:
--------------------------------------------

I have tested this on master and on branch-3.3, and it is resolved. 
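
A quick semantic spot-check of the same expression, without building a DataFrame, is sketched below; the fractional literal is parsed as a DECIMAL, but note the optimizer may constant-fold a literal-only query, so the DataFrame reproduction quoted below remains the real check of the codegen path:

// 100 s * 123456789.11 = 12,345,678,911 s = 142889 days 19:15:11
spark.sql("SELECT INTERVAL '100' SECOND * 123456789.11").show(false)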

> Interval multiplication error
> -----------------------------
>
>                 Key: SPARK-38714
>                 URL: https://issues.apache.org/jira/browse/SPARK-38714
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.3.0
>         Environment: branch-3.3,  Java 8
>  
>            Reporter: chong
>            Priority: Major
>
> Codegen fails when multiplying an interval by a decimal.
>  
> $SPARK_HOME/bin/spark-shell
>  
> import org.apache.spark.sql.Row
> import java.time.Duration
> import java.time.Period
> import org.apache.spark.sql.types._
> val data = Seq(Row(new java.math.BigDecimal("123456789.11")))
> val schema = StructType(Seq(
>   StructField("c1", DecimalType(9, 2))
> ))
> val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
> df.selectExpr("interval '100' second * c1").show(false)
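>  
> As a side note, the same generated-code path should also be exercised when the expression goes through a temporary view instead of selectExpr (a sketch; the view name "t" is illustrative, and it should fail with the same error shown below):
> df.createOrReplaceTempView("t")
> spark.sql("select interval '100' second * c1 from t").show(false)
>  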
> The errors are:
> java.lang.AssertionError: assertion failed:
> Decimal$DecimalIsFractional
> while compiling: <console>
> during phase: globalPhase=terminal, enteringPhase=jvm
> library version: version 2.12.15
> compiler version: version 2.12.15
> reconstructed args: -classpath -Yrepl-class-based -Yrepl-outdir 
> /tmp/spark-83a0cda4-dd0b-472e-ad8b-a4b33b85f613/repl-06489815-5366-4aa0-9419-f01abda8d041
> last tree to typer: TypeTree(class Byte)
> tree position: line 6 of <console>
> tree tpe: Byte
> symbol: (final abstract) class Byte in package scala
> symbol definition: final abstract class Byte extends (a ClassSymbol)
> symbol package: scala
> symbol owners: class Byte
> call site: constructor $eval in object $eval in package $line21
> == Source file context for tree position ==
> 3
> 4 object $eval {
> 5 lazy val $result = $line21.$read.INSTANCE.$iw.$iw.$iw.$iw.$iw.$iw.$iw.$iw.$iw.$iw.res0
> 6 lazy val $print: _root_.java.lang.String = {
> 7 $line21.$read.INSTANCE.$iw.$iw.$iw.$iw.$iw.$iw.$iw.$iw.$iw.$iw
> 8
> 9 ""
> at 
> scala.reflect.internal.SymbolTable.throwAssertionError(SymbolTable.scala:185)
> at scala.reflect.internal.Symbols$Symbol.completeInfo(Symbols.scala:1525)
> at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1514)
> at scala.reflect.internal.Symbols$Symbol.flatOwnerInfo(Symbols.scala:2353)
> at 
> scala.reflect.internal.Symbols$ClassSymbol.companionModule0(Symbols.scala:3346)
> at 
> scala.reflect.internal.Symbols$ClassSymbol.companionModule(Symbols.scala:3348)
> at 
> scala.reflect.internal.Symbols$ModuleClassSymbol.sourceModule(Symbols.scala:3487)
> at 
> scala.reflect.internal.Symbols.$anonfun$forEachRelevantSymbols$1$adapted(Symbols.scala:3802)
> at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
> at scala.reflect.internal.Symbols.markFlagsCompleted(Symbols.scala:3799)
> at scala.reflect.internal.Symbols.markFlagsCompleted$(Symbols.scala:3805)
> at scala.reflect.internal.SymbolTable.markFlagsCompleted(SymbolTable.scala:28)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.finishSym$1(UnPickler.scala:324)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.readSymbol(UnPickler.scala:342)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.readSymbolRef(UnPickler.scala:645)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.readType(UnPickler.scala:413)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.$anonfun$readSymbol$10(UnPickler.scala:357)
> at scala.reflect.internal.pickling.UnPickler$Scan.at(UnPickler.scala:188)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.readSymbol(UnPickler.scala:357)
> at 
> scala.reflect.internal.pickling.UnPickler$Scan.$anonfun$run$1(UnPickler.scala:96)
> at scala.reflect.internal.pickling.UnPickler$Scan.run(UnPickler.scala:88)
> at scala.reflect.internal.pickling.UnPickler.unpickle(UnPickler.scala:47)
> at 
> scala.tools.nsc.symtab.classfile.ClassfileParser.unpickleOrParseInnerClasses(ClassfileParser.scala:1186)
> at 
> scala.tools.nsc.symtab.classfile.ClassfileParser.parseClass(ClassfileParser.scala:468)
> at 
> scala.tools.nsc.symtab.classfile.ClassfileParser.$anonfun$parse$2(ClassfileParser.scala:161)
> at 
> scala.tools.nsc.symtab.classfile.ClassfileParser.$anonfun$parse$1(ClassfileParser.scala:147)
> at 
> scala.tools.nsc.symtab.classfile.ClassfileParser.parse(ClassfileParser.scala:130)
> at 
> scala.tools.nsc.symtab.SymbolLoaders$ClassfileLoader.doComplete(SymbolLoaders.scala:343)
> at 
> scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.complete(SymbolLoaders.scala:250)
> at 
> scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.load(SymbolLoaders.scala:269)
> at scala.reflect.internal.Symbols$Symbol.exists(Symbols.scala:1104)
> at scala.reflect.internal.Symbols$Symbol.toOption(Symbols.scala:2609)
> at scala.tools.nsc.interpreter.IMain.translateSimpleResource(IMain.scala:340)
> at 
> scala.tools.nsc.interpreter.IMain$TranslatingClassLoader.findAbstractFile(IMain.scala:354)
> at 
> scala.reflect.internal.util.AbstractFileClassLoader.findResource(AbstractFileClassLoader.scala:76)
> at java.lang.ClassLoader.getResource(ClassLoader.java:1089)
> at java.lang.ClassLoader.getResourceAsStream(ClassLoader.java:1300)
> at 
> scala.reflect.internal.util.RichClassLoader$.classAsStream$extension(ScalaClassLoader.scala:89)
> at 
> scala.reflect.internal.util.RichClassLoader$.classBytes$extension(ScalaClassLoader.scala:81)
> at 
> scala.reflect.internal.util.ScalaClassLoader.classBytes(ScalaClassLoader.scala:131)
> at 
> scala.reflect.internal.util.ScalaClassLoader.classBytes$(ScalaClassLoader.scala:131)
> at 
> scala.reflect.internal.util.AbstractFileClassLoader.classBytes(AbstractFileClassLoader.scala:41)
> at 
> scala.reflect.internal.util.AbstractFileClassLoader.findClass(AbstractFileClassLoader.scala:70)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:405)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:405)
> at 
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.java:40)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
> at 
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:109)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:405)
> at 
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.java:40)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:348)
> at 
> org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:89)
> at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:317)
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:8618)
> at org.codehaus.janino.UnitCompiler.reclassifyName(UnitCompiler.java:8838)
> at org.codehaus.janino.UnitCompiler.reclassifyName(UnitCompiler.java:8529)
> at org.codehaus.janino.UnitCompiler.reclassify(UnitCompiler.java:8388)
> at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:6900)
> at org.codehaus.janino.UnitCompiler.access$14600(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$22$2$1.visitAmbiguousName(UnitCompiler.java:6518)
> at 
> org.codehaus.janino.UnitCompiler$22$2$1.visitAmbiguousName(UnitCompiler.java:6515)
> at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:4429)
> at org.codehaus.janino.UnitCompiler$22$2.visitLvalue(UnitCompiler.java:6515)
> at org.codehaus.janino.UnitCompiler$22$2.visitLvalue(UnitCompiler.java:6511)
> at org.codehaus.janino.Java$Lvalue.accept(Java.java:4353)
> at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6511)
> at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6490)
> at org.codehaus.janino.Java$Rvalue.accept(Java.java:4321)
> at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6490)
> at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:9110)
> at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:7164)
> at org.codehaus.janino.UnitCompiler.access$16200(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$22$2.visitMethodInvocation(UnitCompiler.java:6538)
> at 
> org.codehaus.janino.UnitCompiler$22$2.visitMethodInvocation(UnitCompiler.java:6511)
> at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:5286)
> at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6511)
> at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6490)
> at org.codehaus.janino.Java$Rvalue.accept(Java.java:4321)
> at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6490)
> at 
> org.codehaus.janino.UnitCompiler.findMostSpecificIInvocable(UnitCompiler.java:9306)
> at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:9192)
> at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:9110)
> at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:5055)
> at org.codehaus.janino.UnitCompiler.access$9100(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$16.visitMethodInvocation(UnitCompiler.java:4482)
> at 
> org.codehaus.janino.UnitCompiler$16.visitMethodInvocation(UnitCompiler.java:4455)
> at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:5286)
> at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:4455)
> at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:5683)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3839)
> at org.codehaus.janino.UnitCompiler.access$6100(UnitCompiler.java:226)
> at org.codehaus.janino.UnitCompiler$13.visitAssignment(UnitCompiler.java:3799)
> at org.codehaus.janino.UnitCompiler$13.visitAssignment(UnitCompiler.java:3779)
> at org.codehaus.janino.Java$Assignment.accept(Java.java:4690)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3779)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2366)
> at org.codehaus.janino.UnitCompiler.access$1800(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1497)
> at 
> org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1490)
> at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:3064)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
> at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1573)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1559)
> at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:226)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1496)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1490)
> at org.codehaus.janino.Java$Block.accept(Java.java:2969)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2478)
> at org.codehaus.janino.UnitCompiler.access$1900(UnitCompiler.java:226)
> at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1498)
> at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1490)
> at org.codehaus.janino.Java$IfStatement.accept(Java.java:3140)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
> at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1573)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3420)
> at 
> org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1362)
> at 
> org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1335)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:807)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:975)
> at org.codehaus.janino.UnitCompiler.access$700(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:392)
> at 
> org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:384)
> at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1445)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:384)
> at 
> org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1312)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:833)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:410)
> at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:389)
> at 
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:384)
> at 
> org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1594)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:384)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:362)
> at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:226)
> at 
> org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:336)
> at 
> org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:333)
> at org.codehaus.janino.Java$CompilationUnit.accept(Java.java:363)
> at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:333)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:235)
> at 
> org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:464)
> at 
> org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:314)
> at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:237)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:205)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1490)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1587)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1584)
> at 
> org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at 
> org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> at 
> org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:4000)
> at org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at 
> org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1437)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:378)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:331)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:34)
> at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:1363)
> at 
> org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$Serializer.apply(ExpressionEncoder.scala:204)
> at 
> org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$Serializer.apply(ExpressionEncoder.scala:193)
> at scala.collection.Iterator$$anon$10.next(Iterator.scala:461)
> at 
> org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown
>  Source)
> at 
> org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
> at 
> org.apache.spark.sql.execution.WholeStageCodegenExec$$anon$1.hasNext(WholeStageCodegenExec.scala:760)
> at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:364)
> at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:890)
> at 
> org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:890)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
> at org.apache.spark.scheduler.Task.run(Task.scala:136)
> at 
> org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:507)
> at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:510)
> at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> at java.lang.Thread.run(Thread.java:748)
> error: error while loading Decimal, class file 
> '/home/chong/progs/sparks/spark-home/jars/spark-catalyst_2.12-3.3.0-SNAPSHOT.jar(org/apache/spark/sql/types/Decimal.class)'
>  is broken
> (class java.lang.RuntimeException/error reading Scala signature of 
> Decimal.class: assertion failed:
> Decimal$DecimalIsFractional
> while compiling: <console>
> during phase: globalPhase=terminal, enteringPhase=jvm
> library version: version 2.12.15
> compiler version: version 2.12.15
> reconstructed args: -classpath -Yrepl-class-based -Yrepl-outdir 
> /tmp/spark-83a0cda4-dd0b-472e-ad8b-a4b33b85f613/repl-06489815-5366-4aa0-9419-f01abda8d041
> last tree to typer: TypeTree(class Byte)
> tree position: line 6 of <console>
> tree tpe: Byte
> symbol: (final abstract) class Byte in package scala
> symbol definition: final abstract class Byte extends (a ClassSymbol)
> symbol package: scala
> symbol owners: class Byte
> call site: constructor $eval in object $eval in package $line21
> == Source file context for tree position ==
>      3
>      4 object $eval {
>      5   lazy val $result = res0
>      6   lazy val $print: _root_.java.lang.String =  {
>      7     $iw
>      8
>      9 "" )
> +----------------------------+
> |(INTERVAL '100' SECOND * c1)|
> +----------------------------+
> |null                        |
> +----------------------------+
>  
>  



