[ 
https://issues.apache.org/jira/browse/HIVE-29029?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Stamatis Zampetakis updated HIVE-29029:
---------------------------------------
    Summary: ClassCastException when compiling query with STACK UDTF in 
multiple UNION ALL branches  (was: When stack UDTF is used with UNION ALL 
throwing ClassCastException in CBO mode.)

> ClassCastException when compiling query with STACK UDTF in multiple UNION ALL 
> branches
> --------------------------------------------------------------------------------------
>
>                 Key: HIVE-29029
>                 URL: https://issues.apache.org/jira/browse/HIVE-29029
>             Project: Hive
>          Issue Type: Bug
>          Components: HiveServer2
>            Reporter: Dayakar M
>            Assignee: Dayakar M
>            Priority: Major
>              Labels: pull-request-available
>
> When the *stack* UDTF is used in multiple UNION ALL branches, compilation 
> throws a ClassCastException in CBO mode. 
> {noformat}
> java.lang.ClassCastException: class org.apache.calcite.rex.RexLiteral cannot 
> be cast to class org.apache.calcite.rex.RexCall 
> (org.apache.calcite.rex.RexLiteral and org.apache.calcite.rex.RexCall are in 
> unnamed module of loader 'app')
>     at 
> org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveUnionSimpleSelectsToInlineTableRule.onMatch(HiveUnionSimpleSelectsToInlineTableRule.java:144)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.AbstractRelOptPlanner.fireRule(AbstractRelOptPlanner.java:337)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.calcite.plan.hep.HepPlanner.applyRule(HepPlanner.java:556) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.calcite.plan.hep.HepPlanner.applyRules(HepPlanner.java:420) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepPlanner.executeRuleInstance(HepPlanner.java:243)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepInstruction$RuleInstance$State.execute(HepInstruction.java:178)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepPlanner.lambda$executeProgram$0(HepPlanner.java:211)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hive.com.google.common.collect.ImmutableList.forEach(ImmutableList.java:397)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepPlanner.executeProgram(HepPlanner.java:210) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepProgram$State.execute(HepProgram.java:118) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepPlanner.executeProgram(HepPlanner.java:205) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.plan.hep.HepPlanner.findBestExp(HepPlanner.java:191) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.executeProgram(CalcitePlanner.java:2584)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.executeProgram(CalcitePlanner.java:2544)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.executeProgram(CalcitePlanner.java:2538)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.applyPostJoinOrderingTransform(CalcitePlanner.java:2457)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.apply(CalcitePlanner.java:1748)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.apply(CalcitePlanner.java:1579)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.tools.Frameworks.lambda$withPlanner$0(Frameworks.java:140) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.calcite.prepare.CalcitePrepareImpl.perform(CalcitePrepareImpl.java:936)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.calcite.tools.Frameworks.withPrepare(Frameworks.java:191) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.calcite.tools.Frameworks.withPlanner(Frameworks.java:135) 
> ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.logicalPlan(CalcitePlanner.java:1335)
>  ~[hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.genOPTree(CalcitePlanner.java:587)
>  [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:13170)
>  [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:480)
>  [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:336)
>  [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:224) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:109) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:498) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:450) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:414) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:408) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:126)
>  [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:234) 
> [hive-exec-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257) 
> [hive-cli-4.1.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201) 
> [hive-cli-4.1.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127) 
> [hive-cli-4.1.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425) 
> [hive-cli-4.1.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356) 
> [hive-cli-4.1.0-SNAPSHOT.jar:?]
>     at 
> org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:746) 
> [hive-it-util-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:716) 
> [hive-it-util-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:116)
>  [hive-it-util-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157) 
> [hive-it-util-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at 
> org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
>  [test-classes/:?]
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native 
> Method) ~[?:?]
>     at 
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
>  ~[?:?]
>     at 
> java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  ~[?:?]
>     at java.base/java.lang.reflect.Method.invoke(Method.java:569) ~[?:?]
>     at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>  [junit-4.13.2.jar:4.13.2]
>     at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>  [junit-4.13.2.jar:4.13.2]
>     at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>  [junit-4.13.2.jar:4.13.2]
>     at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>  [junit-4.13.2.jar:4.13.2]
>     at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135)
>  [hive-it-util-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) 
> [junit-4.13.2.jar:4.13.2]
>     at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>  [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) 
> [junit-4.13.2.jar:4.13.2]
>     at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>  [junit-4.13.2.jar:4.13.2]
>     at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>  [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:413) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.Suite.runChild(Suite.java:128) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.Suite.runChild(Suite.java:27) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) 
> [junit-4.13.2.jar:4.13.2]
>     at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95)
>  [hive-it-util-4.1.0-SNAPSHOT.jar:4.1.0-SNAPSHOT]
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) 
> [junit-4.13.2.jar:4.13.2]
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:413) 
> [junit-4.13.2.jar:4.13.2]
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:316)
>  [surefire-junit4-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:240)
>  [surefire-junit4-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:214)
>  [surefire-junit4-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:155)
>  [surefire-junit4-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:385)
>  [surefire-booter-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:162) 
> [surefire-booter-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:507) 
> [surefire-booter-3.5.1.jar:3.5.1]
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:495) 
> [surefire-booter-3.5.1.jar:3.5.1]{noformat}
> Reproduction steps:
> _*stack_with_unionall.q*_ file
> {code:java}
> select stack(3,'A',10,date '2015-01-01','z','B',20,date 
> '2016-01-01','y','C',30,date '2017-08-09','x') as (col0,col1,col2,col3)
> union all
> select stack(3,'A',10,date '2015-01-01','n','B',20,date 
> '2016-01-01','m','C',30,date '2017-08-09','l') as (col0,col1,col2,col3);{code}
> Command to execute:
> {code:java}
> mvn test -Dtest.output.overwrite -Dtest=TestMiniLlapLocalCliDriver 
> -Dqfile=stack_with_unionall.q -pl itests/qtest -Pitests {code}
>  
>  



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to