YuanGuanhu created SPARK-35641:
----------------------------------

             Summary: TPCDS Q32 running failed
                 Key: SPARK-35641
                 URL: https://issues.apache.org/jira/browse/SPARK-35641
             Project: Spark
          Issue Type: Bug
          Components: SQL
    Affects Versions: 3.1.2, 3.1.1, 3.1.0, 3.0.2, 3.0.1, 3.0.0
         Environment: Spark 3.1.1
            Reporter: YuanGuanhu


The TPCDS Q32 SQL query fails when run.
{code:java}
// code placeholder
{code}
SELECT 1 AS `excess discount amount` from catalog_sales, item, date_dim where 
i_manufact_id = 977 AND i_item_sk = cs_item_sk AND d_date BETWEEN '2000-01-27' 
AND (cast('2000-01-27' AS DATE) + interval 90 days)  AND d_date_sk = 
cs_sold_date_sk AND cs_ext_discount_amt > (SELECT 1.3 * 
avg(cs_ext_discount_amt) FROM catalog_sales, date_dim WHERE cs_item_sk = 
i_item_sk  AND d_date BETWEEN '2000-01-27]' AND (cast('2000-01-27' AS DATE) + 
interval 90 days)  AND d_date_sk = cs_sold_date_sk) LIMIT 10;SELECT 1 AS 
`excess discount amount` from catalog_sales, item, date_dim where i_manufact_id 
= 977 AND i_item_sk = cs_item_sk AND d_date BETWEEN '2000-01-27' AND 
(cast('2000-01-27' AS DATE) + interval 90 days)  AND d_date_sk = 
cs_sold_date_sk AND cs_ext_discount_amt > (SELECT 1.3 * 
avg(cs_ext_discount_amt) FROM catalog_sales, date_dim WHERE cs_item_sk = 
i_item_sk  AND d_date BETWEEN '2000-01-27]' AND (cast('2000-01-27' AS DATE) + 
interval 90 days)  AND d_date_sk = cs_sold_date_sk) LIMIT 10;21/06/03 19:36:51 
ERROR SparkSQLDriver: Failed in [SELECT 1 AS `excess discount amount` from 
catalog_sales, item, date_dim where i_manufact_id = 977 AND i_item_sk = 
cs_item_sk AND d_date BETWEEN '2000-01-27' AND (cast('2000-01-27' AS DATE) + 
interval 90 days)  AND d_date_sk = cs_sold_date_sk AND cs_ext_discount_amt > 
(SELECT 1.3 * avg(cs_ext_discount_amt) FROM catalog_sales, date_dim WHERE 
cs_item_sk = i_item_sk  AND d_date BETWEEN '2000-01-27]' AND (cast('2000-01-27' 
AS DATE) + interval 90 days)  AND d_date_sk = cs_sold_date_sk) LIMIT 
10]java.lang.RuntimeException: Unexpected operator in scalar subquery: 
LocalRelation <empty>, [(CAST(1.3 AS DOUBLE) * avg(cs_ext_discount_amt))#491, 
cs_item_sk#443] at scala.sys.package$.error(package.scala:30) at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$.evalPlan$1(subquery.scala:437)
 at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$.evalSubqueryOnZeroTups(subquery.scala:440)
 at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$.$anonfun$constructLeftJoins$1(subquery.scala:497)
 at scala.collection.IndexedSeqOptimized.foldLeft(IndexedSeqOptimized.scala:60) 
at scala.collection.IndexedSeqOptimized.foldLeft$(IndexedSeqOptimized.scala:68) 
at scala.collection.mutable.ArrayBuffer.foldLeft(ArrayBuffer.scala:49) at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$.org$apache$spark$sql$catalyst$optimizer$RewriteCorrelatedScalarSubquery$$constructLeftJoins(subquery.scala:493)
 at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$$anonfun$apply$3.applyOrElse(subquery.scala:597)
 at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$$anonfun$apply$3.applyOrElse(subquery.scala:570)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:309)
 at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:72)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:309) 
at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:314)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:399)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:237)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:397) at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:350) at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:314) 
at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:314)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:399)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:237)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:397) at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:350) at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:314) 
at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:314)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:399)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:237)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:397) at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:350) at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:314) 
at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:314)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:399)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:237)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:397) at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:350) at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:314) 
at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:314)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:399)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:237)
 at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:397) at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:350) at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:314) 
at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
 at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:298) 
at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$.apply(subquery.scala:570)
 at 
org.apache.spark.sql.catalyst.optimizer.RewriteCorrelatedScalarSubquery$.apply(subquery.scala:317)
 at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:149)
 at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126) 
at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122) 
at scala.collection.immutable.List.foldLeft(List.scala:89) at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:146)
 at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:138)
 at scala.collection.immutable.List.foreach(List.scala:392) at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:138)
 at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:116)
 at 
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
 at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:116)
 at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:82)
 at 
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
 at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:133)
 at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763) at 
org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:133)
 at 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:82)
 at 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:79)
 at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$writePlans$4(QueryExecution.scala:197)
 at org.apache.spark.sql.catalyst.plans.QueryPlan$.append(QueryPlan.scala:381) 
at 
org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$writePlans(QueryExecution.scala:197)
 at 
org.apache.spark.sql.execution.QueryExecution.toString(QueryExecution.scala:207)
 at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:95)
 at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
 at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
 at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763) at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
 at 
org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:65)
 at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377)
 at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496)
 at scala.collection.Iterator.foreach(Iterator.scala:941) at 
scala.collection.Iterator.foreach$(Iterator.scala:941) at 
scala.collection.AbstractIterator.foreach(Iterator.scala:1429) at 
scala.collection.IterableLike.foreach(IterableLike.scala:74) at 
scala.collection.IterableLike.foreach$(IterableLike.scala:73) at 
scala.collection.AbstractIterable.foreach(Iterable.scala:56) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490)
 at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282)
 at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 at java.lang.reflect.Method.invoke(Method.java:498) at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928)
 at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) at 
org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) at 
org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007) at 
org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016) at 
org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to