chinaboyll opened a new issue #2344:
URL: https://github.com/apache/iceberg/issues/2344


Spark version: 3.0
Iceberg version: 0.11.0
   -------------------------------------------------------
Start spark-sql:

    spark3-sql --master yarn --packages org.apache.iceberg:iceberg-spark3-runtime:0.11.0 \
        --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
        --conf spark.sql.catalog.zdm=org.apache.iceberg.spark.SparkCatalog \
        --conf spark.sql.catalog.zdm.type=hadoop \
        --conf spark.sql.catalog.zdm.warehouse=hdfs://HDFS81339/tmp/iceberg
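
For reference, the same catalog settings expressed programmatically, in case someone wants to reproduce this outside the CLI. This is only a minimal sketch; every key and value below is copied from the command above, and the app name is made up:

    import org.apache.spark.sql.SparkSession

    // Equivalent programmatic configuration of the Hadoop catalog used above.
    val spark = SparkSession.builder()
      .appName("iceberg-merge-repro") // hypothetical app name
      .master("yarn")                 // mirrors --master yarn
      .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
      .config("spark.sql.catalog.zdm", "org.apache.iceberg.spark.SparkCatalog")
      .config("spark.sql.catalog.zdm.type", "hadoop")
      .config("spark.sql.catalog.zdm.warehouse", "hdfs://HDFS81339/tmp/iceberg")
      .getOrCreate()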
   -------------------------------------------------------
When I run:

    MERGE INTO zdm.test.iceberg_spark s USING (SELECT * FROM zdm.test.iceberg_spark1) s1 ON s.id = s1.id WHEN NOT MATCHED THEN INSERT *;
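
A minimal setup that exercises the same code path, using the `spark` session from the sketch above. The (id, data) schema is only an assumption for illustration; the table names are the real ones:

    // Hypothetical two-column schema; any pair of Iceberg tables should hit
    // the same MERGE rewrite rule.
    spark.sql("CREATE TABLE IF NOT EXISTS zdm.test.iceberg_spark (id BIGINT, data STRING) USING iceberg")
    spark.sql("CREATE TABLE IF NOT EXISTS zdm.test.iceberg_spark1 (id BIGINT, data STRING) USING iceberg")
    spark.sql("INSERT INTO zdm.test.iceberg_spark1 VALUES (1, 'a'), (2, 'b')")

    // The failing statement, verbatim from the CLI session above.
    spark.sql(
      """MERGE INTO zdm.test.iceberg_spark s
        |USING (SELECT * FROM zdm.test.iceberg_spark1) s1
        |ON s.id = s1.id
        |WHEN NOT MATCHED THEN INSERT *""".stripMargin)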
   The error is as follows:
    java.lang.NoSuchMethodError: org.apache.spark.sql.catalyst.plans.logical.MergeAction.condition()Lscala/Option;
            at org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto.org$apache$spark$sql$catalyst$optimizer$RewriteMergeInto$$getClauseCondition(RewriteMergeInto.scala:181)
            at org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto$$anonfun$apply$1.$anonfun$applyOrElse$1(RewriteMergeInto.scala:93)
            at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
            at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
            at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
            at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
            at scala.collection.TraversableLike.map(TraversableLike.scala:238)
            at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
            at scala.collection.AbstractTraversable.map(Traversable.scala:108)
            at org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto$$anonfun$apply$1.applyOrElse(RewriteMergeInto.scala:93)
            at org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto$$anonfun$apply$1.applyOrElse(RewriteMergeInto.scala:78)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDown$2(AnalysisHelper.scala:108)
            at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:72)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDown$1(AnalysisHelper.scala:108)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDown(AnalysisHelper.scala:106)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDown$(AnalysisHelper.scala:104)
            at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperators(AnalysisHelper.scala:73)
            at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperators$(AnalysisHelper.scala:72)
            at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:29)
            at org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto.apply(RewriteMergeInto.scala:78)
            at org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto.apply(RewriteMergeInto.scala:67)
            at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:149)
            at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
            at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
            at scala.collection.immutable.List.foldLeft(List.scala:89)
            at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:146)
            at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:138)
            at scala.collection.immutable.List.foreach(List.scala:392)
            at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:138)
            at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:116)
            at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
            at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:116)
            at org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:82)
            at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
            at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:133)
            at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
            at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:133)
            at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:82)
            at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:79)
            at org.apache.spark.sql.execution.QueryExecution.$anonfun$writePlans$4(QueryExecution.scala:197)
            at org.apache.spark.sql.catalyst.plans.QueryPlan$.append(QueryPlan.scala:381)
            at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$writePlans(QueryExecution.scala:197)
            at org.apache.spark.sql.execution.QueryExecution.toString(QueryExecution.scala:207)
            at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:95)
            at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
            at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
            at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
            at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
            at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
            at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229)
            at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
            at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
            at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
            at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
            at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
            at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
            at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:650)
            at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:63)
            at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377)
            at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496)
            at scala.collection.Iterator.foreach(Iterator.scala:941)
            at scala.collection.Iterator.foreach$(Iterator.scala:941)
            at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
            at scala.collection.IterableLike.foreach(IterableLike.scala:74)
            at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
            at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
            at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490)
            at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282)
            at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
            at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
            at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
            at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
            at java.lang.reflect.Method.invoke(Method.java:498)
            at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
            at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928)
            at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
            at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
            at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
            at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007)
            at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016)
            at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
   ------------------------------------------------------------------
I don't know what's wrong; please help.
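
One extra data point that may be useful: a NoSuchMethodError at runtime generally means a binary mismatch between the Spark jars on the classpath and the Spark version that the jar making the call (here, the iceberg-spark3-runtime) was compiled against. A quick sanity check, from a spark-shell started on the same node (just a version-check sketch, not a fix):

    // Print the Spark version actually on the classpath at runtime,
    // to confirm it really is the 3.0 line reported above.
    println(spark.version)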

