[
https://issues.apache.org/jira/browse/SPARK-43592?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Yuming Wang updated SPARK-43592:
--------------------------------
Target Version/s: (was: 3.4.0)
> NoSuchMethodError in Spark 3.4 with JDK8u362 & JDK8u372
> -------------------------------------------------------
>
> Key: SPARK-43592
> URL: https://issues.apache.org/jira/browse/SPARK-43592
> Project: Spark
> Issue Type: Bug
> Components: Spark Core
> Affects Versions: 3.4.0
> Environment: JDK: JDK8u362, JDK8u372
> Kubernetes
> Spark 3.4
> Reporter: Shivam Kasat
> Priority: Critical
> Labels: JDK1.8, java, jdk11
>
> My project was on spark 3.3 with JDK8u362 and I tried updating it to spark
> 3.4, Official documentation of spark 3.4 says it works with JDK8u362 and
> above but when I tried upgrading docker base image of spark to JDK8u362 and
> JDK8u372 it is failing at runtime with below error, For JDK8u362 it throws
> error for java.nio.CharBuffer.position method and for JDK8u372 it throws
> error for java.nio.ByteBuffer.flip method. But when I run with JDK11 image in
> spark Docker file it works fine. Am I missing something, or how can I fix
> this issue? I want to run it with JDK8.
> {code:java}
> java.lang.NoSuchMethodError:
> java.nio.CharBuffer.position(I)Ljava/nio/CharBuffer;
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.unescapeSQLString(ParserUtils.scala:220)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.string(ParserUtils.scala:95)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$createString$2(AstBuilder.scala:2632)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.Iterator.foreach(Iterator.scala:943)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.Iterator.foreach$(Iterator.scala:943)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.IterableLike.foreach(IterableLike.scala:74)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.TraversableLike.map(TraversableLike.scala:286)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.AbstractTraversable.map(Traversable.scala:108)
> ~[scala-library-2.12.17.jar:?]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.createString(AstBuilder.scala:2632)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitStringLiteral$1(AstBuilder.scala:2618)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitStringLiteral(AstBuilder.scala:2618)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitStringLiteral(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$StringLiteralContext.accept(SqlBaseParser.java:19511)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitConstantDefault(SqlBaseParserBaseVisitor.java:1735)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$ConstantDefaultContext.accept(SqlBaseParser.java:18373)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitValueExpressionDefault(SqlBaseParserBaseVisitor.java:1567)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$ValueExpressionDefaultContext.accept(SqlBaseParser.java:17491)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$withPredicate$1(AstBuilder.scala:1870)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.withPredicate(AstBuilder.scala:1784)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitPredicated$1(AstBuilder.scala:1768)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitPredicated(AstBuilder.scala:1765)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitPredicated(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$PredicatedContext.accept(SqlBaseParser.java:16909)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitExpression(SqlBaseParserBaseVisitor.java:1518)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$ExpressionContext.accept(SqlBaseParser.java:16766)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitParenthesizedExpression$1(AstBuilder.scala:2361)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitParenthesizedExpression(AstBuilder.scala:2361)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitParenthesizedExpression(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$ParenthesizedExpressionContext.accept(SqlBaseParser.java:18036)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitValueExpressionDefault(SqlBaseParserBaseVisitor.java:1567)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$ValueExpressionDefaultContext.accept(SqlBaseParser.java:17491)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitPredicated$1(AstBuilder.scala:1766)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitPredicated(AstBuilder.scala:1765)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitPredicated(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$PredicatedContext.accept(SqlBaseParser.java:16909)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitLogicalBinary$4(AstBuilder.scala:1694)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at scala.collection.SeqLike.$anonfun$reverseMap$2(SeqLike.scala:295)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.SeqLike.reverseMap(SeqLike.scala:294)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.SeqLike.reverseMap$(SeqLike.scala:289)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.AbstractSeq.reverseMap(Seq.scala:45)
> ~[scala-library-2.12.17.jar:?]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitLogicalBinary$1(AstBuilder.scala:1694)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitLogicalBinary(AstBuilder.scala:1669)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitLogicalBinary(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$LogicalBinaryContext.accept(SqlBaseParser.java:16958)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.expression(AstBuilder.scala:1630)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.withWhereClause(AstBuilder.scala:703)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitCommonSelectQueryClausePlan$2(AstBuilder.scala:807)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$EnhancedLogicalPlan$.optionalMap$extension(ParserUtils.scala:256)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitCommonSelectQueryClausePlan(AstBuilder.scala:807)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$withSelectQuerySpecification$1(AstBuilder.scala:788)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.withSelectQuerySpecification(AstBuilder.scala:776)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitRegularQuerySpecification$1(AstBuilder.scala:668)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitRegularQuerySpecification(AstBuilder.scala:656)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitRegularQuerySpecification(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$RegularQuerySpecificationContext.accept(SqlBaseParser.java:10386)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitQueryPrimaryDefault(SqlBaseParserBaseVisitor.java:902)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$QueryPrimaryDefaultContext.accept(SqlBaseParser.java:9891)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitQueryTermDefault(SqlBaseParserBaseVisitor.java:888)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$QueryTermDefaultContext.accept(SqlBaseParser.java:9658)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.typedVisit(AstBuilder.scala:63)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.plan(AstBuilder.scala:114)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitQuery$1(AstBuilder.scala:120)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitQuery(AstBuilder.scala:119)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitQuery(AstBuilder.scala:58)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$QueryContext.accept(SqlBaseParser.java:6891)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitChildren(AstBuilder.scala:73)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParserBaseVisitor.visitStatementDefault(SqlBaseParserBaseVisitor.java:69)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.SqlBaseParser$StatementDefaultContext.accept(SqlBaseParser.java:1988)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.antlr.v4.runtime.tree.AbstractParseTreeVisitor.visit(AbstractParseTreeVisitor.java:18)
> ~[antlr4-runtime-4.9.3.jar:4.9.3]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.$anonfun$visitSingleStatement$1(AstBuilder.scala:80)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AstBuilder.visitSingleStatement(AstBuilder.scala:80)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AbstractSqlParser.$anonfun$parsePlan$2(ParseDriver.scala:92)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.ParserUtils$.withOrigin(ParserUtils.scala:160)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AbstractSqlParser.$anonfun$parsePlan$1(ParseDriver.scala:92)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parse(ParseDriver.scala:127)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.execution.SparkSqlParser.parse(SparkSqlParser.scala:52)
> ~[spark-sql_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parsePlan(ParseDriver.scala:89)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.SparkSession.$anonfun$sql$2(SparkSession.scala:633)
> ~[spark-sql_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
> ~[spark-catalyst_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:632)
> ~[spark-sql_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827)
> ~[spark-sql_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:630)
> ~[spark-sql_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:671)
> ~[spark-sql_2.12-3.4.0.jar:3.4.0]
> at
> com.cisco.intersight.cep.AdvisoryProcessorTask.processActions(AdvisoryProcessorTask.java:376)
> ~[orion.jar:?]
> at
> com.cisco.intersight.cep.AdvisoryProcessorTask.processAdvisory(AdvisoryProcessorTask.java:329)
> ~[orion.jar:?]
> at
> com.cisco.intersight.cep.AdvisoryProcessorTask.processAdvisories(AdvisoryProcessorTask.java:306)
> ~[orion.jar:?]
> at
> com.cisco.intersight.cep.AdvisoryProcessorTask.run(AdvisoryProcessorTask.java:286)
> ~[orion.jar:?]
> at
> com.cisco.intersight.cep.AdvisoryDriver.main(AdvisoryDriver.java:115)
> ~[orion.jar:?]
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> ~[?:1.8.0_352]
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> ~[?:1.8.0_352]
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> ~[?:1.8.0_352]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_352]
> at
> org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1020)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:192)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:215)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1111)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1120)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> ~[spark-core_2.12-3.4.0.jar:3.4.0] {code}
> Error with JDK8u372
> {code:java}
> org.apache.spark.SparkException: Job aborted due to stage failure: Task
> serialization failed: java.lang.NoSuchMethodError:
> java.nio.ByteBuffer.flip()Ljava/nio/ByteBuffer;
> java.lang.NoSuchMethodError: java.nio.ByteBuffer.flip()Ljava/nio/ByteBuffer;
> at
> org.apache.spark.util.io.ChunkedByteBufferOutputStream.toChunkedByteBuffer(ChunkedByteBufferOutputStream.scala:115)
> at
> org.apache.spark.broadcast.TorrentBroadcast$.blockifyObject(TorrentBroadcast.scala:362)
> at
> org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:160)
> at
> org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:99)
> at
> org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:38)
> at
> org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:78)
> at
> org.apache.spark.SparkContext.broadcastInternal(SparkContext.scala:1548)
> at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1530)
> at
> org.apache.spark.scheduler.DAGScheduler.submitMissingTasks(DAGScheduler.scala:1535)
> at
> org.apache.spark.scheduler.DAGScheduler.submitStage(DAGScheduler.scala:1353)
> at
> org.apache.spark.scheduler.DAGScheduler.handleMapStageSubmitted(DAGScheduler.scala:1334)
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2934)
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2923)
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2912)
> at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
> at
> org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2785)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2721)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2720)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
> ~[scala-library-2.12.17.jar:?]
> at
> scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
> ~[scala-library-2.12.17.jar:?]
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
> ~[scala-library-2.12.17.jar:?]
> at
> org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2720)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGScheduler.submitMissingTasks(DAGScheduler.scala:1545)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGScheduler.submitStage(DAGScheduler.scala:1353)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGScheduler.handleMapStageSubmitted(DAGScheduler.scala:1334)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2934)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2923)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2912)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> Caused by: java.lang.NoSuchMethodError:
> java.nio.ByteBuffer.flip()Ljava/nio/ByteBuffer;
> at
> org.apache.spark.util.io.ChunkedByteBufferOutputStream.toChunkedByteBuffer(ChunkedByteBufferOutputStream.scala:115)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.broadcast.TorrentBroadcast$.blockifyObject(TorrentBroadcast.scala:362)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:160)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:99)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:38)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:78)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.SparkContext.broadcastInternal(SparkContext.scala:1548)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1530)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> at
> org.apache.spark.scheduler.DAGScheduler.submitMissingTasks(DAGScheduler.scala:1535)
> ~[spark-core_2.12-3.4.0.jar:3.4.0]
> ... 6 more
> {code}
--
This message was sent by Atlassian Jira
(v8.20.10#820010)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]