CommanderWahid opened a new issue, #2472:
URL: https://github.com/apache/sedona/issues/2472
Hello, I’m encountering difficulties setting up Sedona 1.8 on Databricks (DBR 17.3 LTS). I used the following jars for Spark 4.0 (a sketch of the cluster Spark config follows the log below):

<img width="764" height="195" alt="Image" src="https://github.com/user-attachments/assets/89f8febd-2a06-4c39-932c-0d8f56d237e8" />

The error message:

<img width="263" height="271" alt="Image" src="https://github.com/user-attachments/assets/e9d7aba1-c879-4a40-832d-0e9d5f71281c" />

And the log details:

```
ERROR DriverDaemon$: XXX Fatal uncaught exception. Terminating driver.
java.lang.NoClassDefFoundError: org/apache/spark/sql/catalyst/expressions/FoldableUnevaluable
    at java.base/java.lang.Class.getDeclaredConstructors0(Native Method)
    at java.base/java.lang.Class.privateGetDeclaredConstructors(Class.java:3373)
    at java.base/java.lang.Class.getConstructor0(Class.java:3578)
    at java.base/java.lang.Class.getConstructor(Class.java:2271)
    at org.apache.sedona.sql.UDF.AbstractCatalog.function(AbstractCatalog.scala:40)
    at org.apache.sedona.sql.UDF.Catalog$.<clinit>(Catalog.scala:351)
    at org.apache.sedona.spark.SedonaContext$.create(SedonaContext.scala:116)
    at org.apache.sedona.spark.SedonaContext$.create(SedonaContext.scala:61)
    at org.apache.sedona.sql.SedonaSqlExtensions.$anonfun$apply$1(SedonaSqlExtensions.scala:35)
    at org.apache.spark.sql.SparkSessionExtensions.$anonfun$buildCheckRules$1(SparkSessionExtensions.scala:287)
    at scala.collection.StrictOptimizedIterableOps.map(StrictOptimizedIterableOps.scala:100)
    at scala.collection.StrictOptimizedIterableOps.map$(StrictOptimizedIterableOps.scala:87)
    at scala.collection.mutable.ArrayBuffer.map(ArrayBuffer.scala:42)
    at org.apache.spark.sql.SparkSessionExtensions.buildCheckRules(SparkSessionExtensions.scala:287)
    at com.databricks.sql.HiveDatabricksEdge.singlePassCustomResolutionChecks(HiveDatabricksEdge.scala:62)
    at com.databricks.sql.HiveDatabricksEdge.singlePassCustomResolutionChecks$(HiveDatabricksEdge.scala:39)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.singlePassCustomResolutionChecks(HiveSessionStateBuilder.scala:58)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anon$1.<init>(HiveSessionStateBuilder.scala:149)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.analyzer(HiveSessionStateBuilder.scala:102)
    at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$9(BaseSessionStateBuilder.scala:587)
    at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:124)
    at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:124)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyAnalyzed$3(QueryExecution.scala:355)
    at com.databricks.spark.util.FrameProfiler$.$anonfun$record$1(FrameProfiler.scala:114)
    at com.databricks.spark.util.FrameProfilerExporter$.maybeExportFrameProfiler(FrameProfilerExporter.scala:200)
    at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:105)
    at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:675)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$8(QueryExecution.scala:862)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withExecutionPhase$1(SQLExecution.scala:160)
    at com.databricks.util.TracingSpanUtils$.withTracing(TracingSpanUtils.scala:239)
    at com.databricks.tracing.TracingUtils$.withTracing(TracingUtils.scala:296)
    at com.databricks.spark.util.DatabricksTracingHelper.withSpan(DatabricksSparkTracingHelper.scala:112)
    at com.databricks.spark.util.DBRTracing$.withSpan(DBRTracing.scala:47)
    at org.apache.spark.sql.execution.SQLExecution$.withExecutionPhase(SQLExecution.scala:141)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$7(QueryExecution.scala:862)
    at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:1513)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$5(QueryExecution.scala:855)
    at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$4(QueryExecution.scala:852)
    at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$3(QueryExecution.scala:852)
    at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:851)
    at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)
    at org.apache.spark.sql.execution.QueryExecution.withQueryExecutionId(QueryExecution.scala:839)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:850)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:860)
    at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:849)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyAnalyzed$2(QueryExecution.scala:337)
    at com.databricks.sql.util.MemoryTrackerHelper.withMemoryTracking(MemoryTrackerHelper.scala:111)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyAnalyzed$1(QueryExecution.scala:336)
    at scala.util.Try$.apply(Try.scala:217)
    at org.apache.spark.util.Utils$.doTryWithCallerStacktrace(Utils.scala:1684)
    at org.apache.spark.util.LazyTry.tryT$lzycompute(LazyTry.scala:60)
    at org.apache.spark.util.LazyTry.tryT(LazyTry.scala:59)
    at org.apache.spark.util.LazyTry.get(LazyTry.scala:75)
    at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:397)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:310)
    at org.apache.spark.sql.classic.Dataset$.$anonfun$ofRows$3(Dataset.scala:153)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:860)
    at org.apache.spark.sql.classic.SparkSession.$anonfun$withActiveAndFrameProfiler$1(SparkSession.scala:1116)
    at com.databricks.spark.util.FrameProfiler$.$anonfun$record$1(FrameProfiler.scala:114)
    at com.databricks.spark.util.FrameProfilerExporter$.maybeExportFrameProfiler(FrameProfilerExporter.scala:200)
    at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:105)
    at org.apache.spark.sql.classic.SparkSession.withActiveAndFrameProfiler(SparkSession.scala:1116)
    at org.apache.spark.sql.classic.Dataset$.ofRows(Dataset.scala:145)
    at org.apache.spark.sql.classic.SparkSession.$anonfun$sql$5(SparkSession.scala:856)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:860)
    at org.apache.spark.sql.classic.SparkSession.sql(SparkSession.scala:819)
    at org.apache.spark.sql.classic.SparkSession.sql(SparkSession.scala:862)
    at org.apache.spark.sql.classic.SparkSession.sql(SparkSession.scala:912)
    at org.apache.spark.sql.classic.SparkSession.sql(SparkSession.scala:110)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:602)
    at org.apache.spark.sql.classic.SQLContext.sql(SQLContext.scala:303)
    at com.databricks.backend.daemon.driver.DatabricksILoop$.$anonfun$executeDependedOperations$1(DatabricksILoop.scala:654)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:86)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:83)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:110)
    at com.databricks.unity.UCSUniverseHelper$.withNewScope(UCSUniverseHelper.scala:8)
    at com.databricks.backend.daemon.driver.DatabricksILoop$.executeDependedOperations(DatabricksILoop.scala:639)
    at com.databricks.backend.daemon.driver.DatabricksILoop$.initializeSharedDriverContext(DatabricksILoop.scala:514)
    at com.databricks.backend.daemon.driver.DatabricksILoop$.getOrCreateSharedDriverContext(DatabricksILoop.scala:310)
    at com.databricks.backend.daemon.driver.DriverCorral.sharedEnvContext(DriverCorral.scala:401)
    at com.databricks.backend.daemon.driver.DriverCorral.<init>(DriverCorral.scala:255)
    at com.databricks.backend.daemon.driver.DriverDaemon.<init>(DriverDaemon.scala:119)
    at com.databricks.backend.daemon.driver.DriverDaemon$.create(DriverDaemon.scala:773)
    at com.databricks.backend.daemon.driver.DriverDaemon$.initialize(DriverDaemon.scala:1093)
    at com.databricks.backend.daemon.driver.DriverDaemon$.wrappedMain(DriverDaemon.scala:1033)
    at com.databricks.DatabricksMain.$anonfun$main$5(DatabricksMain.scala:251)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
    at com.databricks.DatabricksMain.$anonfun$withStartupProfilingData$2(DatabricksMain.scala:743)
    at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:510)
    at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:616)
    at com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:643)
    at com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:80)
    at com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:348)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:59)
    at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:344)
    at com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:78)
    at com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:75)
    at com.databricks.DatabricksMain.withAttributionContext(DatabricksMain.scala:116)
    at com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:127)
    at com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:108)
    at com.databricks.DatabricksMain.withAttributionTags(DatabricksMain.scala:116)
    at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:611)
    at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:519)
    at com.databricks.DatabricksMain.recordOperationWithResultTags(DatabricksMain.scala:116)
    at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:511)
    at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:475)
    at com.databricks.DatabricksMain.recordOperation(DatabricksMain.scala:116)
    at com.databricks.DatabricksMain.withStartupProfilingData(DatabricksMain.scala:742)
    at com.databricks.DatabricksMain.$anonfun$main$4(DatabricksMain.scala:250)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
    at com.databricks.context.integrity.IntegrityCheckContext$ThreadLocalStorage$.withValue(IntegrityCheckContext.scala:73)
    at com.databricks.DatabricksMain.$anonfun$main$1(DatabricksMain.scala:250)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
    at com.databricks.context.integrity.IntegrityCheckContext$ThreadLocalStorage$.withValue(IntegrityCheckContext.scala:73)
    at com.databricks.DatabricksMain.main(DatabricksMain.scala:222)
    at com.databricks.backend.daemon.driver.DriverDaemon.main(DriverDaemon.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.sql.catalyst.expressions.FoldableUnevaluable
    at java.base/jdk.internal.loader.BuiltinClassLoader.loadClass(BuiltinClassLoader.java:641)
    at java.base/jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(ClassLoaders.java:188)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:525)
    ... 120 more
```
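For context, the trace shows the driver crashing while Spark applies the Sedona SQL extension at session startup (the `SedonaSqlExtensions` frames above), so the cluster Spark config is along these lines (a sketch following the Sedona docs; the Kryo lines may or may not be relevant here):

```
spark.sql.extensions org.apache.sedona.sql.SedonaSqlExtensions
spark.serializer org.apache.spark.serializer.KryoSerializer
spark.kryo.registrator org.apache.sedona.core.serde.SedonaKryoRegistrator
```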

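As an extra data point, here is a minimal probe (my own sketch, not from the Sedona docs) that can be run in a notebook cell once the cluster starts with the `spark.sql.extensions` line removed. It checks whether the Catalyst type that Sedona's catalog reflects over is present in this runtime's Spark build:

```scala
// Probe the driver classpath for the Catalyst type that the Sedona 1.8
// Spark-4.0 artifacts reference. Open-source Spark 4.0 should resolve it,
// so a miss here suggests the DBR 17.3 Spark fork does not ship it.
try {
  Class.forName("org.apache.spark.sql.catalyst.expressions.FoldableUnevaluable")
  println("FoldableUnevaluable: found")
} catch {
  case _: ClassNotFoundException =>
    println("FoldableUnevaluable: NOT found on the driver classpath")
}
```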