CommanderWahid commented on issue #2472:
URL: https://github.com/apache/sedona/issues/2472#issuecomment-3491811149

   @Kontinuation, thank you!
   @burrowsej, I gave it a try, but unfortunately it didn't resolve the issue, although the cluster did manage to start.
   
   (Screenshot attached: https://github.com/user-attachments/assets/a62be6ee-1fe9-4d76-a7b4-0105a87e9db3)
   
   
   25/11/05 14:36:03 WARN SparkSession: Cannot use org.apache.sedona.sql.SedonaSqlExtensions to configure session extensions.
   java.lang.ClassNotFoundException: org.apache.sedona.sql.SedonaSqlExtensions
        at java.base/jdk.internal.loader.BuiltinClassLoader.loadClass(BuiltinClassLoader.java:641)
        at java.base/jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(ClassLoaders.java:188)
        at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:525)
        at java.base/java.lang.Class.forName0(Native Method)
        at java.base/java.lang.Class.forName(Class.java:467)
        at org.apache.spark.util.SparkClassUtils.classForName(SparkClassUtils.scala:45)
        at org.apache.spark.util.SparkClassUtils.classForName$(SparkClassUtils.scala:40)
        at org.apache.spark.util.Utils$.classForName(Utils.scala:110)
        at org.apache.spark.sql.classic.SparkSession$.$anonfun$applyExtensions$2(SparkSession.scala:1467)
        at org.apache.spark.sql.classic.SparkSession$.$anonfun$applyExtensions$2$adapted(SparkSession.scala:1465)
        at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:619)
        at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:617)
        at scala.collection.AbstractIterable.foreach(Iterable.scala:935)
        at org.apache.spark.sql.classic.SparkSession$.org$apache$spark$sql$classic$SparkSession$$applyExtensions(SparkSession.scala:1465)
        at org.apache.spark.sql.classic.SparkSession$Builder.build(SparkSession.scala:1253)
        at org.apache.spark.sql.classic.SparkSession$Builder.getOrCreate(SparkSession.scala:1269)
        at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:38)
        at com.databricks.backend.daemon.driver.DatabricksILoop$.initializeSharedDriverContext(DatabricksILoop.scala:503)
        at com.databricks.backend.daemon.driver.DatabricksILoop$.getOrCreateSharedDriverContext(DatabricksILoop.scala:310)
        at com.databricks.backend.daemon.driver.DriverCorral.sharedEnvContext(DriverCorral.scala:401)
        at com.databricks.backend.daemon.driver.DriverCorral.<init>(DriverCorral.scala:255)
        at com.databricks.backend.daemon.driver.DriverDaemon.<init>(DriverDaemon.scala:119)
        at com.databricks.backend.daemon.driver.DriverDaemon$.create(DriverDaemon.scala:773)
        at com.databricks.backend.daemon.driver.DriverDaemon$.initialize(DriverDaemon.scala:1093)
        at com.databricks.backend.daemon.driver.DriverDaemon$.wrappedMain(DriverDaemon.scala:1033)
        at com.databricks.DatabricksMain.$anonfun$main$5(DatabricksMain.scala:251)
        at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
        at com.databricks.DatabricksMain.$anonfun$withStartupProfilingData$2(DatabricksMain.scala:743)
        at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:510)
        at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:616)
        at com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:643)
        at com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:80)
        at com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:348)
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:59)
        at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:344)
        at com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:78)
        at com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:75)
        at com.databricks.DatabricksMain.withAttributionContext(DatabricksMain.scala:116)
        at com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:127)
        at com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:108)
        at com.databricks.DatabricksMain.withAttributionTags(DatabricksMain.scala:116)
        at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:611)
        at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:519)
        at com.databricks.DatabricksMain.recordOperationWithResultTags(DatabricksMain.scala:116)
        at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:511)
        at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:475)
        at com.databricks.DatabricksMain.recordOperation(DatabricksMain.scala:116)
        at com.databricks.DatabricksMain.withStartupProfilingData(DatabricksMain.scala:742)
        at com.databricks.DatabricksMain.$anonfun$main$4(DatabricksMain.scala:250)
        at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
        at com.databricks.context.integrity.IntegrityCheckContext$ThreadLocalStorage$.withValue(IntegrityCheckContext.scala:73)
        at com.databricks.DatabricksMain.$anonfun$main$1(DatabricksMain.scala:250)
        at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
        at com.databricks.context.integrity.IntegrityCheckContext$ThreadLocalStorage$.withValue(IntegrityCheckContext.scala:73)
        at com.databricks.DatabricksMain.main(DatabricksMain.scala:222)
        at com.databricks.backend.daemon.driver.DriverDaemon.main(DriverDaemon.scala)
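   
   If I read the trace right, the ClassNotFoundException is thrown while the DriverDaemon/DatabricksILoop is initializing the shared driver context, i.e. when Spark applies spark.sql.extensions at driver startup. That means the Sedona jar has to be on the driver classpath before the driver JVM comes up; libraries attached to the cluster after startup arrive too late for session-extension loading. As a quick check, here is a minimal diagnostic sketch (assuming a Scala notebook on the same cluster) to confirm whether the extension class is visible to the driver classloader at all:
   
   ```scala
   // Diagnostic sketch: check whether the Sedona extension class is visible
   // to the driver classloader. If this reports NOT found, the Sedona jar
   // was not on the driver classpath when session extensions were applied.
   try {
     Class.forName("org.apache.sedona.sql.SedonaSqlExtensions")
     println("SedonaSqlExtensions IS on the driver classpath")
   } catch {
     case _: ClassNotFoundException =>
       println("SedonaSqlExtensions is NOT on the driver classpath")
   }
   ```
   
   If the class resolves from the notebook but the startup warning persists, that would point to the jar being attached after driver initialization rather than being missing altogether.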
   

