rafficghani commented on issue #4608:
URL: https://github.com/apache/pulsar/issues/4608#issuecomment-833766030
> @yjshen following your user example, I get this error
>
> ```
> >>> df = spark.createDataFrame([(1, 1.0), (1, 2.0), (2, 3.0), (2, 5.0),
(2, 10.0)],("id", "v"))
> >>> df.write.format("pulsar").option("service.url",
"pulsar://localhost:6650").option("admin.url",
"http://localhost:8088").option("topic", "abc").save()
> Traceback (most recent call last):
> File "<stdin>", line 1, in <module>
> File
"/Users/kevin/anaconda3/lib/python3.8/site-packages/pyspark/sql/readwriter.py",
line 828, in save
> self._jwrite.save()
> File
"/Users/kevin/anaconda3/lib/python3.8/site-packages/pyspark/python/lib/py4j-0.10.9-src.zip/py4j/java_gateway.py",
line 1304, in __call__
> File
"/Users/kevin/anaconda3/lib/python3.8/site-packages/pyspark/sql/utils.py", line
128, in deco
> return f(*a, **kw)
> File
"/Users/kevin/anaconda3/lib/python3.8/site-packages/pyspark/python/lib/py4j-0.10.9-src.zip/py4j/protocol.py",
line 326, in get_return_value
> py4j.protocol.Py4JJavaError: An error occurred while calling o57.save.
> : java.lang.NoClassDefFoundError:
org/apache/spark/sql/sources/v2/StreamWriteSupport
> at java.lang.ClassLoader.defineClass1(Native Method)
> at java.lang.ClassLoader.defineClass(ClassLoader.java:756)
> at
java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
> at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
> at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
> at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
> at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
> at java.security.AccessController.doPrivileged(Native Method)
> at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:348)
> at
java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:370)
> at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404)
> at java.util.ServiceLoader$1.next(ServiceLoader.java:480)
> at
scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:44)
> at scala.collection.Iterator.foreach(Iterator.scala:941)
> at scala.collection.Iterator.foreach$(Iterator.scala:941)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
> at scala.collection.IterableLike.foreach(IterableLike.scala:74)
> at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
> at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
> at
scala.collection.TraversableLike.filterImpl(TraversableLike.scala:255)
> at
scala.collection.TraversableLike.filterImpl$(TraversableLike.scala:249)
> at
scala.collection.AbstractTraversable.filterImpl(Traversable.scala:108)
> at scala.collection.TraversableLike.filter(TraversableLike.scala:347)
> at scala.collection.TraversableLike.filter$(TraversableLike.scala:347)
> at scala.collection.AbstractTraversable.filter(Traversable.scala:108)
> at
org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:659)
> at
org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSourceV2(DataSource.scala:743)
> at
org.apache.spark.sql.DataFrameWriter.lookupV2Provider(DataFrameWriter.scala:966)
> at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:303)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498)
> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
> at py4j.Gateway.invoke(Gateway.java:282)
> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
> at py4j.commands.CallCommand.execute(CallCommand.java:79)
> at py4j.GatewayConnection.run(GatewayConnection.java:238)
> at java.lang.Thread.run(Thread.java:748)
> Caused by: java.lang.ClassNotFoundException:
org.apache.spark.sql.sources.v2.StreamWriteSupport
> at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
> ... 44 more
> ```
I received the same error. It is probably caused by an unsupported Scala or
Spark version — check which versions you are using. I am not sure when an
upgrade will be available; see the list of [available
connectors](https://dl.bintray.com/streamnative/maven/io/streamnative/connectors/pulsar-spark-connector_2.11/).
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]