Github user zzl0 commented on a diff in the pull request:
https://github.com/apache/spark/pull/1791#discussion_r15856478
--- Diff: core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
---
@@ -730,7 +730,25 @@ private[spark] object PythonRDD extends Logging {
}
/**
- * Convert and RDD of Java objects to and RDD of serialized Python
objects, that is usable by
+ * Convert a RDD of serialized Python objects to RDD of Double, that is
usable by
+ * PySpark.
+ */
+ def pythonToJavaDouble(pyRDD: JavaRDD[Array[Byte]], batched: Boolean):
JavaDoubleRDD = {
+ new JavaDoubleRDD(pyRDD.rdd.mapPartitions { iter =>
+ val unpickle = new Unpickler
+ iter.flatMap { row =>
+ val obj = unpickle.loads(row)
+ if (batched) {
+ obj.asInstanceOf[JArrayList[_]].map(_.asInstanceOf[Double])
+ } else {
+ Seq(obj.asInstanceOf[Double])
+ }
+ }
+ })
+ }
+
+ /**
+ * Convert a RDD of Java objects to and RDD of serialized Python
objects, that is usable by
--- End diff --
Convert a RDD of Java objects to and RDD of serialized Python objects
=>
Convert an RDD of Java objects to an RDD of serialized Python objects?
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]