Github user mridulm commented on a diff in the pull request:
https://github.com/apache/spark/pull/22010#discussion_r220672579
--- Diff: core/src/main/scala/org/apache/spark/rdd/RDD.scala ---
@@ -396,7 +397,20 @@ abstract class RDD[T: ClassTag](
    * Return a new RDD containing the distinct elements in this RDD.
    */
   def distinct(numPartitions: Int)(implicit ord: Ordering[T] = null): RDD[T] = withScope {
-    map(x => (x, null)).reduceByKey((x, y) => x, numPartitions).map(_._1)
+    def removeDuplicatesInPartition(partition: Iterator[T]): Iterator[T] = {
+      // Create an instance of external append only map which ignores values.
+      val map = new ExternalAppendOnlyMap[T, Null, Null](
+        createCombiner = value => null,
+        mergeValue = (a, b) => a,
+        mergeCombiners = (a, b) => a)
--- End diff --
nit: clean them?
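
For context, a minimal sketch of how the per-partition dedup above might be completed, since the quoted hunk cuts off before the map is populated. It assumes ExternalAppendOnlyMap's insertAll/iterator API (org.apache.spark.util.collection; the class is private[spark], so this only compiles inside Spark itself) and is an illustration of the approach, not necessarily the exact code in the PR:

    def removeDuplicatesInPartition(partition: Iterator[T]): Iterator[T] = {
      // Values are always null; the combiners simply keep the first null,
      // so the map effectively tracks only the distinct keys and can
      // spill to disk if the partition does not fit in memory.
      val map = new ExternalAppendOnlyMap[T, Null, Null](
        createCombiner = _ => null,
        mergeValue = (a, _) => a,
        mergeCombiners = (a, _) => a)
      map.insertAll(partition.map(x => (x, null)))
      // The map's keys are the distinct elements of this partition.
      map.iterator.map(_._1)
    }

The motivation for deduplicating inside each partition is presumably to let distinct() skip the (x, null)/reduceByKey shuffle when the RDD is already partitioned suitably, falling back to the existing reduceByKey path otherwise.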