Repository: spark
Updated Branches:
  refs/heads/master ab8177da2 -> ed92b47e8


[SPARK-4397] Move object RDD to the front of RDD.scala.

I ran into multiple cases in which the SBT/Scala compiler was confused by the
implicits in continuous compilation mode. Adding explicit return types fixes
the problem.
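
For illustration, a minimal sketch of the pattern this change applies (the
names are hypothetical, not code from this commit): with an inferred result
type, the compiler must re-derive the signature of an implicit def on every
incremental build; an explicit return type pins the signature down.

    import scala.language.implicitConversions

    object ImplicitReturnTypeSketch {
      // Value-class wrapper that adds a method to Int.
      class RichInt(val self: Int) extends AnyVal {
        def squared: Int = self * self
      }

      // Before: the return type of the implicit is inferred.
      // implicit def intToRichInt(i: Int) = new RichInt(i)

      // After: an explicit return type makes the signature unambiguous,
      // mirroring the fix applied to the RDD implicits below.
      implicit def intToRichInt(i: Int): RichInt = new RichInt(i)

      def demo: Int = 4.squared  // resolved via intToRichInt
    }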

Author: Reynold Xin <r...@databricks.com>

Closes #3580 from rxin/rdd-implicit and squashes the following commits:

ee32fcd [Reynold Xin] Move object RDD to the end of the file.
b8562c9 [Reynold Xin] Merge branch 'master' of github.com:apache/spark into rdd-implicit
d4e9f85 [Reynold Xin] Code review.
a836a37 [Reynold Xin] Move object RDD to the front of RDD.scala.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ed92b47e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ed92b47e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ed92b47e

Branch: refs/heads/master
Commit: ed92b47e83c2882f0e76da78dc268577df820382
Parents: ab8177d
Author: Reynold Xin <r...@databricks.com>
Authored: Thu Dec 4 16:32:20 2014 -0800
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu Dec 4 16:32:20 2014 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/SparkContext.scala   |  8 +++---
 .../main/scala/org/apache/spark/rdd/RDD.scala   | 29 +++++++++++++++-----
 2 files changed, 26 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ed92b47e/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 532f292..aded7c1 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1758,7 +1758,7 @@ object SparkContext extends Logging {
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
     "backward compatibility.", "1.3.0")
-  def writableWritableConverter[T <: Writable]() =
+  def writableWritableConverter[T <: Writable](): WritableConverter[T] =
     WritableConverter.writableWritableConverter()
 
   /**
@@ -2017,15 +2017,15 @@ object WritableConverter {
     simpleWritableConverter[Boolean, BooleanWritable](_.get)
 
   implicit def bytesWritableConverter(): WritableConverter[Array[Byte]] = {
-    simpleWritableConverter[Array[Byte], BytesWritable](bw =>
+    simpleWritableConverter[Array[Byte], BytesWritable] { bw =>
      // getBytes method returns an array which is longer than the data to be returned
       Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
-    )
+    }
   }
 
   implicit def stringWritableConverter(): WritableConverter[String] =
     simpleWritableConverter[String, Text](_.toString)
 
-  implicit def writableWritableConverter[T <: Writable]() =
+  implicit def writableWritableConverter[T <: Writable](): WritableConverter[T] =
     new WritableConverter[T](_.runtimeClass.asInstanceOf[Class[T]], _.asInstanceOf[T])
 }
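
As an aside on bytesWritableConverter above: the comment explains why the
converter copies exactly getLength bytes rather than returning getBytes
directly. A standalone sketch of the trap (assumes hadoop-common on the
classpath; not part of this commit):

    import java.util.Arrays
    import org.apache.hadoop.io.BytesWritable

    object BytesWritablePadding extends App {
      val bw = new BytesWritable(Array[Byte](1, 2, 3))
      bw.setSize(4)                // grows the internal buffer
      bw.setSize(2)                // shrinks the logical length, not the buffer
      println(bw.getBytes.length)  // > 2: stale trailing bytes remain
      println(bw.getLength)        // 2: the actual data length
      // Trim to the real contents, exactly as the converter does:
      val data = Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
      println(data.length)         // 2
    }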

http://git-wip-us.apache.org/repos/asf/spark/blob/ed92b47e/core/src/main/scala/org/apache/spark/rdd/RDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 0bd616e..214f22b 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -1398,6 +1398,13 @@ abstract class RDD[T: ClassTag](
   }
 }
 
+
+/**
+ * Defines implicit functions that provide extra functionality on RDDs of specific types.
+ *
+ * For example, [[RDD.rddToPairRDDFunctions]] converts an RDD of key-value pairs into a
+ * [[PairRDDFunctions]], enabling extra functionality such as [[PairRDDFunctions.reduceByKey]].
+ */
 object RDD {
 
   // The following implicit functions were in SparkContext before 1.2 and users had to
@@ -1406,22 +1413,30 @@ object RDD {
   // compatibility and forward to the following functions directly.
 
   implicit def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
-      (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null) = {
+    (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): PairRDDFunctions[K, V] = {
     new PairRDDFunctions(rdd)
   }
 
-  implicit def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]) = new AsyncRDDActions(rdd)
+  implicit def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]): AsyncRDDActions[T] = {
+    new AsyncRDDActions(rdd)
+  }
 
   implicit def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
-      rdd: RDD[(K, V)]) =
+      rdd: RDD[(K, V)]): SequenceFileRDDFunctions[K, V] = {
     new SequenceFileRDDFunctions(rdd)
+  }
 
-  implicit def rddToOrderedRDDFunctions[K : Ordering : ClassTag, V: ClassTag](
-      rdd: RDD[(K, V)]) =
+  implicit def rddToOrderedRDDFunctions[K : Ordering : ClassTag, V: ClassTag](rdd: RDD[(K, V)])
+    : OrderedRDDFunctions[K, V, (K, V)] = {
     new OrderedRDDFunctions[K, V, (K, V)](rdd)
+  }
 
-  implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]) = new DoubleRDDFunctions(rdd)
+  implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]): DoubleRDDFunctions = {
+    new DoubleRDDFunctions(rdd)
+  }
 
-  implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T]) =
+  implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T])
+    : DoubleRDDFunctions = {
     new DoubleRDDFunctions(rdd.map(x => num.toDouble(x)))
+  }
 }
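
A minimal usage sketch (not part of this commit) of what these implicits
enable: because the conversions live in RDD's companion object, implicit
search finds them without an extra import, so pair-RDD operations such as
reduceByKey are available directly on an RDD[(K, V)].

    import org.apache.spark.{SparkConf, SparkContext}

    object WordCountSketch {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("sketch").setMaster("local[*]")
        val sc = new SparkContext(conf)
        val counts = sc.parallelize(Seq(("a", 1), ("b", 1), ("a", 1)))
          .reduceByKey(_ + _)  // supplied by RDD.rddToPairRDDFunctions
        counts.collect().foreach(println)
        sc.stop()
      }
    }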


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
