Updated Branches:
  refs/heads/branch-0.8 c8e0c0dc1 -> 1d9412b6d

Merge pull request #144 from liancheng/runjob-clean

Removed unused return value in SparkContext.runJob

The return type of this `runJob` overload is `Unit`:

    def runJob[T, U: ClassManifest](
        rdd: RDD[T],
        func: (TaskContext, Iterator[T]) => U,
        partitions: Seq[Int],
        allowLocal: Boolean,
        resultHandler: (Int, U) => Unit) {
        ...
    }

It's obviously unnecessary to "return" `result`.
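
For context, here is a minimal usage sketch (not part of this commit; it assumes a live `SparkContext` named `sc`, e.g. from spark-shell) showing that callers receive results through `resultHandler`, not through a return value:

    import org.apache.spark.TaskContext

    val rdd  = sc.parallelize(1 to 100, 4)
    val sums = new Array[Int](rdd.partitions.size)

    sc.runJob(
      rdd,
      (ctx: TaskContext, it: Iterator[Int]) => it.sum,  // per-partition computation
      0 until rdd.partitions.size,                      // run on every partition
      false,                                            // allowLocal
      (partitionId: Int, partitionSum: Int) => sums(partitionId) = partitionSum)

    println(sums.sum)  // total assembled from the callback, not from a return value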

(cherry picked from commit aadeda5e7697a433c82879033e758fbc403680dc)
Signed-off-by: Reynold Xin <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/1d9412b6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/1d9412b6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/1d9412b6

Branch: refs/heads/branch-0.8
Commit: 1d9412b6dd4575808ed095c5619fe44a1459e125
Parents: c8e0c0d
Author: Reynold Xin <[email protected]>
Authored: Wed Nov 6 13:27:47 2013 -0800
Committer: Reynold Xin <[email protected]>
Committed: Wed Nov 6 13:28:02 2013 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/1d9412b6/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 10d3c53..1e70628 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -799,11 +799,10 @@ class SparkContext(
     val cleanedFunc = clean(func)
     logInfo("Starting job: " + callSite)
     val start = System.nanoTime
-    val result = dagScheduler.runJob(rdd, cleanedFunc, partitions, callSite, allowLocal,
+    dagScheduler.runJob(rdd, cleanedFunc, partitions, callSite, allowLocal,
       resultHandler, localProperties.get)
     logInfo("Job finished: " + callSite + ", took " + (System.nanoTime - 
start) / 1e9 + " s")
     rdd.doCheckpoint()
-    result
   }
 
   /**
