Merge pull request #62 from harveyfeng/master

Make TaskContext's stageId publicly accessible.

(cherry picked from commit 4e46fde818a9b94960ab83fa6390952e2d5dd3e6)
Signed-off-by: Reynold Xin <r...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/5a73ab76
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/5a73ab76
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/5a73ab76

Branch: refs/heads/branch-0.8
Commit: 5a73ab7610727b1049faf007585a2e842cef0eb4
Parents: b6ce111
Author: Matei Zaharia <ma...@eecs.berkeley.edu>
Authored: Tue Oct 15 23:14:27 2013 -0700
Committer: Reynold Xin <r...@apache.org>
Committed: Thu Oct 17 18:36:36 2013 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala | 5 ++++-
 core/src/main/scala/org/apache/spark/TaskContext.scala       | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/5a73ab76/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 2bab9d6..afa76a4 100644
--- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -36,7 +36,10 @@ import org.apache.spark.SerializableWritable
  * Saves the RDD using a JobConf, which should contain an output key class, an output value class,
  * a filename to write to, etc, exactly like in a Hadoop MapReduce job.
  */
-class SparkHadoopWriter(@transient jobConf: JobConf) extends Logging with SparkHadoopMapRedUtil with Serializable {
+class SparkHadoopWriter(@transient jobConf: JobConf)
+  extends Logging
+  with SparkHadoopMapRedUtil
+  with Serializable {
 
   private val now = new Date()
   private val conf = new SerializableWritable(jobConf)

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/5a73ab76/core/src/main/scala/org/apache/spark/TaskContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/TaskContext.scala b/core/src/main/scala/org/apache/spark/TaskContext.scala
index 51584d6..cae983e 100644
--- a/core/src/main/scala/org/apache/spark/TaskContext.scala
+++ b/core/src/main/scala/org/apache/spark/TaskContext.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.executor.TaskMetrics
 
 class TaskContext(
-  private[spark] val stageId: Int,
+  val stageId: Int,
   val partitionId: Int,
   val attemptId: Long,
   val runningLocally: Boolean = false,

Reply via email to