Fixed some Scala warnings in core.

Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/bfbd7e5d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/bfbd7e5d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/bfbd7e5d

Branch: refs/heads/scala-2.10
Commit: bfbd7e5d9f9e556961e79135dcde8f88fa31e2bc
Parents: 34da58a
Author: Prashant Sharma <prashan...@imaginea.com>
Authored: Thu Oct 10 15:22:31 2013 +0530
Committer: Prashant Sharma <prashan...@imaginea.com>
Committed: Thu Oct 10 15:22:31 2013 +0530

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala  | 2 +-
 .../org/apache/spark/scheduler/cluster/TaskResultGetter.scala    | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/bfbd7e5d/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index d7b45d4..c9c13f7 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -72,7 +72,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * Return a new RDD by applying a function to each partition of this RDD, while tracking the index
    * of the original partition.
    */
-  def mapPartitionsWithIndex[R: ClassManifest](
+  def mapPartitionsWithIndex[R: ClassTag](
       f: JFunction2[Int, java.util.Iterator[T], java.util.Iterator[R]],
       preservesPartitioning: Boolean = false): JavaRDD[R] =
     new JavaRDD(rdd.mapPartitionsWithIndex(((a,b) => f(a,asJavaIterator(b))),

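The hunk above swaps a ClassManifest context bound for ClassTag, the replacement Scala 2.10 expects; ClassManifest is deprecated there and the compiler warns on every use. A minimal standalone sketch of the same idiom (illustrative names only, not the Spark API itself):

    import scala.reflect.ClassTag

    // Building an Array[R] needs the runtime class of R; the ClassTag
    // context bound asks the compiler to supply it implicitly.
    def toArrayOf[R: ClassTag](xs: Seq[R]): Array[R] = xs.toArray

    // Pre-2.10 spelling that now triggers a deprecation warning:
    // def toArrayOf[R: ClassManifest](xs: Seq[R]): Array[R] = xs.toArray

    toArrayOf(Seq(1, 2, 3))   // Array(1, 2, 3)
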
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/bfbd7e5d/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskResultGetter.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskResultGetter.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskResultGetter.scala
index feec8ec..b2a8f06 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskResultGetter.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskResultGetter.scala
@@ -87,7 +87,7 @@ private[spark] class TaskResultGetter(sparkEnv: SparkEnv, scheduler: ClusterSche
           case cnf: ClassNotFoundException =>
             val loader = Thread.currentThread.getContextClassLoader
             taskSetManager.abort("ClassNotFound with classloader: " + loader)
-          case ex =>
+          case ex: Throwable =>
             taskSetManager.abort("Exception while deserializing and fetching 
task: %s".format(ex))
         }
       }
@@ -111,7 +111,7 @@ private[spark] class TaskResultGetter(sparkEnv: SparkEnv, scheduler: ClusterSche
             val loader = Thread.currentThread.getContextClassLoader
             logError(
               "Could not deserialize TaskEndReason: ClassNotFound with 
classloader " + loader)
-          case ex => {}
+          case ex: Throwable => {}
         }
         scheduler.handleFailedTask(taskSetManager, tid, taskState, reason)
       }

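Both hunks in TaskResultGetter add an explicit `: Throwable` to what were bare `case ex =>` patterns. In Scala 2.10 a bare binding in a catch block still catches every Throwable, and the compiler warns that this should be stated explicitly. A minimal sketch of the pattern under that assumption (illustrative method, not Spark code):

    // Parse a port number, falling back to -1 on any failure.
    def parsePort(s: String): Int =
      try {
        s.toInt
      } catch {
        case nfe: NumberFormatException => -1
        // Writing just `case ex =>` here draws a 2.10 warning because it
        // silently catches all Throwables; annotating the type keeps the
        // behaviour but makes the intent explicit.
        case ex: Throwable => -1
      }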