Repository: spark
Updated Branches:
  refs/heads/master 28132ceb1 -> 146da0d81


Fix typos

Two typos squashed.

BTW, let me know how to proceed with other typos if I run across any. I don't 
feel comfortable leaving them aside, but neither do I want to keep sending pull 
requests with such tiny changes. Guide me.

Author: Jacek Laskowski <[email protected]>

Closes #9250 from jaceklaskowski/typos-hunting.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/146da0d8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/146da0d8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/146da0d8

Branch: refs/heads/master
Commit: 146da0d8100490a6e49a6c076ec253cdaf9f8905
Parents: 28132ce
Author: Jacek Laskowski <[email protected]>
Authored: Sun Oct 25 01:33:22 2015 +0100
Committer: Sean Owen <[email protected]>
Committed: Sun Oct 25 01:33:22 2015 +0100

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkConf.scala              | 2 +-
 core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala  | 2 +-
 .../src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala | 3 ++-
 core/src/main/scala/org/apache/spark/util/ThreadUtils.scala       | 2 +-
 4 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/146da0d8/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala 
b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 58d3b84..f023e4b 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -621,7 +621,7 @@ private[spark] object SparkConf extends Logging {
   /**
    * Return whether the given config should be passed to an executor on 
start-up.
    *
-   * Certain akka and authentication configs are required of the executor when 
it connects to
+   * Certain akka and authentication configs are required from the executor 
when it connects to
    * the scheduler, while the rest of the spark configs can be inherited from 
the driver later.
    */
   def isExecutorStartupConf(name: String): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/146da0d8/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala 
b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index 48afe3a..fdf76d3 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -197,7 +197,7 @@ private[spark] class MetricsSystem private (
           }
         } catch {
           case e: Exception => {
-            logError("Sink class " + classPath + " cannot be instantialized")
+            logError("Sink class " + classPath + " cannot be instantiated")
             throw e
           }
         }

http://git-wip-us.apache.org/repos/asf/spark/blob/146da0d8/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala 
b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
index f25f3ed..cb9a300 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
@@ -22,7 +22,8 @@ import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.storage.BlockManagerId
 
 /**
- * Low-level task scheduler interface, currently implemented exclusively by 
TaskSchedulerImpl.
+ * Low-level task scheduler interface, currently implemented exclusively by
+ * [[org.apache.spark.scheduler.TaskSchedulerImpl]].
  * This interface allows plugging in different task schedulers. Each 
TaskScheduler schedules tasks
  * for a single SparkContext. These schedulers get sets of tasks submitted to 
them from the
  * DAGScheduler for each stage, and are responsible for sending the tasks to 
the cluster, running

http://git-wip-us.apache.org/repos/asf/spark/blob/146da0d8/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala 
b/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
index 15e7519..5328344 100644
--- a/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
@@ -80,7 +80,7 @@ private[spark] object ThreadUtils {
   }
 
   /**
-   * Wrapper over newSingleThreadScheduledExecutor.
+   * Wrapper over ScheduledThreadPoolExecutor.
    */
   def newDaemonSingleThreadScheduledExecutor(threadName: String): 
ScheduledExecutorService = {
     val threadFactory = new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build()


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to