Repository: spark
Updated Branches:
  refs/heads/branch-2.1 72bf51997 -> b226f10e3


[MINOR][CORE][SQL][DOCS] Typo fixes

## What changes were proposed in this pull request?

Typo fixes

## How was this patch tested?

Local build. Awaiting the official build.

Author: Jacek Laskowski <ja...@japila.pl>

Closes #16144 from jaceklaskowski/typo-fixes.

(cherry picked from commit b162cc0c2810c1a9fa2eee8e664ffae84f9eea11)
Signed-off-by: Sean Owen <so...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b226f10e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b226f10e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b226f10e

Branch: refs/heads/branch-2.1
Commit: b226f10e3df8b789da6ef820b256f994b178fbbe
Parents: 72bf519
Author: Jacek Laskowski <ja...@japila.pl>
Authored: Fri Dec 9 18:45:57 2016 +0800
Committer: Sean Owen <so...@cloudera.com>
Committed: Fri Dec 9 18:46:32 2016 +0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/MapOutputTracker.scala    | 2 +-
 core/src/main/scala/org/apache/spark/SparkContext.scala        | 4 ++--
 .../apache/spark/deploy/history/HistoryServerArguments.scala   | 2 +-
 .../main/scala/org/apache/spark/internal/config/package.scala  | 2 +-
 core/src/main/scala/org/apache/spark/rdd/RDD.scala             | 2 +-
 core/src/main/scala/org/apache/spark/rpc/RpcCallContext.scala  | 2 +-
 docs/monitoring.md                                             | 6 ++----
 .../main/java/org/apache/spark/sql/streaming/OutputMode.java   | 2 +-
 .../main/scala/org/apache/spark/sql/catalyst/InternalRow.scala | 2 +-
 .../apache/spark/sql/catalyst/catalog/ExternalCatalog.scala    | 2 +-
 .../org/apache/spark/sql/catalyst/expressions/Expression.scala | 6 +++---
 .../spark/sql/catalyst/expressions/objects/objects.scala       | 2 +-
 12 files changed, 16 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 7f8f0f5..6f5c31d 100644
--- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -322,7 +322,7 @@ private[spark] class MapOutputTrackerMaster(conf: SparkConf,
   if (minSizeForBroadcast > maxRpcMessageSize) {
     val msg = s"spark.shuffle.mapOutput.minSizeForBroadcast 
($minSizeForBroadcast bytes) must " +
       s"be <= spark.rpc.message.maxSize ($maxRpcMessageSize bytes) to prevent 
sending an rpc " +
-      "message that is to large."
+      "message that is too large."
     logError(msg)
     throw new IllegalArgumentException(msg)
   }
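
As a hedged aside on the constraint behind this message (values below are purely illustrative, not recommendations): spark.shuffle.mapOutput.minSizeForBroadcast is a byte size that must stay at or below spark.rpc.message.maxSize once the latter (configured in MB) is converted to bytes, e.g.:

    // Illustrative settings only: keep the broadcast threshold under the RPC limit.
    val conf = new org.apache.spark.SparkConf()
      .set("spark.rpc.message.maxSize", "128")                     // in MB
      .set("spark.shuffle.mapOutput.minSizeForBroadcast", "512k")  // well under 128 MB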

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 8f8392f..b6aeeb9 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2567,8 +2567,8 @@ object SparkContext extends Logging {
     val serviceLoaders =
       ServiceLoader.load(classOf[ExternalClusterManager], loader).asScala.filter(_.canCreate(url))
     if (serviceLoaders.size > 1) {
-      throw new SparkException(s"Multiple Cluster Managers ($serviceLoaders) 
registered " +
-          s"for the url $url:")
+      throw new SparkException(
+        s"Multiple external cluster managers registered for the url $url: 
$serviceLoaders")
     }
     serviceLoaders.headOption
   }
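
For readers unfamiliar with the lookup pattern used here, a standalone sketch of the same ServiceLoader idiom, using a hypothetical Greeter service rather than Spark's ExternalClusterManager:

    import java.util.ServiceLoader
    import scala.collection.JavaConverters._

    // Hypothetical SPI: implementations are declared in a
    // META-INF/services/Greeter resource on the classpath.
    trait Greeter { def canHandle(lang: String): Boolean }

    def findGreeter(lang: String, loader: ClassLoader): Option[Greeter] = {
      val matches = ServiceLoader.load(classOf[Greeter], loader).asScala.filter(_.canHandle(lang))
      if (matches.size > 1) {
        // Same idea as the check above: refuse to guess when several providers match.
        throw new IllegalArgumentException(s"Multiple greeters registered for $lang: $matches")
      }
      matches.headOption
    }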

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
index 2eddb5f..080ba12 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
@@ -24,7 +24,7 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.util.Utils
 
 /**
- * Command-line parser for the master.
+ * Command-line parser for the [[HistoryServer]].
  */
 private[history] class HistoryServerArguments(conf: SparkConf, args: Array[String])
   extends Logging {

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/core/src/main/scala/org/apache/spark/internal/config/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 8ce9883..f4844de 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -198,7 +198,7 @@ package object config {
     .createWithDefault(0)
 
   private[spark] val DRIVER_BLOCK_MANAGER_PORT = ConfigBuilder("spark.driver.blockManager.port")
-    .doc("Port to use for the block managed on the driver.")
+    .doc("Port to use for the block manager on the driver.")
     .fallbackConf(BLOCK_MANAGER_PORT)
 
   private[spark] val IGNORE_CORRUPT_FILES = ConfigBuilder("spark.files.ignoreCorruptFiles")

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/core/src/main/scala/org/apache/spark/rdd/RDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index d285e91..374abcc 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -1746,7 +1746,7 @@ abstract class RDD[T: ClassTag](
 
   /**
    * Clears the dependencies of this RDD. This method must ensure that all references
-   * to the original parent RDDs is removed to enable the parent RDDs to be garbage
+   * to the original parent RDDs are removed to enable the parent RDDs to be garbage
    * collected. Subclasses of RDD may override this method for implementing their own cleaning
    * logic. See [[org.apache.spark.rdd.UnionRDD]] for an example.
    */

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/core/src/main/scala/org/apache/spark/rpc/RpcCallContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rpc/RpcCallContext.scala b/core/src/main/scala/org/apache/spark/rpc/RpcCallContext.scala
index f527ec8..117f51c 100644
--- a/core/src/main/scala/org/apache/spark/rpc/RpcCallContext.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/RpcCallContext.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.rpc
 
 /**
- * A callback that [[RpcEndpoint]] can use it to send back a message or failure. It's thread-safe
+ * A callback that [[RpcEndpoint]] can use to send back a message or failure. It's thread-safe
  * and can be called in any thread.
  */
 private[spark] trait RpcCallContext {

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/docs/monitoring.md
----------------------------------------------------------------------
diff --git a/docs/monitoring.md b/docs/monitoring.md
index 2eef456..7a1de52 100644
--- a/docs/monitoring.md
+++ b/docs/monitoring.md
@@ -44,10 +44,8 @@ The spark jobs themselves must be configured to log events, and to log them to t
 writable directory. For example, if the server was configured with a log directory of
 `hdfs://namenode/shared/spark-logs`, then the client-side options would be:
 
-```
-spark.eventLog.enabled true
-spark.eventLog.dir hdfs://namenode/shared/spark-logs
-```
+    spark.eventLog.enabled true
+    spark.eventLog.dir hdfs://namenode/shared/spark-logs
 
 The history server can be configured as follows:
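
As a hedged aside on the two client-side properties added above: they can equivalently be set programmatically on the application's SparkConf instead of in spark-defaults.conf, for example:

    // Illustrative only: enable event logging and point it at the shared
    // directory the history server reads from.
    val conf = new org.apache.spark.SparkConf()
      .set("spark.eventLog.enabled", "true")
      .set("spark.eventLog.dir", "hdfs://namenode/shared/spark-logs")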
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
index 49a18df..a515c1a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
@@ -46,7 +46,7 @@ public class OutputMode {
 
   /**
    * OutputMode in which all the rows in the streaming DataFrame/Dataset will be written
-   * to the sink every time these is some updates. This output mode can only be used in queries
+   * to the sink every time there are some updates. This output mode can only be used in queries
    * that contain aggregations.
    *
    * @since 2.0.0
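
For orientation, a hedged sketch of a query that would use this mode (assumes a SparkSession named spark and a socket source producing a string "value" column; all names are illustrative):

    // Complete mode re-emits the whole aggregated result on every trigger,
    // which is why it is only allowed for queries that contain aggregations.
    val counts = spark.readStream
      .format("socket")
      .option("host", "localhost")
      .option("port", "9999")
      .load()
      .groupBy("value")
      .count()

    val query = counts.writeStream
      .outputMode("complete")
      .format("console")
      .start()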

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala
index f498e07..256f64e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/InternalRow.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types.{DataType, Decimal, StructType}
 
 /**
- * An abstract class for row used internal in Spark SQL, which only contain the columns as
+ * An abstract class for row used internally in Spark SQL, which only contains the columns as
  * internal types.
  */
 abstract class InternalRow extends SpecializedGetters with Serializable {

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
index 4b8cac8..78897da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.expressions.Expression
 
 
 /**
- * Interface for the system catalog (of columns, partitions, tables, and databases).
+ * Interface for the system catalog (of functions, partitions, tables, and databases).
  *
  * This is only used for non-temporary items, and implementations must be thread-safe as they
  * can be accessed in multiple threads. This is an external catalog because it is expected to

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 221f830..b93a5d0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -70,9 +70,9 @@ abstract class Expression extends TreeNode[Expression] {
    * children.
    *
    * Note that this means that an expression should be considered as non-deterministic if:
-   * - if it relies on some mutable internal state, or
-   * - if it relies on some implicit input that is not part of the children expression list.
-   * - if it has non-deterministic child or children.
+   * - it relies on some mutable internal state, or
+   * - it relies on some implicit input that is not part of the children expression list.
+   * - it has non-deterministic child or children.
    *
    * An example would be `SparkPartitionID` that relies on the partition id returned by TaskContext.
    * By default leaf expressions are deterministic as Nil.forall(_.deterministic) returns true.
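
A tiny illustration of that last sentence, since the vacuous-truth behaviour of forall is easy to miss (plain Scala, not Spark code):

    // forall over an empty collection is vacuously true, so an expression with no
    // children is deterministic unless it overrides the default.
    val childFlags: Seq[Boolean] = Nil   // stand-in for the children's deterministic flags
    assert(childFlags.forall(identity))  // holds: Nil.forall(...) is always true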

http://git-wip-us.apache.org/repos/asf/spark/blob/b226f10e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index e517ec1..038b023 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -924,7 +924,7 @@ case class InitializeJavaBean(beanInstance: Expression, setters: Map[String, Exp
 /**
  * Asserts that input values of a non-nullable child expression are not null.
  *
- * Note that there are cases where `child.nullable == true`, while we still needs to add this
+ * Note that there are cases where `child.nullable == true`, while we still need to add this
  * assertion.  Consider a nullable column `s` whose data type is a struct containing a non-nullable
  * `Int` field named `i`.  Expression `s.i` is nullable because `s` can be null.  However, for all
  * non-null `s`, `s.i` can't be null.
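
A hedged, self-contained sketch of the shape described here (names are made up; this is not Spark's code):

    // `i` is a primitive Int and therefore non-nullable, but a record's `s` may
    // still be null, so `s.i` is nullable only because `s` is. Deserializing such
    // data into these classes is where a not-null assertion earns its keep.
    case class Inner(i: Int)
    case class Outer(s: Inner)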

