Repository: spark
Updated Branches:
  refs/heads/master e1afc4dcc -> 34fc48fb5


[MINOR] Issue: Change "slice" vs "partition" in exception messages (and code?)

## What changes were proposed in this pull request?

Came across the term "slice" when running some spark scala code. Consequently, 
a Google search indicated that "slices" and "partitions" refer to the same 
things; indeed see:

- [This issue](https://issues.apache.org/jira/browse/SPARK-1701)
- [This pull request](https://github.com/apache/spark/pull/2305)
- [This StackOverflow 
answer](http://stackoverflow.com/questions/23436640/what-is-the-difference-between-an-rdd-partition-and-a-slice)
 and [this 
one](http://stackoverflow.com/questions/24269495/what-are-the-differences-between-slices-and-partitions-of-rdds)

Thus this pull request fixes the occurrence of slice I came across. 
Nonetheless, [it would 
appear](https://github.com/apache/spark/search?utf8=%E2%9C%93&q=slice&type=) 
there are still many references to "slice/slices" - thus I thought I'd raise 
this Pull Request to address the issue (sorry if this is the wrong place, I'm 
not too familiar with raising apache issues).

## How was this patch tested?

(Not tested locally - only a minor exception message change.)

Please review http://spark.apache.org/contributing.html before opening a pull 
request.

Author: asmith26 <[email protected]>

Closes #17565 from asmith26/master.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/34fc48fb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/34fc48fb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/34fc48fb

Branch: refs/heads/master
Commit: 34fc48fb5976ede00f3f6d8c4d3eec979e4f4d7f
Parents: e1afc4d
Author: asmith26 <[email protected]>
Authored: Sun Apr 9 07:47:23 2017 +0100
Committer: Sean Owen <[email protected]>
Committed: Sun Apr 9 07:47:23 2017 +0100

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala    | 2 +-
 examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java  | 2 +-
 examples/src/main/java/org/apache/spark/examples/JavaTC.java       | 2 +-
 .../src/main/scala/org/apache/spark/examples/BroadcastTest.scala   | 2 +-
 .../main/scala/org/apache/spark/examples/MultiBroadcastTest.scala  | 2 +-
 examples/src/main/scala/org/apache/spark/examples/SparkALS.scala   | 2 +-
 examples/src/main/scala/org/apache/spark/examples/SparkLR.scala    | 2 +-
 7 files changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala 
b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
index e909273..9f8019b 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
@@ -116,7 +116,7 @@ private object ParallelCollectionRDD {
    */
   def slice[T: ClassTag](seq: Seq[T], numSlices: Int): Seq[Seq[T]] = {
     if (numSlices < 1) {
-      throw new IllegalArgumentException("Positive number of slices required")
+      throw new IllegalArgumentException("Positive number of partitions 
required")
     }
     // Sequences need to be sliced at the same set of index positions for 
operations
     // like RDD.zip() to behave as expected

http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java 
b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
index cb4b265..37bd8ff 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
@@ -26,7 +26,7 @@ import java.util.List;
 
 /**
  * Computes an approximation to pi
- * Usage: JavaSparkPi [slices]
+ * Usage: JavaSparkPi [partitions]
  */
 public final class JavaSparkPi {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/examples/src/main/java/org/apache/spark/examples/JavaTC.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaTC.java 
b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
index bde30b8..c9ca9c9 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaTC.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
@@ -32,7 +32,7 @@ import org.apache.spark.sql.SparkSession;
 
 /**
  * Transitive closure on a graph, implemented in Java.
- * Usage: JavaTC [slices]
+ * Usage: JavaTC [partitions]
  */
 public final class JavaTC {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
----------------------------------------------------------------------
diff --git 
a/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala 
b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
index 86eed38..25718f9 100644
--- a/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
@@ -21,7 +21,7 @@ package org.apache.spark.examples
 import org.apache.spark.sql.SparkSession
 
 /**
- * Usage: BroadcastTest [slices] [numElem] [blockSize]
+ * Usage: BroadcastTest [partitions] [numElem] [blockSize]
  */
 object BroadcastTest {
   def main(args: Array[String]) {

http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
----------------------------------------------------------------------
diff --git 
a/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala 
b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
index 6495a86..e6f33b7 100644
--- a/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.SparkSession
 
 
 /**
- * Usage: MultiBroadcastTest [slices] [numElem]
+ * Usage: MultiBroadcastTest [partitions] [numElem]
  */
 object MultiBroadcastTest {
   def main(args: Array[String]) {

http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala 
b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
index 8a3d08f..a99ddd9 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
@@ -100,7 +100,7 @@ object SparkALS {
         ITERATIONS = iters.getOrElse("5").toInt
         slices = slices_.getOrElse("2").toInt
       case _ =>
-        System.err.println("Usage: SparkALS [M] [U] [F] [iters] [slices]")
+        System.err.println("Usage: SparkALS [M] [U] [F] [iters] [partitions]")
         System.exit(1)
     }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/34fc48fb/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala 
b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
index afa8f58..cb2be09 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.SparkSession
 
 /**
  * Logistic regression based classification.
- * Usage: SparkLR [slices]
+ * Usage: SparkLR [partitions]
  *
  * This is an example implementation for learning how to use Spark. For more 
conventional use,
  * please refer to org.apache.spark.ml.classification.LogisticRegression.


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to