Repository: spark
Updated Branches:
  refs/heads/master 667d4ea7b -> 865ec32dd


[MINOR][X][X] Replace all occurrences of None: Option with Option.empty

## What changes were proposed in this pull request?
Replace all occurrences of `None: Option[X]` with `Option.empty[X]`
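
Both forms produce the same value with the same static type; `Option.empty[X]` just spells the type parameter once instead of ascribing it onto the value. A minimal illustration (not part of the diff):

```scala
val a = None: Option[String]   // type ascription on the value
val b = Option.empty[String]   // factory method, same result
assert(a == b && a.isEmpty)    // both are None, statically typed Option[String]
```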

## How was this patch tested?
Existing tests

Author: Sandeep Singh <sand...@techaddict.me>

Closes #13591 from techaddict/minor-7.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/865ec32d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/865ec32d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/865ec32d

Branch: refs/heads/master
Commit: 865ec32dd997e63aea01a871d1c7b4947f43c111
Parents: 667d4ea
Author: Sandeep Singh <sand...@techaddict.me>
Authored: Fri Jun 10 13:06:51 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Fri Jun 10 13:06:51 2016 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/catalyst/trees/TreeNode.scala    |  4 ++--
 .../main/scala/org/apache/spark/sql/DataFrameWriter.scala |  2 +-
 .../sql/execution/command/createDataSourceTables.scala    |  2 +-
 .../spark/sql/execution/exchange/ShuffleExchange.scala    |  2 +-
 .../org/apache/spark/sql/hive/orc/OrcQuerySuite.scala     | 10 +++++-----
 .../spark/streaming/receiver/ReceivedBlockHandler.scala   |  2 +-
 6 files changed, 11 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/865ec32d/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index f924efe..3cc7a1a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -105,7 +105,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
    */
   def find(f: BaseType => Boolean): Option[BaseType] = f(this) match {
     case true => Some(this)
-    case false => children.foldLeft(None: Option[BaseType]) { (l, r) => l.orElse(r.find(f)) }
+    case false => children.foldLeft(Option.empty[BaseType]) { (l, r) => l.orElse(r.find(f)) }
   }
 
   /**
@@ -165,7 +165,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
   def collectFirst[B](pf: PartialFunction[BaseType, B]): Option[B] = {
     val lifted = pf.lift
     lifted(this).orElse {
-      children.foldLeft(None: Option[B]) { (l, r) => l.orElse(r.collectFirst(pf)) }
+      children.foldLeft(Option.empty[B]) { (l, r) => l.orElse(r.collectFirst(pf)) }
     }
   }
 

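A note on why the typed seed matters in these folds (not part of the commit): seeding `foldLeft` with a bare `None` would make Scala infer the accumulator type as `None.type`, so the `orElse` in the fold body would fail to type-check. A standalone sketch of the same shape, using a plain list rather than Spark's tree:

    val children = List(1, 2, 3)
    // children.foldLeft(None) { (l, r) => l.orElse(Some(r)) }   // does not compile:
    //                                                           // accumulator inferred as None.type
    val first = children.foldLeft(Option.empty[Int]) { (l, r) => l.orElse(Some(r)) }
    // first == Some(1)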
http://git-wip-us.apache.org/repos/asf/spark/blob/865ec32d/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index 78b74f9..1c2003c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -503,7 +503,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   private def insertInto(tableIdent: TableIdentifier): Unit = {
     assertNotBucketed("insertInto")
     assertNotStreaming("insertInto() can only be called on non-continuous queries")
-    val partitions = normalizedParCols.map(_.map(col => col -> (None: Option[String])).toMap)
+    val partitions = normalizedParCols.map(_.map(col => col -> (Option.empty[String])).toMap)
     val overwrite = mode == SaveMode.Overwrite
 
     df.sparkSession.sessionState.executePlan(

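For context (not part of the commit), the rewritten expression maps each partition column to an absent value; a sketch with hypothetical column names:

    val normalizedParCols = Some(Seq("year", "month"))   // hypothetical input
    val partitions = normalizedParCols.map(_.map(col => col -> Option.empty[String]).toMap)
    // partitions == Some(Map("year" -> None, "month" -> None))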
http://git-wip-us.apache.org/repos/asf/spark/blob/865ec32d/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
index 66753fa..865e406 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/createDataSourceTables.scala
@@ -169,7 +169,7 @@ case class CreateDataSourceTableAsSelectCommand(
         options
       }
 
-    var existingSchema = None: Option[StructType]
+    var existingSchema = Option.empty[StructType]
     if (sparkSession.sessionState.catalog.tableExists(tableIdent)) {
       // Check if we need to throw an exception or just return.
       mode match {

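Here the annotation is load-bearing: for a `var`, initializing with a bare `None` would type the variable as `None.type`, and the later reassignment to `Some(...)` would not compile. A minimal sketch (hypothetical names, not the command's logic):

    val tableExists = true                       // stand-in for the catalog lookup
    var existingSchema = Option.empty[String]    // typed Option[String], so reassignable
    if (tableExists) existingSchema = Some("id INT")
    // with `var existingSchema = None`, the assignment above would not compile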
http://git-wip-us.apache.org/repos/asf/spark/blob/865ec32d/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
index e18b59f..afe0fbe 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
@@ -129,7 +129,7 @@ case class ShuffleExchange(
 
 object ShuffleExchange {
   def apply(newPartitioning: Partitioning, child: SparkPlan): ShuffleExchange = {
-    ShuffleExchange(newPartitioning, child, coordinator = None: Option[ExchangeCoordinator])
+    ShuffleExchange(newPartitioning, child, coordinator = Option.empty[ExchangeCoordinator])
   }
 
   /**

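In this call the expected type is already fixed by the named parameter, so a bare `None` would also compile; `Option.empty` is simply consistent with the rest of the change. A sketch with a hypothetical case class:

    case class Exchange(partitioning: String, coordinator: Option[String])
    val e1 = Exchange("hash", coordinator = Option.empty[String])
    val e2 = Exchange("hash", coordinator = None)   // also type-checks: the parameter type drives inference
    // e1 == e2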
http://git-wip-us.apache.org/repos/asf/spark/blob/865ec32d/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index 9771b23..e6c9c5d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -147,11 +147,11 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
 
   test("save and load case class RDD with `None`s as orc") {
     val data = (
-      None: Option[Int],
-      None: Option[Long],
-      None: Option[Float],
-      None: Option[Double],
-      None: Option[Boolean]
+      Option.empty[Int],
+      Option.empty[Long],
+      Option.empty[Float],
+      Option.empty[Double],
+      Option.empty[Boolean]
     ) :: Nil
 
     withOrcFile(data) { file =>

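The type arguments are what this test relies on: bare `None`s would give the tuple the element type `None.type` in every slot, while schema derivation for the ORC round-trip needs the concrete `Option[Int]`, `Option[Long]`, and so on. Sketch (not part of the commit):

    val untyped = (None, None)                             // (None.type, None.type): element types lost
    val typed   = (Option.empty[Int], Option.empty[Long])  // (Option[Int], Option[Long])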
http://git-wip-us.apache.org/repos/asf/spark/blob/865ec32d/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
----------------------------------------------------------------------
diff --git 
a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
 
b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
index c4bc5cf..80c0795 100644
--- 
a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
+++ 
b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
@@ -170,7 +170,7 @@ private[streaming] class WriteAheadLogBasedBlockHandler(
    */
   def storeBlock(blockId: StreamBlockId, block: ReceivedBlock): ReceivedBlockStoreResult = {
 
-    var numRecords = None: Option[Long]
+    var numRecords = Option.empty[Long]
     // Serialize the block so that it can be inserted into both
     val serializedBlock = block match {
       case ArrayBufferBlock(arrayBuffer) =>

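Same pattern as in createDataSourceTables.scala: the typed empty seed keeps the `var` reassignable. A sketch of filling it inside a match (hypothetical shapes, not Spark's block types):

    var numRecords = Option.empty[Long]
    val payload: Any = Seq(1, 2, 3)
    val serialized = payload match {
      case items: Seq[_] =>
        numRecords = Some(items.length.toLong)
        items.mkString(",")
      case other => other.toString
    }
    // numRecords == Some(3)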
