Repository: spark
Updated Branches:
  refs/heads/master c567a68a5 -> 725715cbf


[SPARK-3010] fix redundant conditional

https://issues.apache.org/jira/browse/SPARK-3010

This PR fixes redundant conditionals in Spark, such as:
1.
private[spark] def codegenEnabled: Boolean =
  if (getConf(CODEGEN_ENABLED, "false") == "true") true else false
2.
x => if (x == 2) true else false
...
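
For reference, a minimal standalone sketch (not part of this patch; the object
and method names below are made up for illustration) of why the if/else
wrapper is redundant: a Boolean expression can be returned directly, and the
inverted variant "if (cond) false else true" is simply !cond.

    object RedundantConditionalDemo {
      // Redundant form: wraps a Boolean expression in if/else.
      def isTwoVerbose(x: Int): Boolean = if (x == 2) true else false

      // Simplified form: the comparison already evaluates to a Boolean.
      def isTwo(x: Int): Boolean = x == 2

      // The inverted variant "if (cond) false else true" is simply !cond.
      def isNotTwo(x: Int): Boolean = !(x == 2)

      def main(args: Array[String]): Unit = {
        for (x <- 0 to 4) {
          assert(isTwoVerbose(x) == isTwo(x))
          assert(isNotTwo(x) == !isTwo(x))
        }
      }
    }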

Author: scwf <wangf...@huawei.com>
Author: wangfei <wangfei_he...@126.com>

Closes #1992 from scwf/condition and squashes the following commits:

b2a044a [scwf] merge SecurityManager
e16239c [scwf] fix conflict
6811401 [scwf] fix merge conflict
0824df4 [scwf] Merge branch 'master' of https://github.com/apache/spark into patch-4
e274515 [scwf] fix redundant conditions
d032bf9 [wangfei] [SQL]Excess judgment


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/725715cb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/725715cb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/725715cb

Branch: refs/heads/master
Commit: 725715cbf3c1834b81aa91ea751fd04d307f504d
Parents: c567a68
Author: scwf <wangf...@huawei.com>
Authored: Sun Aug 31 14:02:11 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sun Aug 31 14:02:11 2014 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SecurityManager.scala  |  4 ++--
 .../apache/spark/rdd/PartitionPruningRDDSuite.scala    | 13 ++++---------
 .../org/apache/spark/sql/columnar/ColumnType.scala     |  4 +---
 .../org/apache/spark/deploy/yarn/ClientBase.scala      |  2 +-
 4 files changed, 8 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/725715cb/core/src/main/scala/org/apache/spark/SecurityManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 25c2c9f..12b15fe 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -294,7 +294,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkUIViewPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " viewAcls=" +
       viewAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!viewAcls.contains(user))) false else true
+    !aclsEnabled || user == null || viewAcls.contains(user)
   }
 
   /**
@@ -309,7 +309,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkModifyPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " modifyAcls=" 
+
       modifyAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!modifyAcls.contains(user))) false else true
+    !aclsEnabled || user == null || modifyAcls.contains(user)
   }
 
 

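The two SecurityManager rewrites above also apply De Morgan's law: negating
"a && b && !c" yields "!a || !b || c". A self-contained equivalence check
(hypothetical inputs, not Spark code) for the old and new expressions:

    object AclRewriteCheck {
      // Old form: false when ACLs are on, the user is known and not in the list.
      def before(aclsEnabled: Boolean, user: String, acls: Set[String]): Boolean =
        if (aclsEnabled && (user != null) && (!acls.contains(user))) false else true

      // New form: the whole condition negated, expanded via De Morgan's law.
      def after(aclsEnabled: Boolean, user: String, acls: Set[String]): Boolean =
        !aclsEnabled || user == null || acls.contains(user)

      def main(args: Array[String]): Unit = {
        val acls = Set("alice")
        for (enabled <- Seq(true, false); user <- Seq("alice", "bob", null))
          assert(before(enabled, user, acls) == after(enabled, user, acls))
      }
    }
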
http://git-wip-us.apache.org/repos/asf/spark/blob/725715cb/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
index 956c2b9..8408d7e 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
@@ -38,9 +38,7 @@ class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
         Iterator()
       }
     }
-    val prunedRDD = PartitionPruningRDD.create(rdd, {
-      x => if (x == 2) true else false
-    })
+    val prunedRDD = PartitionPruningRDD.create(rdd, _ == 2)
     assert(prunedRDD.partitions.length == 1)
     val p = prunedRDD.partitions(0)
     assert(p.index == 0)
@@ -62,13 +60,10 @@ class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
         List(split.asInstanceOf[TestPartition].testValue).iterator
       }
     }
-    val prunedRDD1 = PartitionPruningRDD.create(rdd, {
-      x => if (x == 0) true else false
-    })
+    val prunedRDD1 = PartitionPruningRDD.create(rdd, _ == 0)
 
-    val prunedRDD2 = PartitionPruningRDD.create(rdd, {
-      x => if (x == 2) true else false
-    })
+
+    val prunedRDD2 = PartitionPruningRDD.create(rdd, _ == 2)
 
     val merged = prunedRDD1 ++ prunedRDD2
     assert(merged.count() == 2)

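In the test above, the verbose lambdas are replaced by Scala's placeholder
syntax: "_ == 2" denotes the same Int => Boolean function. A small standalone
illustration using plain collections (no SparkContext required):

    object PredicateSyntaxDemo {
      def main(args: Array[String]): Unit = {
        val verbose: Int => Boolean = x => if (x == 2) true else false
        val concise: Int => Boolean = _ == 2
        val partitionIds = 0 until 10
        // Both predicates keep exactly the same elements.
        assert(partitionIds.filter(verbose) == partitionIds.filter(concise))
        println(partitionIds.filter(concise)) // Vector(2)
      }
    }
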
http://git-wip-us.apache.org/repos/asf/spark/blob/725715cb/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala b/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
index 794bc60..9a61600 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
@@ -158,9 +158,7 @@ private[sql] object BOOLEAN extends NativeColumnType(BooleanType, 4, 1) {
     buffer.put(if (v) 1.toByte else 0.toByte)
   }
 
-  override def extract(buffer: ByteBuffer) = {
-    if (buffer.get() == 1) true else false
-  }
+  override def extract(buffer: ByteBuffer) = buffer.get() == 1
 
   override def setField(row: MutableRow, ordinal: Int, value: Boolean) {
     row.setBoolean(ordinal, value)

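The BOOLEAN column type above stores each value as a single byte, so the
simplified extract only compares that byte with 1. A minimal sketch of the
round trip using a plain java.nio.ByteBuffer (Spark's column builder and
accessor machinery is omitted here):

    import java.nio.ByteBuffer

    object BooleanColumnDemo {
      // Mirrors append: write 1 for true, 0 for false.
      def append(v: Boolean, buffer: ByteBuffer): Unit =
        buffer.put(if (v) 1.toByte else 0.toByte)

      // Mirrors the simplified extract: a byte equal to 1 means true.
      def extract(buffer: ByteBuffer): Boolean = buffer.get() == 1

      def main(args: Array[String]): Unit = {
        val buf = ByteBuffer.allocate(2)
        append(true, buf)
        append(false, buf)
        buf.flip() // switch the buffer from writing to reading
        assert(extract(buf))
        assert(!extract(buf))
      }
    }
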
http://git-wip-us.apache.org/repos/asf/spark/blob/725715cb/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
----------------------------------------------------------------------
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 6cf300c..5d8e5e6 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -209,7 +209,7 @@ trait ClientBase extends Logging {
       if (! localPath.isEmpty()) {
         val localURI = new URI(localPath)
         if (!ClientBase.LOCAL_SCHEME.equals(localURI.getScheme())) {
-          val setPermissions = if (destName.equals(ClientBase.APP_JAR)) true else false
+          val setPermissions = destName.equals(ClientBase.APP_JAR)
           val destPath = copyRemoteFile(dst, qualifyForLocal(localURI), replication, setPermissions)
           val destFs = FileSystem.get(destPath.toUri(), conf)
           distCacheMgr.addResource(destFs, conf, destPath, localResources, LocalResourceType.FILE,

