spark git commit: [SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap

2015-01-26 Thread rxin
Repository: spark
Updated Branches:
  refs/heads/master 81251682e -> 142093179


[SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap

j.u.c.ConcurrentHashMap is more battle tested.
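
As a standalone sketch (not part of the patch) of the main API
difference the change has to bridge: TrieMap#get returns an Option,
while j.u.c.ConcurrentHashMap#get returns the value or null, which is
why lookups in the diff below are wrapped in Option(...):

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.concurrent.TrieMap

    val trie = TrieMap[String, String]()
    val chm  = new ConcurrentHashMap[String, String]()

    trie.get("missing")         // None  -- Option[String]
    chm.get("missing")          // null  -- plain String
    Option(chm.get("missing"))  // None  -- the wrapping used in getOption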

cc rxin JoshRosen pwendell

Author: Davies Liu <dav...@databricks.com>

Closes #4208 from davies/safe-conf and squashes the following commits:

c2182dc [Davies Liu] address comments, fix tests
3a1d821 [Davies Liu] fix test
da14ced [Davies Liu] Merge branch 'master' of github.com:apache/spark into safe-conf
ae4d305 [Davies Liu] change to j.u.c.ConcurrentMap
f8fa1cf [Davies Liu] change to TrieMap
a1d769a [Davies Liu] make SparkConf thread-safe


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/14209317
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/14209317
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/14209317

Branch: refs/heads/master
Commit: 142093179a4c40bdd90744191034de7b94a963ff
Parents: 8125168
Author: Davies Liu <dav...@databricks.com>
Authored: Mon Jan 26 12:51:32 2015 -0800
Committer: Reynold Xin <r...@databricks.com>
Committed: Mon Jan 26 12:51:32 2015 -0800

--
 .../main/scala/org/apache/spark/SparkConf.scala | 38 ++--
 .../deploy/worker/WorkerArgumentsTest.scala |  4 +--
 .../apache/spark/storage/LocalDirsSuite.scala   |  2 +-
 3 files changed, 23 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/14209317/core/src/main/scala/org/apache/spark/SparkConf.scala
--
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index f9d4aa4..cd91c8f 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -17,9 +17,11 @@
 
 package org.apache.spark
 
+import java.util.concurrent.ConcurrentHashMap
+
 import scala.collection.JavaConverters._
-import scala.collection.concurrent.TrieMap
-import scala.collection.mutable.{HashMap, LinkedHashSet}
+import scala.collection.mutable.LinkedHashSet
+
 import org.apache.spark.serializer.KryoSerializer
 
 /**
@@ -47,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Create a SparkConf that loads defaults from system properties and the classpath */
   def this() = this(true)
 
-  private[spark] val settings = new TrieMap[String, String]()
+  private val settings = new ConcurrentHashMap[String, String]()
 
   if (loadDefaults) {
 // Load any spark.* system properties
for ((k, v) <- System.getProperties.asScala if k.startsWith("spark.")) {
-  settings(k) = v
+  set(k, v)
 }
   }
 
@@ -64,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 if (value == null) {
   throw new NullPointerException("null value for " + key)
 }
-settings(key) = value
+settings.put(key, value)
 this
   }
 
@@ -130,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Set multiple parameters together */
   def setAll(settings: Traversable[(String, String)]) = {
-this.settings ++= settings
+this.settings.putAll(settings.toMap.asJava)
 this
   }
 
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-if (!settings.contains(key)) {
-  settings(key) = value
-}
+settings.putIfAbsent(key, value)
 this
   }
 
@@ -164,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Get a parameter; throws a NoSuchElementException if it's not set */
   def get(key: String): String = {
-settings.getOrElse(key, throw new NoSuchElementException(key))
+getOption(key).getOrElse(throw new NoSuchElementException(key))
   }
 
   /** Get a parameter, falling back to a default if not set */
   def get(key: String, defaultValue: String): String = {
-settings.getOrElse(key, defaultValue)
+getOption(key).getOrElse(defaultValue)
   }
 
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-settings.get(key)
+Option(settings.get(key))
   }
 
   /** Get all parameters as a list of pairs */
-  def getAll: Array[(String, String)] = settings.toArray
+  def getAll: Array[(String, String)] = {
+settings.entrySet().asScala.map(x => (x.getKey, x.getValue)).toArray
+  }
 
   /** Get a parameter as an integer, falling back to a default if not set */
   def getInt(key: String, defaultValue: Int): Int = {
@@ -225,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
  def getAppId: String = get("spark.app.id")
 
   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.contains(key)
+  def contains(key: String): Boolean = settings.containsKey(key)

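A note on the setIfMissing change above (illustrative sketch, not from
the patch): the old contains-then-set was two separate map operations,
so two racing threads could both pass the check; putIfAbsent performs
the check and the insert as a single atomic step.

    import java.util.concurrent.ConcurrentHashMap

    val settings = new ConcurrentHashMap[String, String]()
    // Two callers racing to set a default for the same key: exactly one
    // write wins and the other is a no-op.
    settings.putIfAbsent("spark.app.name", "first")
    settings.putIfAbsent("spark.app.name", "second")
    assert(settings.get("spark.app.name") == "first")
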
spark git commit: [SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap

2015-01-26 Thread joshrosen
Repository: spark
Updated Branches:
  refs/heads/branch-1.2 cf65620f5 -> ef6fe84dc


[SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap

j.u.c.ConcurrentHashMap is more battle tested.

cc rxin JoshRosen pwendell

Author: Davies Liu <dav...@databricks.com>

Closes #4208 from davies/safe-conf and squashes the following commits:

c2182dc [Davies Liu] address comments, fix tests
3a1d821 [Davies Liu] fix test
da14ced [Davies Liu] Merge branch 'master' of github.com:apache/spark into safe-conf
ae4d305 [Davies Liu] change to j.u.c.ConcurrentMap
f8fa1cf [Davies Liu] change to TrieMap
a1d769a [Davies Liu] make SparkConf thread-safe

(cherry picked from commit 142093179a4c40bdd90744191034de7b94a963ff)
Signed-off-by: Josh Rosen <joshro...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ef6fe84d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ef6fe84d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ef6fe84d

Branch: refs/heads/branch-1.2
Commit: ef6fe84dcbfff2d937088ea179889752317896e5
Parents: cf65620
Author: Davies Liu <dav...@databricks.com>
Authored: Mon Jan 26 12:51:32 2015 -0800
Committer: Josh Rosen <joshro...@databricks.com>
Committed: Mon Jan 26 13:22:17 2015 -0800

--
 .../main/scala/org/apache/spark/SparkConf.scala | 38 ++--
 .../deploy/worker/WorkerArgumentsTest.scala |  4 +--
 .../apache/spark/storage/LocalDirsSuite.scala   |  2 +-
 3 files changed, 23 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/ef6fe84d/core/src/main/scala/org/apache/spark/SparkConf.scala
--
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index dd80013..3337974 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -17,9 +17,11 @@
 
 package org.apache.spark
 
+import java.util.concurrent.ConcurrentHashMap
+
 import scala.collection.JavaConverters._
-import scala.collection.concurrent.TrieMap
-import scala.collection.mutable.{HashMap, LinkedHashSet}
+import scala.collection.mutable.LinkedHashSet
+
 import org.apache.spark.serializer.KryoSerializer
 
 /**
@@ -47,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Create a SparkConf that loads defaults from system properties and the classpath */
   def this() = this(true)
 
-  private[spark] val settings = new TrieMap[String, String]()
+  private val settings = new ConcurrentHashMap[String, String]()
 
   if (loadDefaults) {
 // Load any spark.* system properties
for ((k, v) <- System.getProperties.asScala if k.startsWith("spark.")) {
-  settings(k) = v
+  set(k, v)
 }
   }
 
@@ -64,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 if (value == null) {
   throw new NullPointerException("null value")
 }
-settings(key) = value
+settings.put(key, value)
 this
   }
 
@@ -130,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Set multiple parameters together */
   def setAll(settings: Traversable[(String, String)]) = {
-this.settings ++= settings
+this.settings.putAll(settings.toMap.asJava)
 this
   }
 
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-if (!settings.contains(key)) {
-  settings(key) = value
-}
+settings.putIfAbsent(key, value)
 this
   }
 
@@ -164,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Get a parameter; throws a NoSuchElementException if it's not set */
   def get(key: String): String = {
-settings.getOrElse(key, throw new NoSuchElementException(key))
+getOption(key).getOrElse(throw new NoSuchElementException(key))
   }
 
   /** Get a parameter, falling back to a default if not set */
   def get(key: String, defaultValue: String): String = {
-settings.getOrElse(key, defaultValue)
+getOption(key).getOrElse(defaultValue)
   }
 
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-settings.get(key)
+Option(settings.get(key))
   }
 
   /** Get all parameters as a list of pairs */
-  def getAll: Array[(String, String)] = settings.toArray
+  def getAll: Array[(String, String)] = {
+settings.entrySet().asScala.map(x => (x.getKey, x.getValue)).toArray
+  }
 
   /** Get a parameter as an integer, falling back to a default if not set */
   def getInt(key: String, defaultValue: Int): Int = {
@@ -225,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
  def getAppId: String = get("spark.app.id")