Repository: spark
Updated Branches:
refs/heads/master 59e3a5644 -> 8c911adac
[SPARK-20465][CORE] Throws a proper exception when any temp directory could not be got
## What changes were proposed in this pull request?
This PR proposes to throw an exception with a better message rather than an
`ArrayIndexOutOfBoundsException` when temp directories could not be created.
Running the commands below:
```bash
./bin/spark-shell --conf spark.local.dir=/NONEXISTENT_DIR_ONE,/NONEXISTENT_DIR_TWO
```
produces ...
**Before**
```
Exception in thread "main" java.lang.ExceptionInInitializerError
...
Caused by: java.lang.ArrayIndexOutOfBoundsException: 0
...
```
**After**
```
Exception in thread "main" java.lang.ExceptionInInitializerError
...
Caused by: java.io.IOException: Failed to get a temp directory under [/NONEXISTENT_DIR_ONE,/NONEXISTENT_DIR_TWO].
...
```
## How was this patch tested?
Unit tests in `LocalDirsSuite.scala`.
Author: hyukjinkwon <[email protected]>
Closes #17768 from HyukjinKwon/throws-temp-dir-exception.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8c911ada
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8c911ada
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8c911ada
Branch: refs/heads/master
Commit: 8c911adac56a1b1d95bc19915e0070ce7305257c
Parents: 59e3a56
Author: hyukjinkwon <[email protected]>
Authored: Fri Apr 28 08:49:35 2017 +0100
Committer: Sean Owen <[email protected]>
Committed: Fri Apr 28 08:49:35 2017 +0100
----------------------------------------------------------------------
.../scala/org/apache/spark/util/Utils.scala | 6 ++++-
.../apache/spark/storage/LocalDirsSuite.scala | 23 +++++++++++++++++---
2 files changed, 25 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/8c911ada/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index e042bad..4d37db9 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -740,7 +740,11 @@ private[spark] object Utils extends Logging {
* always return a single directory.
*/
def getLocalDir(conf: SparkConf): String = {
- getOrCreateLocalRootDirs(conf)(0)
+ getOrCreateLocalRootDirs(conf).headOption.getOrElse {
+ val configuredLocalDirs = getConfiguredLocalDirs(conf)
+ throw new IOException(
+ s"Failed to get a temp directory under [${configuredLocalDirs.mkString(",")}].")
+ }
}
private[spark] def isRunningInYarnContainer(conf: SparkConf): Boolean = {
http://git-wip-us.apache.org/repos/asf/spark/blob/8c911ada/core/src/test/scala/org/apache/spark/storage/LocalDirsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/storage/LocalDirsSuite.scala b/core/src/test/scala/org/apache/spark/storage/LocalDirsSuite.scala
index c707407..f7b3a27 100644
--- a/core/src/test/scala/org/apache/spark/storage/LocalDirsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/LocalDirsSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.storage
-import java.io.File
+import java.io.{File, IOException}
import org.scalatest.BeforeAndAfter
@@ -33,9 +33,13 @@ class LocalDirsSuite extends SparkFunSuite with BeforeAndAfter {
Utils.clearLocalRootDirs()
}
+ after {
+ Utils.clearLocalRootDirs()
+ }
+
test("Utils.getLocalDir() returns a valid directory, even if some local dirs are missing") {
// Regression test for SPARK-2974
- assert(!new File("/NONEXISTENT_DIR").exists())
+ assert(!new File("/NONEXISTENT_PATH").exists())
val conf = new SparkConf(false)
.set("spark.local.dir", s"/NONEXISTENT_PATH,${System.getProperty("java.io.tmpdir")}")
assert(new File(Utils.getLocalDir(conf)).exists())
@@ -43,7 +47,7 @@ class LocalDirsSuite extends SparkFunSuite with BeforeAndAfter {
test("SPARK_LOCAL_DIRS override also affects driver") {
// Regression test for SPARK-2975
- assert(!new File("/NONEXISTENT_DIR").exists())
+ assert(!new File("/NONEXISTENT_PATH").exists())
// spark.local.dir only contains invalid directories, but that's not a problem since
// SPARK_LOCAL_DIRS will override it on both the driver and workers:
val conf = new SparkConfWithEnv(Map("SPARK_LOCAL_DIRS" -> System.getProperty("java.io.tmpdir")))
@@ -51,4 +55,17 @@ class LocalDirsSuite extends SparkFunSuite with BeforeAndAfter {
assert(new File(Utils.getLocalDir(conf)).exists())
}
+ test("Utils.getLocalDir() throws an exception if any temporary directory cannot be retrieved") {
+ val path1 = "/NONEXISTENT_PATH_ONE"
+ val path2 = "/NONEXISTENT_PATH_TWO"
+ assert(!new File(path1).exists())
+ assert(!new File(path2).exists())
+ val conf = new SparkConf(false).set("spark.local.dir", s"$path1,$path2")
+ val message = intercept[IOException] {
+ Utils.getLocalDir(conf)
+ }.getMessage
+ // If any temporary directory could not be retrieved under the given paths above, it should
+ // throw an exception with the message that includes the paths.
+ assert(message.contains(s"$path1,$path2"))
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]