Repository: spark
Updated Branches:
  refs/heads/branch-2.3 a9700cb4a -> fec43fe1b


[SPARK-19613][SS][TEST] Random.nextString is not safe for directory namePrefix

## What changes were proposed in this pull request?

`Random.nextString` is fine for generating random string data, but it is not safe to use as the directory name prefix in `Utils.createDirectory(tempDir, Random.nextString(10))`, since it can return characters that are invalid in file names. This PR replaces it with a filesystem-safe directory namePrefix.

```scala
scala> scala.util.Random.nextString(10)
res0: String = 馨쭔ᎰႻ穚䃈兩㻞藑並
```
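
For comparison, a prefix built from ASCII-only characters avoids the problem. The snippet below is a minimal sketch (not part of the patch) showing two such alternatives available on `scala.util.Random`; the patch itself uses `Random.nextFloat.toString`.

```scala
import scala.util.Random

// The patch's choice: a float rendered with only ASCII digits and a dot, e.g. "0.7231742".
val floatPrefix = Random.nextFloat.toString

// Another filesystem-safe option: restrict the alphabet to alphanumeric characters.
val alnumPrefix = Random.alphanumeric.take(10).mkString

println(floatPrefix) // e.g. 0.34781
println(alnumPrefix) // e.g. k3Xq9aB2mZ
```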

```scala
StateStoreRDDSuite:
- versioning and immutability
- recovering from files
- usage with iterators - only gets and only puts
- preferred locations using StateStoreCoordinator *** FAILED ***
  java.io.IOException: Failed to create a temp directory (under /.../spark/sql/core/target/tmp/StateStoreRDDSuite8712796397908632676) after 10 attempts!
  at org.apache.spark.util.Utils$.createDirectory(Utils.scala:295)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13$$anonfun$apply$6.apply(StateStoreRDDSuite.scala:152)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13$$anonfun$apply$6.apply(StateStoreRDDSuite.scala:149)
  at org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:42)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13.apply(StateStoreRDDSuite.scala:149)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13.apply(StateStoreRDDSuite.scala:149)
  ...
- distributed test *** FAILED ***
  java.io.IOException: Failed to create a temp directory (under /.../spark/sql/core/target/tmp/StateStoreRDDSuite8712796397908632676) after 10 attempts!
  at org.apache.spark.util.Utils$.createDirectory(Utils.scala:295)
```
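
The failure mode follows from how the directory is created: the prefix is combined with a random suffix and directory creation is retried a bounded number of times, so a prefix the filesystem rejects exhausts every attempt. The helper below (`createDirectorySketch` is a hypothetical name) is only a simplified sketch of that retry behavior as implied by the log, not Spark's actual `Utils.createDirectory` implementation.

```scala
import java.io.{File, IOException}
import java.util.UUID

// Hypothetical, simplified sketch of the retry behavior implied by the log above;
// not Spark's actual Utils.createDirectory. If namePrefix contains characters the
// filesystem rejects, mkdirs() fails on every attempt and the loop gives up.
def createDirectorySketch(root: File, namePrefix: String, maxAttempts: Int = 10): File = {
  var attempts = 0
  while (attempts < maxAttempts) {
    attempts += 1
    val candidate = new File(root, s"$namePrefix-${UUID.randomUUID}")
    if (candidate.mkdirs()) {
      return candidate
    }
  }
  throw new IOException(s"Failed to create a temp directory (under $root) after $maxAttempts attempts!")
}
```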

## How was this patch tested?

Pass the existing tests (StateStoreRDDSuite).

Author: Dongjoon Hyun <dongj...@apache.org>

Closes #21446 from dongjoon-hyun/SPARK-19613.

(cherry picked from commit b31b587cd091010337378cf448fd598c37757053)
Signed-off-by: hyukjinkwon <gurwls...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/fec43fe1
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/fec43fe1
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/fec43fe1

Branch: refs/heads/branch-2.3
Commit: fec43fe1bcd7553eff077805a13bb301445c62a3
Parents: a9700cb
Author: Dongjoon Hyun <dongj...@apache.org>
Authored: Tue May 29 10:35:30 2018 +0800
Committer: hyukjinkwon <gurwls...@apache.org>
Committed: Tue May 29 10:35:46 2018 +0800

----------------------------------------------------------------------
 .../execution/streaming/state/StateStoreRDDSuite.scala    | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/fec43fe1/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala
index 65b39f0..579a364 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala
@@ -55,7 +55,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
 
   test("versioning and immutability") {
     withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val rdd1 = makeRDD(spark.sparkContext, Seq("a", "b", "a")).mapPartitionsWithStateStore(
             spark.sqlContext, operatorStateInfo(path, version = 0), keySchema, valueSchema, None)(
             increment)
@@ -73,7 +73,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
   }
 
   test("recovering from files") {
-    val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+    val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
 
     def makeStoreRDD(
         spark: SparkSession,
@@ -101,7 +101,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
   test("usage with iterators - only gets and only puts") {
     withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
       implicit val sqlContext = spark.sqlContext
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val opId = 0
 
       // Returns an iterator of the incremented value made into the store
@@ -149,7 +149,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
     quietly {
       val queryRunId = UUID.randomUUID
       val opId = 0
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
 
       withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
         implicit val sqlContext = spark.sqlContext
@@ -189,7 +189,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
           .config(sparkConf.setMaster("local-cluster[2, 1, 1024]"))
           .getOrCreate()) { spark =>
         implicit val sqlContext = spark.sqlContext
-        val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+        val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
         val opId = 0
         val rdd1 = makeRDD(spark.sparkContext, Seq("a", "b", "a")).mapPartitionsWithStateStore(
           sqlContext, operatorStateInfo(path, version = 0), keySchema, valueSchema, None)(increment)

