Github user JoshRosen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3670#discussion_r23972698
  
    --- Diff: core/src/test/scala/org/apache/spark/SparkContextSuite.scala ---
    @@ -72,4 +77,85 @@ class SparkContextSuite extends FunSuite with LocalSparkContext {
         val byteArray2 = converter.convert(bytesWritable)
         assert(byteArray2.length === 0)
       }
    +
    +  test("addFile works") {
    +    val file = new File("somefile")
    +    val absolutePath = file.getAbsolutePath
    +    try {
    +      Files.write("somewords", file, UTF_8)
    +      val length = file.length()
    +      sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
    +      sc.addFile(file.getAbsolutePath)
    +      sc.parallelize(Array(1), 1).map(x => {
    +        val gotten = new File(SparkFiles.get(file.getName))
    +        if (!gotten.exists()) {
    +          throw new SparkException("file doesn't exist")
    +        }
    +        if (length != gotten.length()) {
    +          throw new SparkException(
    +            s"file has different length $length than added file 
${gotten.length()}")
    +        }
    +        if (absolutePath == gotten.getAbsolutePath) {
    +          throw new SparkException("file should have been copied")
    +        }
    +        x
    +      }).count()
    +    } finally {
    +      sc.stop()
    +      file.delete()
    +    }
    +  }
    +
    +  test("addFile recursive works") {
    +    val pluto = new File("pluto")
    +    val neptune = new File(pluto, "neptune")
    +    val saturn = new File(neptune, "saturn")
    +    val alien1 = new File(neptune, "alien1")
    +    val alien2 = new File(saturn, "alien2")
    +
    +    try {
    +      assert(neptune.mkdirs())
    +      assert(saturn.mkdir())
    +      assert(alien1.createNewFile())
    +      assert(alien2.createNewFile())
    +
    +      sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
    +      sc.addFile(neptune.getAbsolutePath, true)
    +      sc.parallelize(Array(1), 1).map(x => {
    +        val sep = File.separator
    +        if (!new File(SparkFiles.get("neptune" + sep + 
"alien1")).exists()) {
    +          throw new SparkException("can't access file under root added 
directory")
    +        }
    +        if (!new File(SparkFiles.get("neptune" + sep + "saturn" + sep + 
"alien2")).exists()) {
    +          throw new SparkException("can't access file in nested directory")
    +        }
    +        if (new File(SparkFiles.get("pluto" + sep + "neptune" + sep + 
"alien1")).exists()) {
    +          throw new SparkException("file exists that shouldn't")
    +        }
    +        x
    +      }).count()
    +    } finally {
    +      sc.stop()
    +      alien2.delete()
    +      saturn.delete()
    +      alien1.delete()
    +      neptune.delete()
    +      pluto.delete()
    +    }
    +  }
    +
    +  test("addFile recursive can't add directories by default") {
    +    val dir = new File("dir")
    +
    +    try {
    +      sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
    +      sc.addFile(dir.getAbsolutePath)
    +      assert(false, "should have thrown exception")
    --- End diff --
    
    ScalaTest's `intercept` is the idiomatic way to test that expected 
exceptions are thrown.
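    
    For example, a minimal sketch (I'm assuming `addFile` throws a `SparkException` for a non-recursive directory add here; adjust the exception type if it actually throws something else):
    
    ```scala
    intercept[SparkException] {
      sc.addFile(dir.getAbsolutePath)
    }
    ```
    
    `intercept` fails the test if no exception of the expected type is thrown, and it returns the caught exception in case you also want to assert on its message.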

