Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3233#discussion_r24306647
  
    --- Diff: 
yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala ---
    @@ -117,99 +116,149 @@ class YarnClusterSuite extends FunSuite with 
BeforeAndAfterAll with Matchers wit
         }
     
         logInfo(s"RM address in configuration is 
${config.get(YarnConfiguration.RM_ADDRESS)}")
    -    config.foreach { e =>
    -      sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
    -    }
     
         fakeSparkJar = File.createTempFile("sparkJar", null, tempDir)
    -    val sparkHome = sys.props.getOrElse("spark.test.home", 
fail("spark.test.home is not set!"))
    -    sys.props += ("spark.yarn.appMasterEnv.SPARK_HOME" ->  sparkHome)
    -    sys.props += ("spark.executorEnv.SPARK_HOME" -> sparkHome)
    -    sys.props += ("spark.yarn.jar" -> ("local:" + 
fakeSparkJar.getAbsolutePath()))
    -    sys.props += ("spark.executor.instances" -> "1")
    -    sys.props += ("spark.driver.extraClassPath" -> childClasspath)
    -    sys.props += ("spark.executor.extraClassPath" -> childClasspath)
     
         super.beforeAll()
       }
     
       override def afterAll() {
         yarnCluster.stop()
    -    sys.props.retain { case (k, v) => !k.startsWith("spark.") }
    -    sys.props ++= oldConf
         super.afterAll()
       }
     
       test("run Spark in yarn-client mode") {
    -    var result = File.createTempFile("result", null, tempDir)
    -    YarnClusterDriver.main(Array("yarn-client", result.getAbsolutePath()))
    -    checkResult(result)
    -
    -    // verify log urls are present
    -    YarnClusterDriver.listener.addedExecutorInfos.values.foreach { info =>
    -      assert(info.logUrlMap.nonEmpty)
    -    }
    +    testBasicYarnApp(true)
       }
     
       test("run Spark in yarn-cluster mode") {
    -    val main = YarnClusterDriver.getClass.getName().stripSuffix("$")
    -    var result = File.createTempFile("result", null, tempDir)
    -
    -    val args = Array("--class", main,
    -      "--jar", "file:" + fakeSparkJar.getAbsolutePath(),
    -      "--arg", "yarn-cluster",
    -      "--arg", result.getAbsolutePath(),
    -      "--num-executors", "1")
    -    Client.main(args)
    -    checkResult(result)
    -
    -    // verify log urls are present.
    -    YarnClusterDriver.listener.addedExecutorInfos.values.foreach { info =>
    -      assert(info.logUrlMap.nonEmpty)
    -    }
    +    testBasicYarnApp(false)
       }
     
       test("run Spark in yarn-cluster mode unsuccessfully") {
    -    val main = YarnClusterDriver.getClass.getName().stripSuffix("$")
    -
    -    // Use only one argument so the driver will fail
    -    val args = Array("--class", main,
    -      "--jar", "file:" + fakeSparkJar.getAbsolutePath(),
    -      "--arg", "yarn-cluster",
    -      "--num-executors", "1")
    +    // Don't provide arguments so the driver will fail.
         val exception = intercept[SparkException] {
    -      Client.main(args)
    +      runSpark(false, mainClassName(YarnClusterDriver.getClass))
    +      fail("Spark application should have failed.")
         }
    -    assert(Utils.exceptionString(exception).contains("Application finished 
with failed status"))
       }
     
       test("run Python application in yarn-cluster mode") {
         val primaryPyFile = new File(tempDir, "test.py")
    -    Files.write(TEST_PYFILE, primaryPyFile, Charsets.UTF_8)
    +    Files.write(TEST_PYFILE, primaryPyFile, UTF_8)
         val pyFile = new File(tempDir, "test2.py")
    -    Files.write(TEST_PYFILE, pyFile, Charsets.UTF_8)
    +    Files.write(TEST_PYFILE, pyFile, UTF_8)
         var result = File.createTempFile("result", null, tempDir)
     
    -    val args = Array("--class", "org.apache.spark.deploy.PythonRunner",
    -      "--primary-py-file", primaryPyFile.getAbsolutePath(),
    -      "--py-files", pyFile.getAbsolutePath(),
    -      "--arg", "yarn-cluster",
    -      "--arg", result.getAbsolutePath(),
    -      "--name", "python test in yarn-cluster mode",
    -      "--num-executors", "1")
    -    Client.main(args)
    +    runSpark(false, primaryPyFile.getAbsolutePath(),
    +      sparkArgs = Seq("--py-files", pyFile.getAbsolutePath()),
    +      appArgs = Seq(result.getAbsolutePath()))
         checkResult(result)
       }
     
    +  test("user class path first in client mode") {
    +    testUseClassPathFirst(true)
    +  }
    +
    +  test("user class path first in cluster mode") {
    +    testUseClassPathFirst(false)
    +  }
    +
    +  private def testBasicYarnApp(clientMode: Boolean) = {
    +    var result = File.createTempFile("result", null, tempDir)
    +    runSpark(clientMode, mainClassName(YarnClusterDriver.getClass),
    +      appArgs = Seq(result.getAbsolutePath()))
    +    checkResult(result)
    +  }
    +
    +  private def testUseClassPathFirst(clientMode: Boolean) = {
    --- End diff --
    
    These all seem to return `Unit`. It would be good if you could add 
explicit return types to these methods.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to