Github user szyszy commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20761#discussion_r223139202
  
    --- Diff: resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala ---
    @@ -199,6 +200,92 @@ class ClientSuite extends SparkFunSuite with Matchers {
         appContext.getMaxAppAttempts should be (42)
       }
     
    +  test("Resource type args propagate, resource type not defined") {
    +    assume(ResourceRequestHelper.isYarnResourceTypesAvailable())
    +    val sparkConf = new SparkConf()
    +      .set(YARN_AM_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "121m")
    +    val args = new ClientArguments(Array())
    +
    +    val appContext = Records.newRecord(classOf[ApplicationSubmissionContext])
    +    val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse])
    +    val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext])
    +
    +    val client = new Client(args, sparkConf)
    +
    +    try {
    +      client.createApplicationSubmissionContext(
    +        new YarnClientApplication(getNewApplicationResponse, appContext),
    +        containerLaunchContext)
    +    } catch {
    +      case NonFatal(e) =>
    +        val expectedExceptionClass = "org.apache.hadoop.yarn.exceptions.ResourceNotFoundException"
    +        if (e.getClass.getName != expectedExceptionClass) {
    +          fail(s"Exception caught: $e is not an instance of $expectedExceptionClass!")
    +        }
    +    }
    +  }
    +
    +  test("Resource type args propagate (client mode)") {
    +    assume(ResourceRequestHelper.isYarnResourceTypesAvailable())
    +    TestYarnResourceRequestHelper.initializeResourceTypes(List("gpu", "fpga"))
    +
    +    val sparkConf = new SparkConf()
    +      .set(YARN_AM_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "121m")
    --- End diff --
    
    Never mind what I commented about memory-mb; it is fixed with the latest commit, pushed moments ago. The translation was only required because the test was wrong. We agreed earlier that we don't need any translation in Spark code from memory to memory-mb: memory is a standard resource and should keep being requested the way it always has been, through the standard Spark config.

