Github user szyszy commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20761#discussion_r222894728
  
    --- Diff: resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala ---
    @@ -199,6 +200,92 @@ class ClientSuite extends SparkFunSuite with Matchers {
         appContext.getMaxAppAttempts should be (42)
       }
     
    +  test("Resource type args propagate, resource type not defined") {
    +    assume(ResourceRequestHelper.isYarnResourceTypesAvailable())
    +    val sparkConf = new SparkConf()
    +      .set(YARN_AM_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "121m")
    +    val args = new ClientArguments(Array())
    +
    +    val appContext = Records.newRecord(classOf[ApplicationSubmissionContext])
    +    val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse])
    +    val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext])
    +
    +    val client = new Client(args, sparkConf)
    +
    +    try {
    +      client.createApplicationSubmissionContext(
    +        new YarnClientApplication(getNewApplicationResponse, appContext),
    +        containerLaunchContext)
    +    } catch {
    +      case NonFatal(e) =>
    +        val expectedExceptionClass = "org.apache.hadoop.yarn.exceptions.ResourceNotFoundException"
    +        if (e.getClass.getName != expectedExceptionClass) {
    +          fail(s"Exception caught: $e is not an instance of $expectedExceptionClass!")
    +        }
    +    }
    +  }
    +
    +  test("Resource type args propagate (client mode)") {
    +    assume(ResourceRequestHelper.isYarnResourceTypesAvailable())
    +    TestYarnResourceRequestHelper.initializeResourceTypes(List("gpu", 
"fpga"))
    +
    +    val sparkConf = new SparkConf()
    +      .set(YARN_AM_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "121m")
    --- End diff ---
    
    Actually, these two tests were indeed failing. I forgot to run them recently, and Jenkins does not run them because its Hadoop version is not > 3.1.
    This also pointed to another issue: in `ResourceRequestHelper.setResourceRequests`, the `setResourceInformationMethod` was invoked with the value `"memory"`, which is not accepted by the YARN API, so I had to translate it to `"memory-mb"` for it to work correctly.
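    For reference, a minimal sketch of the kind of translation described above (the object and method names below are made up for illustration; the real logic lives in `ResourceRequestHelper.setResourceRequests`, which invokes YARN's `setResourceInformation` via reflection):

        // Illustrative sketch only: maps the Spark-facing resource name "memory"
        // to the canonical identifier "memory-mb" that the YARN API accepts,
        // leaving custom resource types (e.g. "gpu") untouched.
        object ResourceNameSketch {
          private val canonicalNames = Map("memory" -> "memory-mb")

          // Return the resource name that YARN's ResourceInformation API accepts.
          def toYarnResourceName(name: String): String =
            canonicalNames.getOrElse(name, name)

          def main(args: Array[String]): Unit = {
            println(toYarnResourceName("memory")) // prints "memory-mb"
            println(toYarnResourceName("gpu"))    // prints "gpu" (unchanged)
          }
        }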


---
