GitHub user szyszy commented on a diff in the pull request: https://github.com/apache/spark/pull/20761#discussion_r222895698 --- Diff: resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala --- @@ -199,6 +200,92 @@ class ClientSuite extends SparkFunSuite with Matchers { appContext.getMaxAppAttempts should be (42) } + test("Resource type args propagate, resource type not defined") { + assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) + val sparkConf = new SparkConf() + .set(YARN_AM_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "121m") + val args = new ClientArguments(Array()) + + val appContext = Records.newRecord(classOf[ApplicationSubmissionContext]) + val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) + val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) + + val client = new Client(args, sparkConf) + + try { + client.createApplicationSubmissionContext( + new YarnClientApplication(getNewApplicationResponse, appContext), + containerLaunchContext) + } catch { + case NonFatal(e) => + val expectedExceptionClass = "org.apache.hadoop.yarn.exceptions.ResourceNotFoundException" + if (e.getClass.getName != expectedExceptionClass) { + fail(s"Exception caught: $e is not an instance of $expectedExceptionClass!") + } + } + } + + test("Resource type args propagate (client mode)") { + assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) + TestYarnResourceRequestHelper.initializeResourceTypes(List("gpu", "fpga")) + + val sparkConf = new SparkConf() + .set(YARN_AM_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "121m") + .set(YARN_DRIVER_RESOURCE_TYPES_PREFIX + "some_resource_with_units_1", "122m") + .set(YARN_AM_RESOURCE_TYPES_PREFIX + "fpga", "222m") + .set(YARN_DRIVER_RESOURCE_TYPES_PREFIX + "fpga", "223m") + .set(YARN_AM_RESOURCE_TYPES_PREFIX + "memory", "1G") + .set(YARN_DRIVER_RESOURCE_TYPES_PREFIX + "memory", "2G") + val args = new ClientArguments(Array()) + + val appContext = Records.newRecord(classOf[ApplicationSubmissionContext]) + val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) + val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) + + val client = new Client(args, sparkConf) + client.createApplicationSubmissionContext( + new YarnClientApplication(getNewApplicationResponse, appContext), + containerLaunchContext) + + appContext.getAMContainerSpec should be (containerLaunchContext) + appContext.getApplicationType should be ("SPARK") + TestYarnResourceRequestHelper.getResourceTypeValue(appContext.getResource, + "some_resource_with_units_1") should be (121) + TestYarnResourceRequestHelper + .getResourceTypeValue(appContext.getResource, "fpga") should be (222) + } + + test("configuration and resource type args propagate (cluster mode)") { --- End diff -- Indeed, extracted a common method, passing the expected values as a sequence of tuples.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org