Github user skonto commented on a diff in the pull request:
https://github.com/apache/spark/pull/21652#discussion_r201358932
--- Diff: resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala ---
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.deploy.k8s.integrationtest
+
+import io.fabric8.kubernetes.api.model.Pod
+
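+// A note on the self-type below: this trait can only be mixed into
+// KubernetesSuite, which supplies sparkAppConf, the run helpers, and the
+// pod-check methods used in these tests.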
+private[spark] trait BasicTestsSuite { k8sSuite: KubernetesSuite =>
+
+ test("Run SparkPi with no resources.") {
+ runSparkPiAndVerifyCompletion()
+ }
+
+ test("Run SparkPi with a very long application name.") {
+ sparkAppConf.set("spark.app.name", "long" * 40)
+ runSparkPiAndVerifyCompletion()
+ }
+
+ test("Run SparkPi with a master URL without a scheme.") {
+ val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
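+    // java.net.URL.getPort returns -1 when the URL has no explicit port, so
+    // a port is appended to the k8s:// master URL only when one was given.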
+    val k8sMasterUrl = if (url.getPort < 0) {
+      s"k8s://${url.getHost}"
+    } else {
+      s"k8s://${url.getHost}:${url.getPort}"
+    }
+    sparkAppConf.set("spark.master", k8sMasterUrl)
+    runSparkPiAndVerifyCompletion()
+  }
+
+ test("Run SparkPi with an argument.") {
+ runSparkPiAndVerifyCompletion(appArgs = Array("5"))
+ }
+
+ test("Run SparkPi with custom labels, annotations, and environment
variables.") {
+ sparkAppConf
+ .set("spark.kubernetes.driver.label.label1", "label1-value")
+ .set("spark.kubernetes.driver.label.label2", "label2-value")
+ .set("spark.kubernetes.driver.annotation.annotation1",
"annotation1-value")
+ .set("spark.kubernetes.driver.annotation.annotation2",
"annotation2-value")
+ .set("spark.kubernetes.driverEnv.ENV1", "VALUE1")
+ .set("spark.kubernetes.driverEnv.ENV2", "VALUE2")
+ .set("spark.kubernetes.executor.label.label1", "label1-value")
+ .set("spark.kubernetes.executor.label.label2", "label2-value")
+ .set("spark.kubernetes.executor.annotation.annotation1",
"annotation1-value")
+ .set("spark.kubernetes.executor.annotation.annotation2",
"annotation2-value")
+ .set("spark.executorEnv.ENV1", "VALUE1")
+ .set("spark.executorEnv.ENV2", "VALUE2")
+
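+    // Beyond the basic sanity checks, verify that the labels, annotations,
+    // and environment variables configured above actually reach each pod.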
+    runSparkPiAndVerifyCompletion(
+      driverPodChecker = (driverPod: Pod) => {
+        doBasicDriverPodCheck(driverPod)
+        checkCustomSettings(driverPod)
+      },
+      executorPodChecker = (executorPod: Pod) => {
+        doBasicExecutorPodCheck(executorPod)
+        checkCustomSettings(executorPod)
+      })
+  }
+
+  // TODO(ssuchter): Enable the below after debugging
+  // test("Run PageRank using remote data file") {
--- End diff ---
Yeah, good idea as well; I can try that.
---