Github user ssuchter commented on a diff in the pull request:
https://github.com/apache/spark/pull/20697#discussion_r192488928
--- Diff:
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala
---
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.deploy.k8s.integrationtest
+
+import java.io.File
+import java.nio.file.{Path, Paths}
+import java.util.UUID
+import java.util.regex.Pattern
+
+import scala.collection.JavaConverters._
+
+import com.google.common.io.PatternFilenameFilter
+import io.fabric8.kubernetes.api.model.{Container, Pod}
+import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
+import org.scalatest.time.{Minutes, Seconds, Span}
+
+import org.apache.spark.SparkFunSuite
+import
org.apache.spark.deploy.k8s.integrationtest.backend.{IntegrationTestBackend,
IntegrationTestBackendFactory}
+import org.apache.spark.deploy.k8s.integrationtest.config._
+
+private[spark] class KubernetesSuite extends SparkFunSuite
+ with BeforeAndAfterAll with BeforeAndAfter {
+
+ import KubernetesSuite._
+
+ private var testBackend: IntegrationTestBackend = _
+ private var sparkHomeDir: Path = _
+ private var kubernetesTestComponents: KubernetesTestComponents = _
+ private var sparkAppConf: SparkAppConf = _
+ private var image: String = _
+ private var containerLocalSparkDistroExamplesJar: String = _
+ private var appLocator: String = _
+ private var driverPodName: String = _
+
+ override def beforeAll(): Unit = {
+    // The scalatest-maven-plugin gives null values to system properties
+    // that are referenced but not set. We need to remove these null-value
+    // properties before initializing the test backend.
+ val nullValueProperties = System.getProperties.asScala
+ .filter(entry => entry._2.equals("null"))
+ .map(entry => entry._1.toString)
+ nullValueProperties.foreach { key =>
+ System.clearProperty(key)
+ }
+
+ val sparkDirProp =
System.getProperty("spark.kubernetes.test.unpackSparkDir")
+ require(sparkDirProp != null, "Spark home directory must be provided
in system properties.")
+ sparkHomeDir = Paths.get(sparkDirProp)
+ require(sparkHomeDir.toFile.isDirectory,
+ s"No directory found for spark home specified at $sparkHomeDir.")
+ val imageTag = getTestImageTag
+ val imageRepo = getTestImageRepo
+ image = s"$imageRepo/spark:$imageTag"
+
+ val sparkDistroExamplesJarFile: File =
sparkHomeDir.resolve(Paths.get("examples", "jars"))
+ .toFile
+ .listFiles(new
PatternFilenameFilter(Pattern.compile("^spark-examples_.*\\.jar$")))(0)
+ containerLocalSparkDistroExamplesJar =
s"local:///opt/spark/examples/jars/" +
+ s"${sparkDistroExamplesJarFile.getName}"
+ testBackend = IntegrationTestBackendFactory.getTestBackend
+ testBackend.initialize()
+ kubernetesTestComponents = new
KubernetesTestComponents(testBackend.getKubernetesClient)
+ }
+
+ override def afterAll(): Unit = {
+ testBackend.cleanUp()
+ }
+
+ before {
+ appLocator = UUID.randomUUID().toString.replaceAll("-", "")
+ driverPodName = "spark-test-app-" +
UUID.randomUUID().toString.replaceAll("-", "")
+ sparkAppConf = kubernetesTestComponents.newSparkAppConf()
+ .set("spark.kubernetes.container.image", image)
+ .set("spark.kubernetes.driver.pod.name", driverPodName)
+ .set("spark.kubernetes.driver.label.spark-app-locator", appLocator)
+ .set("spark.kubernetes.executor.label.spark-app-locator", appLocator)
+ if (!kubernetesTestComponents.hasUserSpecifiedNamespace) {
+ kubernetesTestComponents.createNamespace()
+ }
+ }
+
+ after {
+ if (!kubernetesTestComponents.hasUserSpecifiedNamespace) {
+ kubernetesTestComponents.deleteNamespace()
+ }
+ deleteDriverPod()
+ }
+
+ test("Run SparkPi with no resources") {
--- End diff --
I added in the non-Kerberos tests, but one of them (remote URL fetching) is
failing. I commented it out, but I intend to debug it and uncomment it before
this submission is merged.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]