rvesse commented on a change in pull request #31829:
URL: https://github.com/apache/spark/pull/31829#discussion_r594433527
##########
File path: resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/minikube/Minikube.scala
##########
@@ -16,100 +16,73 @@
*/
package org.apache.spark.deploy.k8s.integrationtest.backend.minikube
-import java.nio.file.{Files, Paths}
-
-import io.fabric8.kubernetes.client.{ConfigBuilder, DefaultKubernetesClient}
+import io.fabric8.kubernetes.client.Config
+import io.fabric8.kubernetes.client.DefaultKubernetesClient
import org.apache.spark.deploy.k8s.integrationtest.ProcessUtils
import org.apache.spark.internal.Logging
// TODO support windows
private[spark] object Minikube extends Logging {
private val MINIKUBE_STARTUP_TIMEOUT_SECONDS = 60
- private val HOST_PREFIX = "host:"
- private val KUBELET_PREFIX = "kubelet:"
- private val APISERVER_PREFIX = "apiserver:"
- private val KUBECTL_PREFIX = "kubectl:"
- private val KUBECONFIG_PREFIX = "kubeconfig:"
+ private val VERSION_PREFIX = "minikube version: "
+ private val HOST_PREFIX = "host: "
+ private val KUBELET_PREFIX = "kubelet: "
+ private val APISERVER_PREFIX = "apiserver: "
+ private val KUBECTL_PREFIX = "kubectl: "
+ private val KUBECONFIG_PREFIX = "kubeconfig: "
private val MINIKUBE_VM_PREFIX = "minikubeVM: "
private val MINIKUBE_PREFIX = "minikube: "
- private val MINIKUBE_PATH = ".minikube"
-
- def logVersion(): Unit = {
- logInfo(executeMinikube("version").mkString("\n"))
- }
- def getMinikubeIp: String = {
- val outputs = executeMinikube("ip")
- .filter(_.matches("^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$"))
- assert(outputs.size == 1, "Unexpected amount of output from minikube ip")
- outputs.head
- }
+ lazy val minikubeVersionString =
+ executeMinikube("version").find(_.contains(VERSION_PREFIX)).get
- def getMinikubeStatus: MinikubeStatus.Value = {
- val statusString = executeMinikube("status")
- logInfo(s"Minikube status command output:\n$statusString")
- // up to minikube version v0.30.0 use this to check for minikube status
- val oldMinikube = statusString
- .filter(line => line.contains(MINIKUBE_VM_PREFIX) || line.contains(MINIKUBE_PREFIX))
-
- if (oldMinikube.isEmpty) {
- getIfNewMinikubeStatus(statusString)
- } else {
- val finalStatusString = oldMinikube
- .head
- .replaceFirst(MINIKUBE_VM_PREFIX, "")
- .replaceFirst(MINIKUBE_PREFIX, "")
- MinikubeStatus.unapply(finalStatusString)
- .getOrElse(throw new IllegalStateException(s"Unknown status $statusString"))
- }
- }
+ def logVersion(): Unit =
+ logInfo(minikubeVersionString)
def getKubernetesClient: DefaultKubernetesClient = {
- val kubernetesMaster = s"https://${getMinikubeIp}:8443"
- val userHome = System.getProperty("user.home")
- val minikubeBasePath = Paths.get(userHome, MINIKUBE_PATH).toString
- val profileDir = if (Files.exists(Paths.get(minikubeBasePath, "apiserver.crt"))) {
- // For Minikube <1.9
- ""
- } else {
- // For Minikube >=1.9
- Paths.get("profiles", executeMinikube("profile")(0)).toString
+ // only the three-part version number is matched (the optional suffix like "-beta.0" is dropped)
+ val versionArrayOpt = "\\d+\\.\\d+\\.\\d+".r
+ .findFirstIn(minikubeVersionString.split(VERSION_PREFIX)(1))
+ .map(_.split('.').map(_.toInt))
+
+ versionArrayOpt match {
+ case Some(Array(x, y, z)) =>
+ if (Ordering.Tuple3[Int, Int, Int].lt((x, y, z), (1, 7, 3))) {
+ assert(false, s"Unsupported Minikube version is detected:
$minikubeVersionString." +
+ "For integration testing Minikube version 1.7.3 or greater is
expected.")
+ }
+ case _ =>
+ assert(false, s"Unexpected version format detected in
`$minikubeVersionString`." +
+ "For minikube version a three-part version number is expected (the
optional " +
+ "non-numeric suffix is intentionally dropped)")
}
- val apiServerCertPath = Paths.get(minikubeBasePath, profileDir, "apiserver.crt")
- val apiServerKeyPath = Paths.get(minikubeBasePath, profileDir, "apiserver.key")
- val kubernetesConf = new ConfigBuilder()
- .withApiVersion("v1")
- .withMasterUrl(kubernetesMaster)
- .withCaCertFile(
- Paths.get(userHome, MINIKUBE_PATH, "ca.crt").toFile.getAbsolutePath)
- .withClientCertFile(apiServerCertPath.toFile.getAbsolutePath)
- .withClientKeyFile(apiServerKeyPath.toFile.getAbsolutePath)
- .build()
- new DefaultKubernetesClient(kubernetesConf)
+
+ new DefaultKubernetesClient(Config.autoConfigure("minikube"))
Review comment:
I'm sure it probably is the default (though their community will be best placed to confirm). The scenario I was thinking of was more the one where users have renamed their contexts to their own preference. I have access to a lot of R&D clusters in my $dayjob and at times I've had as many as 10+ different contexts in my `KUBECONFIG`, so I would rename contexts appropriately as some tools (GKE, I'm looking at you) produce rather user-unfriendly names by default. Not sure I've ever renamed the `minikube` context itself, but it's a thing that users can and might do.
TL;DR: If there's a naming assumption present in the code, particularly where it pertains to user-managed configuration files outside of Spark's control, it should be documented as such.
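
Something along these lines (purely a hypothetical sketch, not code from this PR) is what I mean by making the assumption explicit: it names the expected context once, documents where it comes from, and lets a user with a renamed context supply a different name. It assumes fabric8's `Config.autoConfigure(context)` as already used above; the object and parameter names are mine.

```scala
// Hypothetical sketch only, not proposed code for this PR.
import io.fabric8.kubernetes.client.{Config, DefaultKubernetesClient}

object MinikubeClientSketch {
  // The integration tests assume the user's kubeconfig still contains the
  // context named "minikube" that `minikube start` creates by default;
  // users who have renamed that context would need to pass the new name.
  private val DefaultMinikubeContext = "minikube"

  def kubernetesClient(
      contextName: String = DefaultMinikubeContext): DefaultKubernetesClient = {
    // autoConfigure loads the kubeconfig ($KUBECONFIG or ~/.kube/config)
    // and selects the named context, falling back to defaults if absent.
    new DefaultKubernetesClient(Config.autoConfigure(contextName))
  }
}
```

Even just a named constant plus a comment like that would capture the assumption where future readers of this file will actually see it.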