This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 487e229206 Migrate CNCF system tests to new design AIP-47 (#24224)
487e229206 is described below
commit 487e229206396f8eaf7c933be996e6c0648ab078
Author: Chenglong Yan <[email protected]>
AuthorDate: Sun Jun 5 23:44:52 2022 +0800
Migrate CNCF system tests to new design AIP-47 (#24224)
closes: #22429
related: #22441
---
.../operators/test_spark_kubernetes_system.py | 68 ----------------------
1 file changed, 68 deletions(-)
diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py
deleted file mode 100644
index 6b00c26b62..0000000000
--- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes_system.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import os
-import subprocess
-
-import pytest
-
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
-from tests.test_utils.system_tests_class import SystemTest
-
-KUBERNETES_DAG_FOLDER = os.path.join(
-    AIRFLOW_MAIN_FOLDER, "airflow", "providers", "cncf", "kubernetes", "example_dags"
-)
-
-SPARK_OPERATOR_VERSION = "v1beta2-1.1.1-2.4.5"
-
-MANIFEST_BASE_URL = (
-    f'https://raw.githubusercontent.com/GoogleCloudPlatform/spark-on-k8s-operator/'
- f'{SPARK_OPERATOR_VERSION}/manifest/'
-)
-
-SPARK_OPERATOR_MANIFESTS = [
- f"{MANIFEST_BASE_URL}crds/sparkoperator.k8s.io_sparkapplications.yaml",
-    f"{MANIFEST_BASE_URL}crds/sparkoperator.k8s.io_scheduledsparkapplications.yaml",
- f"{MANIFEST_BASE_URL}spark-operator-rbac.yaml",
- f"{MANIFEST_BASE_URL}spark-operator.yaml",
- f"{MANIFEST_BASE_URL}spark-rbac.yaml",
-]
-
-
-def kubectl_apply_list(manifests):
- for manifest in manifests:
- command = ['kubectl', 'apply', '-f', manifest]
- subprocess.run(command, check=True)
-
-
-def kubectl_delete_list(manifests):
- for manifest in manifests:
- command = ['kubectl', 'delete', '--ignore-not-found', '-f', manifest]
- subprocess.run(command, check=True)
-
-
[email protected]("cncf.kubernetes")
-class SparkKubernetesExampleDagsSystemTest(SystemTest):
- def setUp(self):
- super().setUp()
- kubectl_apply_list(SPARK_OPERATOR_MANIFESTS)
-
- def tearDown(self):
- super().tearDown()
- kubectl_delete_list(SPARK_OPERATOR_MANIFESTS)
-
- def test_run_example_dag_spark_pi(self):
- self.run_dag('spark_pi', KUBERNETES_DAG_FOLDER)