This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 88a284dd59 Do not assume that `sys.path` always changed in `pytest` 
(#38612)
88a284dd59 is described below

commit 88a284dd59adfa3c064c6e0d2ed030dc7e6e6c0e
Author: Andrey Anshin <andrey.ans...@taragol.is>
AuthorDate: Fri Mar 29 17:22:14 2024 +0400

    Do not assume that `sys.path` always changed in `pytest` (#38612)
    
    * Do not assume that `sys.path` always changed in `pytest`
    
    * Fixup remaining test
    
    * Make sure all data files exists
    
    * Update kubernetes_tests/conftest.py
    
    Co-authored-by: Jarek Potiuk <ja...@potiuk.com>
    
    * Also remove absolute in the other place
    
    ---------
    
    Co-authored-by: Jarek Potiuk <ja...@potiuk.com>
---
 .../kubernetes => kubernetes_tests}/basic_pod.yaml |  0
 kubernetes_tests/conftest.py                       | 14 +++++
 .../cncf/kubernetes => kubernetes_tests}/pod.yaml  |  0
 kubernetes_tests/test_kubernetes_pod_operator.py   | 22 ++++----
 .../providers/cncf/kubernetes}/conftest.py         | 22 ++++++--
 .../__init__.py                                    |  0
 .../executor}/__init__.py                          |  0
 .../executor}/basic_template.yaml                  |  4 +-
 .../cncf/kubernetes/{ => data_files}/kube_config   |  0
 .../pods}/__init__.py                              |  0
 .../pods/generator_base.yaml}                      |  0
 .../pods/generator_base_with_secrets.yaml}         |  0
 .../{pod.yaml => data_files/pods/template.yaml}    |  0
 .../spark}/__init__.py                             |  0
 .../spark/application_template.yaml}               |  0
 .../spark/application_test.json}                   |  0
 .../spark/application_test.yaml}                   |  0
 .../executors/test_kubernetes_executor.py          | 48 ++++++++--------
 .../cncf/kubernetes/models/test_secret.py          |  7 +--
 .../kubernetes/operators/test_spark_kubernetes.py  | 37 +++++++-----
 .../cncf/kubernetes/test_pod_generator.py          | 65 +++++++++++-----------
 21 files changed, 124 insertions(+), 95 deletions(-)

diff --git a/tests/providers/cncf/kubernetes/basic_pod.yaml 
b/kubernetes_tests/basic_pod.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/basic_pod.yaml
rename to kubernetes_tests/basic_pod.yaml
diff --git a/kubernetes_tests/conftest.py b/kubernetes_tests/conftest.py
index 3c861d00ba..1a78383f4b 100644
--- a/kubernetes_tests/conftest.py
+++ b/kubernetes_tests/conftest.py
@@ -16,11 +16,25 @@
 # under the License.
 from __future__ import annotations
 
+from pathlib import Path
+
 import pytest
 
+DATA_FILES_DIRECTORY = Path(__file__).resolve().parent
+
 
 @pytest.fixture(autouse=True)
 def initialize_providers_manager():
     from airflow.providers_manager import ProvidersManager
 
     ProvidersManager().initialize_providers_configuration()
+
+
+@pytest.fixture
+def pod_template() -> Path:
+    return (DATA_FILES_DIRECTORY / "pod.yaml").resolve(strict=True)
+
+
+@pytest.fixture
+def basic_pod_template() -> Path:
+    return (DATA_FILES_DIRECTORY / "basic_pod.yaml").resolve(strict=True)
diff --git a/tests/providers/cncf/kubernetes/pod.yaml 
b/kubernetes_tests/pod.yaml
similarity index 100%
copy from tests/providers/cncf/kubernetes/pod.yaml
copy to kubernetes_tests/pod.yaml
diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py 
b/kubernetes_tests/test_kubernetes_pod_operator.py
index 3ca78340b2..82bc9a5906 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator.py
@@ -20,7 +20,6 @@ import json
 import logging
 import os
 import shutil
-import sys
 from copy import copy
 from unittest import mock
 from unittest.mock import ANY, MagicMock
@@ -724,14 +723,13 @@ class TestKubernetesPodOperatorSystem:
         ]
         assert self.expected_pod == actual_pod
 
-    def test_pod_template_file_system(self, mock_get_connection):
+    def test_pod_template_file_system(self, mock_get_connection, 
basic_pod_template):
         """Note: this test requires that you have a namespace ``mem-example`` 
in your cluster."""
-        fixture = sys.path[0] + 
"/tests/providers/cncf/kubernetes/basic_pod.yaml"
         k = KubernetesPodOperator(
             task_id=str(uuid4()),
             in_cluster=False,
             labels=self.labels,
-            pod_template_file=fixture,
+            pod_template_file=basic_pod_template.as_posix(),
             do_xcom_push=True,
         )
 
@@ -747,14 +745,15 @@ class TestKubernetesPodOperatorSystem:
             pytest.param({"env_name": "value"}, id="backcompat"),  # todo: 
remove?
         ],
     )
-    def test_pod_template_file_with_overrides_system(self, env_vars, 
test_label, mock_get_connection):
-        fixture = sys.path[0] + 
"/tests/providers/cncf/kubernetes/basic_pod.yaml"
+    def test_pod_template_file_with_overrides_system(
+        self, env_vars, test_label, mock_get_connection, basic_pod_template
+    ):
         k = KubernetesPodOperator(
             task_id=str(uuid4()),
             labels=self.labels,
             env_vars=env_vars,
             in_cluster=False,
-            pod_template_file=fixture,
+            pod_template_file=basic_pod_template.as_posix(),
             do_xcom_push=True,
         )
 
@@ -774,8 +773,7 @@ class TestKubernetesPodOperatorSystem:
         assert k.pod.spec.containers[0].env == [k8s.V1EnvVar(name="env_name", 
value="value")]
         assert result == {"hello": "world"}
 
-    def test_pod_template_file_with_full_pod_spec(self, test_label, 
mock_get_connection):
-        fixture = sys.path[0] + 
"/tests/providers/cncf/kubernetes/basic_pod.yaml"
+    def test_pod_template_file_with_full_pod_spec(self, test_label, 
mock_get_connection, basic_pod_template):
         pod_spec = k8s.V1Pod(
             metadata=k8s.V1ObjectMeta(
                 labels={"test_label": test_label, "fizz": "buzz"},
@@ -793,7 +791,7 @@ class TestKubernetesPodOperatorSystem:
             task_id=str(uuid4()),
             labels=self.labels,
             in_cluster=False,
-            pod_template_file=fixture,
+            pod_template_file=basic_pod_template.as_posix(),
             full_pod_spec=pod_spec,
             do_xcom_push=True,
         )
@@ -927,6 +925,7 @@ class TestKubernetesPodOperatorSystem:
         await_xcom_sidecar_container_start_mock,
         caplog,
         test_label,
+        pod_template,
     ):
         # todo: This isn't really a system test
         await_xcom_sidecar_container_start_mock.return_value = None
@@ -935,12 +934,11 @@ class TestKubernetesPodOperatorSystem:
         
hook_mock.return_value.get_xcom_sidecar_container_resources.return_value = None
         hook_mock.return_value.get_connection.return_value = 
Connection(conn_id="kubernetes_default")
         extract_xcom_mock.return_value = "{}"
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod.yaml"
         k = KubernetesPodOperator(
             task_id=str(uuid4()),
             labels=self.labels,
             random_name_suffix=False,
-            pod_template_file=path,
+            pod_template_file=pod_template.as_posix(),
             do_xcom_push=True,
         )
         pod_mock = MagicMock()
diff --git a/kubernetes_tests/conftest.py 
b/tests/providers/cncf/kubernetes/conftest.py
similarity index 55%
copy from kubernetes_tests/conftest.py
copy to tests/providers/cncf/kubernetes/conftest.py
index 3c861d00ba..38a94c77ea 100644
--- a/kubernetes_tests/conftest.py
+++ b/tests/providers/cncf/kubernetes/conftest.py
@@ -14,13 +14,27 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from __future__ import annotations
 
+from pathlib import Path
+
 import pytest
 
+DATA_FILE_DIRECTORY = Path(__file__).resolve().parent / "data_files"
+
+
+@pytest.fixture
+def data_file():
+    """Helper fixture to obtain a data file from the data directory."""
+    if not DATA_FILE_DIRECTORY.exists():
+        msg = f"Data Directory {DATA_FILE_DIRECTORY.as_posix()!r} does not 
exist."
+        raise FileNotFoundError(msg)
+    elif not DATA_FILE_DIRECTORY.is_dir():
+        msg = f"Data Directory {DATA_FILE_DIRECTORY.as_posix()!r} expected to 
be a directory."
+        raise NotADirectoryError(msg)
 
-@pytest.fixture(autouse=True)
-def initialize_providers_manager():
-    from airflow.providers_manager import ProvidersManager
+    def wrapper(filepath: str | Path) -> Path:
+        return DATA_FILE_DIRECTORY.joinpath(filepath).resolve(strict=True)
 
-    ProvidersManager().initialize_providers_configuration()
+    return wrapper
diff --git 
a/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
 b/tests/providers/cncf/kubernetes/data_files/__init__.py
similarity index 100%
copy from 
tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
copy to tests/providers/cncf/kubernetes/data_files/__init__.py
diff --git 
a/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
 b/tests/providers/cncf/kubernetes/data_files/executor/__init__.py
similarity index 100%
copy from 
tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
copy to tests/providers/cncf/kubernetes/data_files/executor/__init__.py
diff --git 
a/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/basic_template.yaml
 b/tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml
similarity index 94%
rename from 
tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/basic_template.yaml
rename to 
tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml
index 1fb00f2ddf..e5348034c1 100644
--- 
a/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/basic_template.yaml
+++ b/tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml
@@ -27,8 +27,8 @@ spec:
     - name: base
       image: dummy-name-dont-delete
   securityContext:
-    runAsUser: 50000
+    runAsUser: 50001
     fsGroup: 50000
   imagePullSecrets:
-    - name: airflow-registry
+    - name: all-right-then-keep-your-secrets
   schedulerName: default-scheduler
diff --git a/tests/providers/cncf/kubernetes/kube_config 
b/tests/providers/cncf/kubernetes/data_files/kube_config
similarity index 100%
rename from tests/providers/cncf/kubernetes/kube_config
rename to tests/providers/cncf/kubernetes/data_files/kube_config
diff --git 
a/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
 b/tests/providers/cncf/kubernetes/data_files/pods/__init__.py
similarity index 100%
copy from 
tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
copy to tests/providers/cncf/kubernetes/data_files/pods/__init__.py
diff --git a/tests/providers/cncf/kubernetes/pod_generator_base.yaml 
b/tests/providers/cncf/kubernetes/data_files/pods/generator_base.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/pod_generator_base.yaml
rename to tests/providers/cncf/kubernetes/data_files/pods/generator_base.yaml
diff --git 
a/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml 
b/tests/providers/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml
rename to 
tests/providers/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml
diff --git a/tests/providers/cncf/kubernetes/pod.yaml 
b/tests/providers/cncf/kubernetes/data_files/pods/template.yaml
similarity index 100%
rename from tests/providers/cncf/kubernetes/pod.yaml
rename to tests/providers/cncf/kubernetes/data_files/pods/template.yaml
diff --git 
a/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
 b/tests/providers/cncf/kubernetes/data_files/spark/__init__.py
similarity index 100%
rename from 
tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
rename to tests/providers/cncf/kubernetes/data_files/spark/__init__.py
diff --git 
a/tests/providers/cncf/kubernetes/operators/spark_application_template.yaml 
b/tests/providers/cncf/kubernetes/data_files/spark/application_template.yaml
similarity index 100%
rename from 
tests/providers/cncf/kubernetes/operators/spark_application_template.yaml
rename to 
tests/providers/cncf/kubernetes/data_files/spark/application_template.yaml
diff --git 
a/tests/providers/cncf/kubernetes/operators/spark_application_test.json 
b/tests/providers/cncf/kubernetes/data_files/spark/application_test.json
similarity index 100%
rename from 
tests/providers/cncf/kubernetes/operators/spark_application_test.json
rename to tests/providers/cncf/kubernetes/data_files/spark/application_test.json
diff --git 
a/tests/providers/cncf/kubernetes/operators/spark_application_test.yaml 
b/tests/providers/cncf/kubernetes/data_files/spark/application_test.yaml
similarity index 100%
rename from 
tests/providers/cncf/kubernetes/operators/spark_application_test.yaml
rename to tests/providers/cncf/kubernetes/data_files/spark/application_test.yaml
diff --git 
a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py 
b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
index e65d9567fa..71cceca958 100644
--- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
+++ b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
@@ -16,11 +16,9 @@
 # under the License.
 from __future__ import annotations
 
-import pathlib
 import random
 import re
 import string
-import sys
 from datetime import datetime, timedelta
 from unittest import mock
 
@@ -115,7 +113,7 @@ class TestAirflowKubernetesScheduler:
     )
     @mock.patch("airflow.providers.cncf.kubernetes.pod_generator.PodGenerator")
     
@mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubeConfig")
-    def test_get_base_pod_from_template(self, mock_kubeconfig, mock_generator):
+    def test_get_base_pod_from_template(self, mock_kubeconfig, mock_generator, 
data_file):
         # Provide non-existent file path,
         # so None will be passed to deserialize_model_dict().
         pod_template_file_path = "/bar/biz"
@@ -130,16 +128,16 @@ class TestAirflowKubernetesScheduler:
 
         # Provide existent file path,
         # so loaded YAML file content should be used to call 
deserialize_model_dict(), rather than None.
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod.yaml"
-        with open(path) as stream:
+        pod_template_file = data_file("pods/template.yaml")
+        with open(pod_template_file) as stream:
             expected_pod_dict = yaml.safe_load(stream)
 
-        pod_template_file_path = path
+        pod_template_file_path = pod_template_file.as_posix()
         get_base_pod_from_template(pod_template_file_path, None)
         assert "deserialize_model_dict" == mock_generator.mock_calls[2][0]
         assert mock_generator.mock_calls[2][1][0] == expected_pod_dict
 
-        mock_kubeconfig.pod_template_file = path
+        mock_kubeconfig.pod_template_file = pod_template_file.as_posix()
         get_base_pod_from_template(None, mock_kubeconfig)
         assert "deserialize_model_dict" == mock_generator.mock_calls[3][0]
         assert mock_generator.mock_calls[3][1][0] == expected_pod_dict
@@ -390,6 +388,7 @@ class TestKubernetesExecutor:
         task_publish_max_retries,
         should_requeue,
         task_expected_state,
+        data_file,
     ):
         """
         When pod scheduling fails with any reason not yet
@@ -411,8 +410,7 @@ class TestKubernetesExecutor:
             - your request parameters are valid but unsupported e.g. limits 
lower than requests.
 
         """
-        path = sys.path[0] + 
"/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
-
+        template_file = 
data_file("pods/generator_base_with_secrets.yaml").as_posix()
         # A mock kube_client that throws errors when making a pod
         mock_kube_client = mock.patch("kubernetes.client.CoreV1Api", 
autospec=True)
         mock_kube_client.create_namespaced_pod = 
mock.MagicMock(side_effect=ApiException(http_resp=response))
@@ -421,7 +419,7 @@ class TestKubernetesExecutor:
         mock_api_client.sanitize_for_serialization.return_value = {}
         mock_kube_client.api_client = mock_api_client
         config = {
-            ("kubernetes", "pod_template_file"): path,
+            ("kubernetes", "pod_template_file"): template_file,
         }
         with conf_vars(config):
             kubernetes_executor = self.kubernetes_executor
@@ -506,11 +504,13 @@ class TestKubernetesExecutor:
     )
     
@mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
     
@mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
-    def test_run_next_pod_reconciliation_error(self, mock_get_kube_client, 
mock_kubernetes_job_watcher):
+    def test_run_next_pod_reconciliation_error(
+        self, mock_get_kube_client, mock_kubernetes_job_watcher, data_file
+    ):
         """
         When construct_pod raises PodReconciliationError, we should fail the 
task.
         """
-        path = sys.path[0] + 
"/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
+        template_file = 
data_file("pods/generator_base_with_secrets.yaml").as_posix()
 
         mock_kube_client = mock.patch("kubernetes.client.CoreV1Api", 
autospec=True)
         fail_msg = "test message"
@@ -519,7 +519,7 @@ class TestKubernetesExecutor:
         mock_api_client = mock.MagicMock()
         mock_api_client.sanitize_for_serialization.return_value = {}
         mock_kube_client.api_client = mock_api_client
-        config = {("kubernetes", "pod_template_file"): path}
+        config = {("kubernetes", "pod_template_file"): template_file}
         with conf_vars(config):
             kubernetes_executor = self.kubernetes_executor
             kubernetes_executor.start()
@@ -596,16 +596,14 @@ class TestKubernetesExecutor:
         
"airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.run_pod_async"
     )
     
@mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
-    def test_pod_template_file_override_in_executor_config(self, 
mock_get_kube_client, mock_run_pod_async):
-        current_folder = pathlib.Path(__file__).parent.resolve()
-        template_file = str(
-            (current_folder / "kubernetes_executor_template_files" / 
"basic_template.yaml").resolve()
-        )
-
+    def test_pod_template_file_override_in_executor_config(
+        self, mock_get_kube_client, mock_run_pod_async, data_file
+    ):
+        executor_template_file = data_file("executor/basic_template.yaml")
         mock_kube_client = mock.patch("kubernetes.client.CoreV1Api", 
autospec=True)
         mock_get_kube_client.return_value = mock_kube_client
 
-        with conf_vars({("kubernetes", "pod_template_file"): ""}):
+        with conf_vars({("kubernetes", "pod_template_file"): None}):
             executor = self.kubernetes_executor
             executor.start()
             try:
@@ -617,7 +615,7 @@ class TestKubernetesExecutor:
                     queue=None,
                     command=["airflow", "tasks", "run", "true", 
"some_parameter"],
                     executor_config={
-                        "pod_template_file": template_file,
+                        "pod_template_file": executor_template_file,
                         "pod_override": k8s.V1Pod(
                             metadata=k8s.V1ObjectMeta(labels={"release": 
"stable"}),
                             spec=k8s.V1PodSpec(
@@ -633,7 +631,7 @@ class TestKubernetesExecutor:
                 executor.task_queue.task_done()
                 # Test that the correct values have been put to queue
                 assert expected_executor_config.metadata.labels == {"release": 
"stable"}
-                assert expected_pod_template_file == template_file
+                assert expected_pod_template_file == executor_template_file
 
                 self.kubernetes_executor.kube_scheduler.run_next(task)
                 mock_run_pod_async.assert_called_once_with(
@@ -670,9 +668,11 @@ class TestKubernetesExecutor:
                                     
env=[k8s.V1EnvVar(name="AIRFLOW_IS_K8S_EXECUTOR_POD", value="True")],
                                 )
                             ],
-                            
image_pull_secrets=[k8s.V1LocalObjectReference(name="airflow-registry")],
+                            image_pull_secrets=[
+                                
k8s.V1LocalObjectReference(name="all-right-then-keep-your-secrets")
+                            ],
                             scheduler_name="default-scheduler",
-                            
security_context=k8s.V1PodSecurityContext(fs_group=50000, run_as_user=50000),
+                            
security_context=k8s.V1PodSecurityContext(fs_group=50000, run_as_user=50001),
                         ),
                     )
                 )
diff --git a/tests/providers/cncf/kubernetes/models/test_secret.py 
b/tests/providers/cncf/kubernetes/models/test_secret.py
index 556a5cc30a..51fd17ca71 100644
--- a/tests/providers/cncf/kubernetes/models/test_secret.py
+++ b/tests/providers/cncf/kubernetes/models/test_secret.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import sys
 import uuid
 from unittest import mock
 
@@ -64,13 +63,13 @@ class TestSecret:
 
     @mock.patch("uuid.uuid4")
     @mock.patch("airflow.providers.cncf.kubernetes.pod_generator.rand_str")
-    def test_attach_to_pod(self, mock_rand_str, mock_uuid):
+    def test_attach_to_pod(self, mock_rand_str, mock_uuid, data_file):
         static_uuid = uuid.UUID("cf4a56d2-8101-4217-b027-2af6216feb48")
         mock_uuid.return_value = static_uuid
         rand_str = "abcd1234"
         mock_rand_str.return_value = rand_str
-        path = sys.path[0] + 
"/tests/providers/cncf/kubernetes/pod_generator_base.yaml"
-        pod = PodGenerator(pod_template_file=path).ud_pod
+        template_file = data_file("pods/generator_base.yaml").as_posix()
+        pod = PodGenerator(pod_template_file=template_file).ud_pod
         secrets = [
             # This should be a secretRef
             Secret("env", None, "secret_a"),
diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py 
b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
index 39c0328dd5..7982c0c3ff 100644
--- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
+++ b/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
@@ -20,8 +20,6 @@ from __future__ import annotations
 import copy
 import json
 from datetime import date
-from os.path import join
-from pathlib import Path
 from unittest import mock
 from unittest.mock import patch
 
@@ -39,10 +37,10 @@ from airflow.utils.types import DagRunType
 
 
 
@patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook")
-def test_spark_kubernetes_operator(mock_kubernetes_hook):
+def test_spark_kubernetes_operator(mock_kubernetes_hook, data_file):
     operator = SparkKubernetesOperator(
         task_id="task_id",
-        application_file=join(Path(__file__).parent, 
"spark_application_test.yaml"),
+        application_file=data_file("spark/application_test.yaml").as_posix(),
         kubernetes_conn_id="kubernetes_conn_id",
         in_cluster=True,
         cluster_context="cluster_context",
@@ -54,10 +52,10 @@ def test_spark_kubernetes_operator(mock_kubernetes_hook):
 
 
 
@patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook")
-def test_spark_kubernetes_operator_hook(mock_kubernetes_hook):
+def test_spark_kubernetes_operator_hook(mock_kubernetes_hook, data_file):
     operator = SparkKubernetesOperator(
         task_id="task_id",
-        application_file=join(Path(__file__).parent, 
"spark_application_test.yaml"),
+        application_file=data_file("spark/application_test.yaml").as_posix(),
         kubernetes_conn_id="kubernetes_conn_id",
         in_cluster=True,
         cluster_context="cluster_context",
@@ -220,11 +218,12 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "default_yaml"
         mock_create_job_name.return_value = task_name
         op = SparkKubernetesOperator(
-            application_file=join(Path(__file__).parent, 
"spark_application_test.yaml"),
+            
application_file=data_file("spark/application_test.yaml").as_posix(),
             kubernetes_conn_id="kubernetes_default_kube_config",
             task_id=task_name,
         )
@@ -242,7 +241,7 @@ class TestSparkKubernetesOperator:
         task_name = "default_json"
         mock_create_job_name.return_value = task_name
         op = SparkKubernetesOperator(
-            application_file=join(Path(__file__).parent, 
"spark_application_test.json"),
+            
application_file=data_file("spark/application_test.json").as_posix(),
             kubernetes_conn_id="kubernetes_default_kube_config",
             task_id=task_name,
         )
@@ -267,11 +266,12 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "default_yaml_template"
         mock_create_job_name.return_value = task_name
         op = SparkKubernetesOperator(
-            application_file=join(Path(__file__).parent, 
"spark_application_template.yaml"),
+            
application_file=data_file("spark/application_template.yaml").as_posix(),
             kubernetes_conn_id="kubernetes_default_kube_config",
             task_id=task_name,
         )
@@ -296,9 +296,11 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "default_yaml_template"
-        job_spec = yaml.safe_load(open(join(Path(__file__).parent, 
"spark_application_template.yaml")))
+
+        job_spec = 
yaml.safe_load(data_file("spark/application_template.yaml").read_text())
         self.execute_operator(task_name, mock_create_job_name, 
job_spec=job_spec)
 
         TEST_K8S_DICT["metadata"]["name"] = task_name
@@ -320,9 +322,10 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "default_env"
-        job_spec = yaml.safe_load(open(join(Path(__file__).parent, 
"spark_application_template.yaml")))
+        job_spec = 
yaml.safe_load(data_file("spark/application_template.yaml").read_text())
         # test env vars
         job_spec["kubernetes"]["env_vars"] = {"TEST_ENV_1": "VALUE1"}
 
@@ -362,9 +365,10 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "default_volume"
-        job_spec = yaml.safe_load(open(join(Path(__file__).parent, 
"spark_application_template.yaml")))
+        job_spec = 
yaml.safe_load(data_file("spark/application_template.yaml").read_text())
         volumes = [
             k8s.V1Volume(
                 name="test-pvc",
@@ -406,9 +410,10 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "test_pull_secret"
-        job_spec = yaml.safe_load(open(join(Path(__file__).parent, 
"spark_application_template.yaml")))
+        job_spec = 
yaml.safe_load(data_file("spark/application_template.yaml").read_text())
         job_spec["kubernetes"]["image_pull_secrets"] = "secret1,secret2"
         op = self.execute_operator(task_name, mock_create_job_name, 
job_spec=job_spec)
 
@@ -425,9 +430,10 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         task_name = "test_affinity"
-        job_spec = yaml.safe_load(open(join(Path(__file__).parent, 
"spark_application_template.yaml")))
+        job_spec = 
yaml.safe_load(data_file("spark/application_template.yaml").read_text())
         job_spec["kubernetes"]["affinity"] = k8s.V1Affinity(
             node_affinity=k8s.V1NodeAffinity(
                 
required_during_scheduling_ignored_during_execution=k8s.V1NodeSelector(
@@ -477,6 +483,7 @@ class TestSparkKubernetesOperator:
         mock_create_pod,
         mock_await_pod_start,
         mock_await_pod_completion,
+        data_file,
     ):
         toleration = k8s.V1Toleration(
             key="dedicated",
@@ -485,7 +492,7 @@ class TestSparkKubernetesOperator:
             effect="NoSchedule",
         )
         task_name = "test_tolerations"
-        job_spec = yaml.safe_load(open(join(Path(__file__).parent, 
"spark_application_template.yaml")))
+        job_spec = 
yaml.safe_load(data_file("spark/application_template.yaml").read_text())
         job_spec["kubernetes"]["tolerations"] = [toleration]
         op = self.execute_operator(task_name, mock_create_job_name, 
job_spec=job_spec)
 
diff --git a/tests/providers/cncf/kubernetes/test_pod_generator.py 
b/tests/providers/cncf/kubernetes/test_pod_generator.py
index 682af03780..bcd49e0d26 100644
--- a/tests/providers/cncf/kubernetes/test_pod_generator.py
+++ b/tests/providers/cncf/kubernetes/test_pod_generator.py
@@ -16,9 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-import os
 import re
-import sys
 from unittest import mock
 from unittest.mock import MagicMock
 
@@ -163,15 +161,15 @@ class TestPodGenerator:
         )
 
     
@mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
-    def test_gen_pod_extract_xcom(self, mock_rand_str):
+    def test_gen_pod_extract_xcom(self, mock_rand_str, data_file):
         """
         Method gen_pod is used nowhere in codebase and is deprecated.
         This test is only retained for backcompat.
         """
         mock_rand_str.return_value = self.rand_str
-        path = sys.path[0] + 
"/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
+        template_file = 
data_file("pods/generator_base_with_secrets.yaml").as_posix()
 
-        pod_generator = PodGenerator(pod_template_file=path, extract_xcom=True)
+        pod_generator = PodGenerator(pod_template_file=template_file, 
extract_xcom=True)
         result = pod_generator.gen_pod()
         container_two = {
             "name": "airflow-xcom-sidecar",
@@ -326,9 +324,9 @@ class TestPodGenerator:
             },
         } == result
 
-    def test_reconcile_pods_empty_mutator_pod(self):
-        path = sys.path[0] + 
"/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
-        pod_generator = PodGenerator(pod_template_file=path, extract_xcom=True)
+    def test_reconcile_pods_empty_mutator_pod(self, data_file):
+        template_file = 
data_file("pods/generator_base_with_secrets.yaml").as_posix()
+        pod_generator = PodGenerator(pod_template_file=template_file, 
extract_xcom=True)
         base_pod = pod_generator.ud_pod
         mutator_pod = None
         result = PodGenerator.reconcile_pods(base_pod, mutator_pod)
@@ -339,11 +337,10 @@ class TestPodGenerator:
         assert base_pod == result
 
     @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
-    def test_reconcile_pods(self, mock_rand_str):
+    def test_reconcile_pods(self, mock_rand_str, data_file):
         mock_rand_str.return_value = self.rand_str
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
-
-        base_pod = PodGenerator(pod_template_file=path, extract_xcom=False).ud_pod
+        template_file = data_file("pods/generator_base_with_secrets.yaml").as_posix()
+        base_pod = PodGenerator(pod_template_file=template_file, extract_xcom=False).ud_pod
 
         mutator_pod = k8s.V1Pod(
             metadata=k8s.V1ObjectMeta(
@@ -409,9 +406,9 @@ class TestPodGenerator:
         ],
     )
     def test_construct_pod(
-        self, config_image, expected_image, pod_override_object_namespace, expected_namespace
+        self, config_image, expected_image, pod_override_object_namespace, expected_namespace, data_file
     ):
-        template_file = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
+        template_file = data_file("pods/generator_base_with_secrets.yaml").as_posix()
         worker_config = PodGenerator.deserialize_model_file(template_file)
         executor_config = k8s.V1Pod(
             metadata=k8s.V1ObjectMeta(
@@ -459,8 +456,8 @@ class TestPodGenerator:
 
         assert expected_dict == result_dict
 
-    def test_construct_pod_mapped_task(self):
-        template_file = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base.yaml"
+    def test_construct_pod_mapped_task(self, data_file):
+        template_file = data_file("pods/generator_base.yaml").as_posix()
         worker_config = PodGenerator.deserialize_model_file(template_file)
         result = PodGenerator.construct_pod(
             dag_id=self.dag_id,
@@ -493,9 +490,9 @@ class TestPodGenerator:
 
         assert result_dict == expected_dict
 
-    def test_construct_pod_empty_executor_config(self):
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
-        worker_config = PodGenerator.deserialize_model_file(path)
+    def test_construct_pod_empty_executor_config(self, data_file):
+        template_file = data_file("pods/generator_base_with_secrets.yaml").as_posix()
+        worker_config = PodGenerator.deserialize_model_file(template_file)
         executor_config = None
 
         result = PodGenerator.construct_pod(
@@ -526,13 +523,13 @@ class TestPodGenerator:
         assert sanitized_result == worker_config_result
 
     @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
-    def test_construct_pod_attribute_error(self, mock_rand_str):
+    def test_construct_pod_attribute_error(self, mock_rand_str, data_file):
         """
         After upgrading k8s library we might get attribute error.
         In this case it should raise PodReconciliationError
         """
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
-        worker_config = PodGenerator.deserialize_model_file(path)
+        template_file = data_file("pods/generator_base_with_secrets.yaml").as_posix()
+        worker_config = PodGenerator.deserialize_model_file(template_file)
         mock_rand_str.return_value = self.rand_str
         executor_config = MagicMock()
         executor_config.side_effect = AttributeError("error")
@@ -553,10 +550,10 @@ class TestPodGenerator:
             )
 
     @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
-    def test_ensure_max_identifier_length(self, mock_rand_str):
+    def test_ensure_max_identifier_length(self, mock_rand_str, data_file):
         mock_rand_str.return_value = self.rand_str
-        path = os.path.join(os.path.dirname(__file__), "pod_generator_base_with_secrets.yaml")
-        worker_config = PodGenerator.deserialize_model_file(path)
+        pod_template = data_file("pods/generator_base_with_secrets.yaml")
+        worker_config = PodGenerator.deserialize_model_file(pod_template.as_posix())
 
         result = PodGenerator.construct_pod(
             dag_id="a" * 512,
@@ -714,20 +711,20 @@ class TestPodGenerator:
         res = PodGenerator.reconcile_specs(base_spec, client_spec)
         assert res.init_containers == base_spec.init_containers + client_spec.init_containers
 
-    def test_deserialize_model_file(self, caplog):
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod.yaml"
-        result = PodGenerator.deserialize_model_file(path)
+    def test_deserialize_model_file(self, caplog, data_file):
+        template_file = data_file("pods/template.yaml").as_posix()
+        result = PodGenerator.deserialize_model_file(template_file)
         sanitized_res = self.k8s_client.sanitize_for_serialization(result)
         assert sanitized_res == self.deserialize_result
         assert len(caplog.records) == 0
 
-    def test_deserialize_non_existent_model_file(self, caplog):
-        path = sys.path[0] + "/tests/providers/cncf/kubernetes/non_existent.yaml"
-        result = PodGenerator.deserialize_model_file(path)
+    def test_deserialize_non_existent_model_file(self, caplog, tmp_path):
+        template_file = (tmp_path / "non_existent.yaml").absolute().as_posix()
+        result = PodGenerator.deserialize_model_file(template_file)
         sanitized_res = self.k8s_client.sanitize_for_serialization(result)
         assert sanitized_res == {}
         assert len(caplog.records) == 1
-        assert "does not exist" in caplog.text
+        assert "non_existent.yaml does not exist" in caplog.text
 
     @pytest.mark.parametrize(
         "input",
@@ -780,12 +777,12 @@ class TestPodGenerator:
         # verify ends with 8 char lowercase alphanum string
         assert re.match(rf"^{expected_starts_with}-[a-z0-9]{{8}}$", actual), "doesn't match expected pattern"
 
-    def test_validate_pod_generator(self):
+    def test_validate_pod_generator(self, data_file):
         with pytest.raises(AirflowConfigException):
             PodGenerator(pod=k8s.V1Pod(), pod_template_file="k")
         with pytest.raises(AirflowConfigException):
             PodGenerator()
-        PodGenerator(pod_template_file="tests/kubernetes/pod.yaml")
+        PodGenerator(pod_template_file=data_file("pods/template.yaml").as_posix())
         PodGenerator(pod=k8s.V1Pod())
 
     @pytest.mark.parametrize(


Reply via email to