This is an automated email from the ASF dual-hosted git repository.
vincbeck pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new d1e500c450 Deprecated configuration removed (#42129)
d1e500c450 is described below
commit d1e500c45069dc42254d55d8175e2c494cb41167
Author: Gopal Dirisala <[email protected]>
AuthorDate: Mon Sep 16 20:50:34 2024 +0530
Deprecated configuration removed (#42129)
---
airflow/configuration.py | 20 +-------------------
.../cncf/kubernetes/executors/kubernetes_executor.py | 4 +++-
.../providers/fab/auth_manager/fab_auth_manager.py | 12 +++++++++---
newsfragments/42129.significant.rst | 17 +++++++++++++++++
tests/core/test_configuration.py | 15 +++++++--------
.../kubernetes/executors/test_kubernetes_executor.py | 14 +++++++-------
tests/providers/cncf/kubernetes/test_client.py | 4 +++-
7 files changed, 47 insertions(+), 39 deletions(-)
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 4238d59054..9da7a99fc1 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -326,15 +326,6 @@ class AirflowConfigParser(ConfigParser):
# When reading new option, the old option will be checked to see if it exists. If it does a
# DeprecationWarning will be issued and the old option will be used instead
deprecated_options: dict[tuple[str, str], tuple[str, str, str]] = {
- ("celery", "worker_precheck"): ("core", "worker_precheck", "2.0.0"),
- ("scheduler", "parsing_processes"): ("scheduler", "max_threads",
"1.10.14"),
- ("operators", "default_queue"): ("celery", "default_queue", "2.1.0"),
- ("core", "hide_sensitive_var_conn_fields"): ("admin",
"hide_sensitive_variable_fields", "2.1.0"),
- ("core", "sensitive_var_conn_names"): ("admin",
"sensitive_variable_fields", "2.1.0"),
- ("core", "default_pool_task_slot_count"): ("core",
"non_pooled_task_slot_count", "1.10.4"),
- ("core", "max_active_tasks_per_dag"): ("core", "dag_concurrency",
"2.2.0"),
- ("api", "access_control_allow_origins"): ("api",
"access_control_allow_origin", "2.2.0"),
- ("api", "auth_backends"): ("api", "auth_backend", "2.3.0"),
("database", "sql_alchemy_conn"): ("core", "sql_alchemy_conn",
"2.3.0"),
("database", "sql_engine_encoding"): ("core", "sql_engine_encoding",
"2.3.0"),
("database", "sql_engine_collation_for_ids"): ("core",
"sql_engine_collation_for_ids", "2.3.0"),
@@ -347,19 +338,10 @@ class AirflowConfigParser(ConfigParser):
("database", "sql_alchemy_connect_args"): ("core",
"sql_alchemy_connect_args", "2.3.0"),
("database", "load_default_connections"): ("core",
"load_default_connections", "2.3.0"),
("database", "max_db_retries"): ("core", "max_db_retries", "2.3.0"),
- ("scheduler", "parsing_cleanup_interval"): ("scheduler",
"deactivate_stale_dags_interval", "2.5.0"),
- ("scheduler", "task_queued_timeout_check_interval"): (
- "kubernetes_executor",
- "worker_pods_pending_timeout_check_interval",
- "2.6.0",
- ),
- ("fab", "update_fab_perms"): ("webserver", "update_fab_perms",
"2.9.0"),
- ("fab", "auth_rate_limited"): ("webserver", "auth_rate_limited",
"2.9.0"),
- ("fab", "auth_rate_limit"): ("webserver", "auth_rate_limit", "2.9.0"),
}
# A mapping of new section -> (old section, since_version).
- deprecated_sections: dict[str, tuple[str, str]] = {"kubernetes_executor": ("kubernetes", "2.5.0")}
+ deprecated_sections: dict[str, tuple[str, str]] = {}
# Now build the inverse so we can go from old_section/old_key to new_section/new_key
# if someone tries to retrieve it based on old_section/old_key
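For context, the deprecated_options table maps a (new_section, new_key) pair to the (old_section, old_key, since_version) it supersedes. A minimal, self-contained sketch of that fallback behaviour (illustrative only; the real logic lives in AirflowConfigParser, and the lookup helper and miniature dict below are hypothetical):

    import warnings

    # Hypothetical miniature of deprecated_options:
    # (new_section, new_key) -> (old_section, old_key, deprecated_since)
    DEPRECATED = {
        ("database", "sql_alchemy_conn"): ("core", "sql_alchemy_conn", "2.3.0"),
    }

    def lookup(store, section, key):
        # Prefer the new location; fall back to the old one with a warning.
        if (section, key) in store:
            return store[(section, key)]
        old = DEPRECATED.get((section, key))
        if old and (old[0], old[1]) in store:
            old_section, old_key, since = old
            warnings.warn(
                f"[{old_section}] {old_key} is deprecated (since {since}); "
                f"use [{section}] {key} instead.",
                DeprecationWarning,
            )
            return store[(old_section, old_key)]
        raise KeyError((section, key))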
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index 754d835dc3..0b2de8085c 100644
--- a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -140,7 +140,9 @@ class KubernetesExecutor(BaseExecutor):
self.last_handled: dict[TaskInstanceKey, float] = {}
self.kubernetes_queue: str | None = None
self.task_publish_retries: Counter[TaskInstanceKey] = Counter()
- self.task_publish_max_retries = conf.getint("kubernetes", "task_publish_max_retries", fallback=0)
+ self.task_publish_max_retries = conf.getint(
+ "kubernetes_executor", "task_publish_max_retries", fallback=0
+ )
super().__init__(parallelism=self.kube_config.parallelism)
def _list_pods(self, query_kwargs):
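With the deprecated [kubernetes] section alias removed, this option is read only from the new section. An illustrative airflow.cfg snippet (the value 3 is an arbitrary example; per the conf.getint call above, the option defaults to 0 when unset):

    [kubernetes_executor]
    task_publish_max_retries = 3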
diff --git a/airflow/providers/fab/auth_manager/fab_auth_manager.py b/airflow/providers/fab/auth_manager/fab_auth_manager.py
index 5ea926f5b6..a0ec27bc67 100644
--- a/airflow/providers/fab/auth_manager/fab_auth_manager.py
+++ b/airflow/providers/fab/auth_manager/fab_auth_manager.py
@@ -523,9 +523,15 @@ class FabAuthManager(BaseAuthManager):
# Otherwise, when the name of a view or menu is changed, the framework
# will add the new Views and Menus names to the backend, but will not
# delete the old ones.
- if conf.getboolean(
- "fab", "UPDATE_FAB_PERMS", fallback=conf.getboolean("webserver",
"UPDATE_FAB_PERMS")
- ):
+ from packaging.version import Version
+
+ from airflow.version import version
+
+ if Version(Version(version).base_version) >= Version("3.0.0"):
+ fallback = None
+ else:
+ fallback = conf.getboolean("webserver", "UPDATE_FAB_PERMS")
+ if conf.getboolean("fab", "UPDATE_FAB_PERMS", fallback=fallback):
self.security_manager.sync_roles()
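The base_version comparison is what lets pre-release builds of Airflow 3 take the new branch: PEP 440 orders dev and rc versions before the final release, while base_version strips those suffixes. A standalone illustration using packaging (not part of the commit):

    from packaging.version import Version

    dev = Version("3.0.0.dev0")
    print(dev >= Version("3.0.0"))                        # False: dev builds sort before the release
    print(Version(dev.base_version) >= Version("3.0.0"))  # True: base_version is "3.0.0"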
diff --git a/newsfragments/42129.significant.rst b/newsfragments/42129.significant.rst
new file mode 100644
index 0000000000..06a5da7fc8
--- /dev/null
+++ b/newsfragments/42129.significant.rst
@@ -0,0 +1,17 @@
+Removed deprecated configuration options and the deprecated ``kubernetes`` section.
+
+ * Removed deprecated configuration ``worker_precheck`` from ``core``. Please use ``worker_precheck`` from ``celery`` instead.
+ * Removed deprecated configuration ``max_threads`` from ``scheduler``. Please use ``parsing_processes`` from ``scheduler`` instead.
+ * Removed deprecated configuration ``default_queue`` from ``celery``. Please use ``default_queue`` from ``operators`` instead.
+ * Removed deprecated configuration ``hide_sensitive_variable_fields`` from ``admin``. Please use ``hide_sensitive_var_conn_fields`` from ``core`` instead.
+ * Removed deprecated configuration ``sensitive_variable_fields`` from ``admin``. Please use ``sensitive_var_conn_names`` from ``core`` instead.
+ * Removed deprecated configuration ``non_pooled_task_slot_count`` from ``core``. Please use ``default_pool_task_slot_count`` from ``core`` instead.
+ * Removed deprecated configuration ``dag_concurrency`` from ``core``. Please use ``max_active_tasks_per_dag`` from ``core`` instead.
+ * Removed deprecated configuration ``access_control_allow_origin`` from ``api``. Please use ``access_control_allow_origins`` from ``api`` instead.
+ * Removed deprecated configuration ``auth_backend`` from ``api``. Please use ``auth_backends`` from ``api`` instead.
+ * Removed deprecated configuration ``deactivate_stale_dags_interval`` from ``scheduler``. Please use ``parsing_cleanup_interval`` from ``scheduler`` instead.
+ * Removed deprecated configuration ``worker_pods_pending_timeout_check_interval`` from ``kubernetes_executor``. Please use ``task_queued_timeout_check_interval`` from ``scheduler`` instead.
+ * Removed deprecated configuration ``update_fab_perms`` from ``webserver``. Please use ``update_fab_perms`` from ``fab`` instead.
+ * Removed deprecated configuration ``auth_rate_limited`` from ``webserver``. Please use ``auth_rate_limited`` from ``fab`` instead.
+ * Removed deprecated configuration ``auth_rate_limit`` from ``webserver``. Please use ``auth_rate_limit`` from ``fab`` instead.
+ * Removed deprecated configuration section ``kubernetes``. Please use ``kubernetes_executor`` instead.
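For users upgrading, each entry above means moving the key to its new section in airflow.cfg, or renaming the corresponding AIRFLOW__SECTION__KEY environment variable. An illustrative before/after for one of the renames, with a placeholder value:

    # Before (no longer read):
    [webserver]
    auth_rate_limit = 5 per 40 second

    # After:
    [fab]
    auth_rate_limit = 5 per 40 second

    # Equivalent environment variable:
    # AIRFLOW__FAB__AUTH_RATE_LIMIT="5 per 40 second"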
diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py
index af43daa303..55c4aade4f 100644
--- a/tests/core/test_configuration.py
+++ b/tests/core/test_configuration.py
@@ -53,6 +53,13 @@ from tests.utils.test_config import (
HOME_DIR = os.path.expanduser("~")
+# The conf has been updated with deactivate_stale_dags_interval to test support for deprecated options.
+conf.deprecated_options[("scheduler", "parsing_cleanup_interval")] = (
+ "scheduler",
+ "deactivate_stale_dags_interval",
+ "2.5.0",
+)
+
@pytest.fixture(scope="module", autouse=True)
def restore_env():
@@ -1002,14 +1009,6 @@ sql_alchemy_conn=sqlite://test
@pytest.mark.parametrize(
"old, new",
[
- (
- ("api", "auth_backend",
"airflow.providers.fab.auth_manager.api.auth.backend.basic_auth"),
- (
- "api",
- "auth_backends",
- "airflow.providers.fab.auth_manager.api.auth.backend.basic_auth,airflow.api.auth.backend.session",
- ),
- ),
(
("core", "sql_alchemy_conn",
"postgres+psycopg2://localhost/postgres"),
("database", "sql_alchemy_conn",
"postgresql://localhost/postgres"),
diff --git a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
index 769eb9c980..8cc46c3dba 100644
--- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
+++ b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
@@ -413,7 +413,7 @@ class TestKubernetesExecutor:
mock_api_client.sanitize_for_serialization.return_value = {}
mock_kube_client.api_client = mock_api_client
config = {
- ("kubernetes", "pod_template_file"): template_file,
+ ("kubernetes_executor", "pod_template_file"): template_file,
}
with conf_vars(config):
kubernetes_executor = self.kubernetes_executor
@@ -513,7 +513,7 @@ class TestKubernetesExecutor:
mock_api_client = mock.MagicMock()
mock_api_client.sanitize_for_serialization.return_value = {}
mock_kube_client.api_client = mock_api_client
- config = {("kubernetes", "pod_template_file"): template_file}
+ config = {("kubernetes_executor", "pod_template_file"): template_file}
with conf_vars(config):
kubernetes_executor = self.kubernetes_executor
kubernetes_executor.start()
@@ -597,7 +597,7 @@ class TestKubernetesExecutor:
mock_kube_client = mock.patch("kubernetes.client.CoreV1Api", autospec=True)
mock_get_kube_client.return_value = mock_kube_client
- with conf_vars({("kubernetes", "pod_template_file"): None}):
+ with conf_vars({("kubernetes_executor", "pod_template_file"): None}):
executor = self.kubernetes_executor
executor.start()
try:
@@ -1227,8 +1227,8 @@ class TestKubernetesExecutor:
self, raw_multi_namespace_mode, raw_value_namespace_list, expected_value_in_kube_config
):
config = {
- ("kubernetes", "multi_namespace_mode"): raw_multi_namespace_mode,
- ("kubernetes", "multi_namespace_mode_namespace_list"):
raw_value_namespace_list,
+ ("kubernetes_executor", "multi_namespace_mode"):
raw_multi_namespace_mode,
+ ("kubernetes_executor", "multi_namespace_mode_namespace_list"):
raw_value_namespace_list,
}
with conf_vars(config):
executor = KubernetesExecutor()
@@ -1504,7 +1504,7 @@ class TestKubernetesExecutor:
}
get_logs_task_metadata.cache_clear()
try:
- with conf_vars({("kubernetes", "logs_task_metadata"): "True"}):
+ with conf_vars({("kubernetes_executor", "logs_task_metadata"): "True"}):
expected_annotations = {
"dag_id": "dag",
"run_id": "run_id",
@@ -1525,7 +1525,7 @@ class TestKubernetesExecutor:
}
get_logs_task_metadata.cache_clear()
try:
- with conf_vars({("kubernetes", "logs_task_metadata"): "False"}):
+ with conf_vars({("kubernetes_executor", "logs_task_metadata"): "False"}):
expected_annotations = "<omitted>"
annotations_actual = annotations_for_logging_task_metadata(annotations_test)
assert annotations_actual == expected_annotations
diff --git a/tests/providers/cncf/kubernetes/test_client.py b/tests/providers/cncf/kubernetes/test_client.py
index 836bdefdac..1384068fd2 100644
--- a/tests/providers/cncf/kubernetes/test_client.py
+++ b/tests/providers/cncf/kubernetes/test_client.py
@@ -92,7 +92,9 @@ class TestClient:
assert not configuration.verify_ssl
@mock.patch("kubernetes.config.incluster_config.InClusterConfigLoader")
- @conf_vars({("kubernetes", "api_client_retry_configuration"): '{"total": 3, "backoff_factor": 0.5}'})
+ @conf_vars(
+ {("kubernetes_executor", "api_client_retry_configuration"): '{"total":
3, "backoff_factor": 0.5}'}
+ )
def test_api_client_retry_configuration_correct_values(self, mock_in_cluster_loader):
get_kube_client(in_cluster=True)
client_configuration = mock_in_cluster_loader().load_and_set.call_args.args[0]