This is an automated email from the ASF dual-hosted git repository.
rahulvats pushed a commit to branch v3-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/v3-1-test by this push:
new b20bd350302 remove mp_start_method remnants (#61150) (#62762)
b20bd350302 is described below
commit b20bd3503024fae8e8ace105fb7831df5c4a95a3
Author: Rahul Vats <[email protected]>
AuthorDate: Tue Mar 3 15:00:41 2026 +0530
remove mp_start_method remnants (#61150) (#62762)
the main core of this functionality was removed in 11acefa462b6; this is
just removing what remains in config. Although multiprocessing is still
used in some places, python-daemon is now used as the main driver; if it
makes sense, it can be added back in the future.
(cherry picked from commit 3b9fe5eeb15ef5df330cf94928e3efcef079313a)
Co-authored-by: Nataneljpwd <[email protected]>
Co-authored-by: Matt Phillips <[email protected]>
---
airflow-core/src/airflow/config_templates/config.yml | 12 ------------
airflow-core/src/airflow/configuration.py | 2 --
devel-common/src/tests_common/pytest_plugin.py | 6 ------
3 files changed, 20 deletions(-)
diff --git a/airflow-core/src/airflow/config_templates/config.yml b/airflow-core/src/airflow/config_templates/config.yml
index 62edde02e2a..2bc97399194 100644
--- a/airflow-core/src/airflow/config_templates/config.yml
+++ b/airflow-core/src/airflow/config_templates/config.yml
@@ -158,18 +158,6 @@ core:
type: integer
example: ~
default: "0"
- mp_start_method:
- description: |
- The name of the method used in order to start Python processes via the multiprocessing module.
- This corresponds directly with the options available in the Python docs:
- `multiprocessing.set_start_method
- <https://docs.python.org/3/library/multiprocessing.html#multiprocessing.set_start_method>`__
- must be one of the values returned by
`multiprocessing.get_all_start_methods()
- <https://docs.python.org/3/library/multiprocessing.html#multiprocessing.get_all_start_methods>`__.
- version_added: "2.0.0"
- type: string
- default: ~
- example: "fork"
load_examples:
description: |
Whether to load the Dag examples that ship with Airflow. It's good to
diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py
index 216804fae17..7084563f366 100644
--- a/airflow-core/src/airflow/configuration.py
+++ b/airflow-core/src/airflow/configuration.py
@@ -22,7 +22,6 @@ import functools
import itertools
import json
import logging
-import multiprocessing
import os
import pathlib
import re
@@ -417,7 +416,6 @@ class AirflowConfigParser(ConfigParser):
enums_options = {
("core", "default_task_weight_rule"):
sorted(WeightRule.all_weight_rules()),
("core", "dag_ignore_file_syntax"): ["regexp", "glob"],
- ("core", "mp_start_method"): multiprocessing.get_all_start_methods(),
("dag_processor", "file_parsing_sort_mode"): [
"modified_time",
"random_seeded_by_host",
diff --git a/devel-common/src/tests_common/pytest_plugin.py b/devel-common/src/tests_common/pytest_plugin.py
index ebe160137b3..69314717376 100644
--- a/devel-common/src/tests_common/pytest_plugin.py
+++ b/devel-common/src/tests_common/pytest_plugin.py
@@ -21,7 +21,6 @@ import importlib
import json
import logging
import os
-import platform
import re
import subprocess
import sys
@@ -210,11 +209,6 @@ os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True"
os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1"
os.environ["CREDENTIALS_DIR"] = os.environ.get("CREDENTIALS_DIR") or "/files/airflow-breeze-config/keys"
-if platform.system() == "Darwin":
- # mocks from unittest.mock work correctly in subprocesses only if they are created by "fork" method
- # but macOS uses "spawn" by default
- os.environ["AIRFLOW__CORE__MP_START_METHOD"] = "fork"
-
@pytest.fixture
def reset_db():