This is an automated email from the ASF dual-hosted git repository.
rom pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 420b24aa79 Move Hooks to Standard provider (#42794)
420b24aa79 is described below
commit 420b24aa798f73bd6fa8c525e80958e9c9e85dec
Author: GPK <[email protected]>
AuthorDate: Thu Oct 10 06:50:43 2024 +0100
Move Hooks to Standard provider (#42794)
---
airflow/providers_manager.py | 4 +--
airflow/sensors/filesystem.py | 2 +-
.../logging-monitoring/errors.rst | 2 +-
docs/apache-airflow/operators-and-hooks-ref.rst | 4 +--
.../airflow/providers/standard/hooks/__init__.py | 16 +++++++++
.../providers/standard}/hooks/filesystem.py | 0
.../providers/standard}/hooks/package_index.py | 0
.../providers/standard}/hooks/subprocess.py | 4 +--
.../airflow/providers/standard/operators/bash.py | 2 +-
.../src/airflow/providers/standard/provider.yaml | 7 ++++
providers/tests/standard/hooks/__init__.py | 16 +++++++++
providers/tests/standard/hooks/test_filesystem.py | 39 ++++++++++++++++++++++
.../tests/standard}/hooks/test_package_index.py | 6 ++--
.../tests/standard}/hooks/test_subprocess.py | 6 ++--
tests/sensors/test_filesystem.py | 2 +-
15 files changed, 94 insertions(+), 16 deletions(-)
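
In practical terms, code that previously imported these hooks from airflow.hooks now imports them from the standard provider package. Below is a minimal, illustrative sketch of the new import paths (not part of this commit; it assumes apache-airflow-providers-standard is installed, and older import paths may still resolve through deprecation shims depending on the Airflow version):

    # Before this change:
    #     from airflow.hooks.filesystem import FSHook
    #     from airflow.hooks.package_index import PackageIndexHook
    #     from airflow.hooks.subprocess import SubprocessHook
    # After this change, the hooks live in the standard provider:
    from airflow.providers.standard.hooks.filesystem import FSHook
    from airflow.providers.standard.hooks.package_index import PackageIndexHook
    from airflow.providers.standard.hooks.subprocess import SubprocessHook

    fs_hook = FSHook(fs_conn_id="fs_default")
    print(fs_hook.get_path())  # base path of the "fs_default" connection, "/" by default
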
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index 1b1ca469f2..573d256d6e 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -36,8 +36,8 @@ from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, Typ
from packaging.utils import canonicalize_name
from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.hooks.filesystem import FSHook
-from airflow.hooks.package_index import PackageIndexHook
+from airflow.providers.standard.hooks.filesystem import FSHook
+from airflow.providers.standard.hooks.package_index import PackageIndexHook
from airflow.typing_compat import ParamSpec
from airflow.utils import yaml
from airflow.utils.entry_points import entry_points_with_dist
diff --git a/airflow/sensors/filesystem.py b/airflow/sensors/filesystem.py
index 5d32ab07ad..4496f5d6ab 100644
--- a/airflow/sensors/filesystem.py
+++ b/airflow/sensors/filesystem.py
@@ -25,7 +25,7 @@ from typing import TYPE_CHECKING, Any, Sequence
from airflow.configuration import conf
from airflow.exceptions import AirflowException
-from airflow.hooks.filesystem import FSHook
+from airflow.providers.standard.hooks.filesystem import FSHook
from airflow.sensors.base import BaseSensorOperator
from airflow.triggers.base import StartTriggerArgs
from airflow.triggers.file import FileTrigger
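
Since FileSensor now resolves its base path through the provider-packaged FSHook, here is a small illustrative pairing of the two (hypothetical DAG id, dates, and file path; not part of this commit):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.standard.hooks.filesystem import FSHook
    from airflow.sensors.filesystem import FileSensor

    with DAG(dag_id="example_fs_wait", start_date=datetime(2024, 1, 1), schedule=None):
        # The sensor looks up the connection's base path via FSHook(fs_conn_id)
        # and then waits for data/input.csv to appear under it.
        wait_for_file = FileSensor(
            task_id="wait_for_file",
            fs_conn_id="fs_default",
            filepath="data/input.csv",
        )

    print(FSHook(fs_conn_id="fs_default").get_path())  # "/" for the default connection
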
diff --git a/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst b/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst
index cb09843422..0ad3fa8c51 100644
--- a/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst
+++ b/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst
@@ -96,7 +96,7 @@ Impact of Sentry on Environment variables passed to Subprocess Hook
When Sentry is enabled, by default it changes the standard library to pass all environment variables to
subprocesses opened by Airflow. This changes the default behaviour of
-:class:`airflow.hooks.subprocess.SubprocessHook` - always all environment variables are passed to the
+:class:`airflow.providers.standard.hooks.subprocess.SubprocessHook` - always all environment variables are passed to the
subprocess executed with specific set of environment variables. In this case not only the specified
environment variables are passed but also all existing environment variables are passed with
``SUBPROCESS_`` prefix added. This happens also for all other subprocesses.
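
To make the documented behaviour concrete, here is a hedged, illustrative sketch (not part of this commit; assumes Sentry is enabled and the standard provider is installed) showing that variables passed explicitly via ``env`` are joined by ``SUBPROCESS_``-prefixed copies of the parent environment:

    from airflow.providers.standard.hooks.subprocess import SubprocessHook

    hook = SubprocessHook()
    # With Sentry enabled, the child process sees MY_VAR plus SUBPROCESS_-prefixed
    # copies of the parent environment (for example PATH also appears as SUBPROCESS_PATH).
    result = hook.run_command(command=["bash", "-c", "env | sort"], env={"MY_VAR": "my-value"})
    print(result.exit_code)   # 0 on success
    print(result.output)      # last line the command wrote to stdout
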
diff --git a/docs/apache-airflow/operators-and-hooks-ref.rst b/docs/apache-airflow/operators-and-hooks-ref.rst
index c82a4f3a66..655551705e 100644
--- a/docs/apache-airflow/operators-and-hooks-ref.rst
+++ b/docs/apache-airflow/operators-and-hooks-ref.rst
@@ -106,8 +106,8 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`.
* - Hooks
- Guides
- * - :mod:`airflow.hooks.filesystem`
+ * - :mod:`airflow.providers.standard.hooks.filesystem`
-
- * - :mod:`airflow.hooks.subprocess`
+ * - :mod:`airflow.providers.standard.hooks.subprocess`
-
diff --git a/providers/src/airflow/providers/standard/hooks/__init__.py b/providers/src/airflow/providers/standard/hooks/__init__.py
new file mode 100644
index 0000000000..13a83393a9
--- /dev/null
+++ b/providers/src/airflow/providers/standard/hooks/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/hooks/filesystem.py b/providers/src/airflow/providers/standard/hooks/filesystem.py
similarity index 100%
rename from airflow/hooks/filesystem.py
rename to providers/src/airflow/providers/standard/hooks/filesystem.py
diff --git a/airflow/hooks/package_index.py b/providers/src/airflow/providers/standard/hooks/package_index.py
similarity index 100%
rename from airflow/hooks/package_index.py
rename to providers/src/airflow/providers/standard/hooks/package_index.py
diff --git a/airflow/hooks/subprocess.py b/providers/src/airflow/providers/standard/hooks/subprocess.py
similarity index 96%
rename from airflow/hooks/subprocess.py
rename to providers/src/airflow/providers/standard/hooks/subprocess.py
index bc20b5c20b..9e578a7d80 100644
--- a/airflow/hooks/subprocess.py
+++ b/providers/src/airflow/providers/standard/hooks/subprocess.py
@@ -52,8 +52,8 @@ class SubprocessHook(BaseHook):
:param env: Optional dict containing environment variables to be made available to the shell
environment in which ``command`` will be executed. If omitted, ``os.environ`` will be used.
Note, that in case you have Sentry configured, original variables from the environment
- will also be passed to the subprocess with ``SUBPROCESS_`` prefix. See
- :doc:`/administration-and-deployment/logging-monitoring/errors` for details.
+ will also be passed to the subprocess with ``SUBPROCESS_`` prefix. See:
+ https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/errors.html for details.
:param output_encoding: encoding to use for decoding stdout
:param cwd: Working directory to run the command in.
If None (default), the command is run in a temporary directory.
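
As a quick orientation for the parameters documented above, a hedged usage sketch (command and values are illustrative; assumes apache-airflow-providers-standard is installed):

    from airflow.providers.standard.hooks.subprocess import SubprocessHook

    hook = SubprocessHook()
    result = hook.run_command(
        command=["bash", "-c", "echo $GREETING && pwd"],
        env={"GREETING": "hello"},   # only GREETING is exposed to the shell (plus Sentry's SUBPROCESS_* copies, if enabled)
        output_encoding="utf-8",     # used to decode the command's stdout
        cwd=None,                    # None runs the command in a temporary directory
    )
    print(result.exit_code, result.output)  # output is the last stdout line, here the temporary directory path
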
diff --git a/providers/src/airflow/providers/standard/operators/bash.py b/providers/src/airflow/providers/standard/operators/bash.py
index 2ec0341a0d..bf4a943df6 100644
--- a/providers/src/airflow/providers/standard/operators/bash.py
+++ b/providers/src/airflow/providers/standard/operators/bash.py
@@ -24,8 +24,8 @@ from functools import cached_property
from typing import TYPE_CHECKING, Any, Callable, Container, Sequence, cast
from airflow.exceptions import AirflowException, AirflowSkipException
-from airflow.hooks.subprocess import SubprocessHook
from airflow.models.baseoperator import BaseOperator
+from airflow.providers.standard.hooks.subprocess import SubprocessHook
from airflow.utils.operator_helpers import context_to_airflow_vars
from airflow.utils.types import ArgNotSet
diff --git a/providers/src/airflow/providers/standard/provider.yaml b/providers/src/airflow/providers/standard/provider.yaml
index 2d4c4f29be..b3111d62b1 100644
--- a/providers/src/airflow/providers/standard/provider.yaml
+++ b/providers/src/airflow/providers/standard/provider.yaml
@@ -52,3 +52,10 @@ sensors:
- airflow.providers.standard.sensors.time
- airflow.providers.standard.sensors.weekday
- airflow.providers.standard.sensors.bash
+
+hooks:
+ - integration-name: Standard
+ python-modules:
+ - airflow.providers.standard.hooks.filesystem
+ - airflow.providers.standard.hooks.package_index
+ - airflow.providers.standard.hooks.subprocess
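
With the hooks now listed in the standard provider's provider.yaml, a quick sanity sketch (an assumption-based check, not part of this commit) to confirm the relocated modules resolve from the provider package:

    import importlib

    # Each module registered under the new ``hooks`` section should import cleanly
    # once apache-airflow-providers-standard is installed.
    for module in (
        "airflow.providers.standard.hooks.filesystem",
        "airflow.providers.standard.hooks.package_index",
        "airflow.providers.standard.hooks.subprocess",
    ):
        importlib.import_module(module)  # raises ModuleNotFoundError if the provider is missing
    print("standard provider hook modules import cleanly")
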
diff --git a/providers/tests/standard/hooks/__init__.py b/providers/tests/standard/hooks/__init__.py
new file mode 100644
index 0000000000..13a83393a9
--- /dev/null
+++ b/providers/tests/standard/hooks/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/providers/tests/standard/hooks/test_filesystem.py b/providers/tests/standard/hooks/test_filesystem.py
new file mode 100644
index 0000000000..bbcd22dc94
--- /dev/null
+++ b/providers/tests/standard/hooks/test_filesystem.py
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pytest
+
+from airflow.providers.standard.hooks.filesystem import FSHook
+
+pytestmark = pytest.mark.db_test
+
+
+class TestFSHook:
+ def test_get_ui_field_behaviour(self):
+ fs_hook = FSHook()
+ assert fs_hook.get_ui_field_behaviour() == {
+ "hidden_fields": ["host", "schema", "port", "login", "password",
"extra"],
+ "relabeling": {},
+ "placeholders": {},
+ }
+
+ def test_get_path(self):
+ fs_hook = FSHook(fs_conn_id="fs_default")
+
+ assert fs_hook.get_path() == "/"
diff --git a/tests/hooks/test_package_index.py b/providers/tests/standard/hooks/test_package_index.py
similarity index 93%
rename from tests/hooks/test_package_index.py
rename to providers/tests/standard/hooks/test_package_index.py
index 9da429c5a0..6a90db0715 100644
--- a/tests/hooks/test_package_index.py
+++ b/providers/tests/standard/hooks/test_package_index.py
@@ -21,8 +21,8 @@ from __future__ import annotations
import pytest
-from airflow.hooks.package_index import PackageIndexHook
from airflow.models.connection import Connection
+from airflow.providers.standard.hooks.package_index import PackageIndexHook
class MockConnection(Connection):
@@ -73,7 +73,7 @@ def mock_get_connection(monkeypatch: pytest.MonkeyPatch, request: pytest.Fixture
password: str | None = testdata.get("password", None)
expected_result: str | None = testdata.get("expected_result", None)
monkeypatch.setattr(
- "airflow.hooks.package_index.PackageIndexHook.get_connection",
+ "airflow.providers.standard.hooks.package_index.PackageIndexHook.get_connection",
lambda *_: MockConnection(host, login, password),
)
return expected_result
@@ -104,7 +104,7 @@ def test_test_connection(monkeypatch: pytest.MonkeyPatch, mock_get_connection: s
return MockProc()
- monkeypatch.setattr("airflow.hooks.package_index.subprocess.run", mock_run)
+ monkeypatch.setattr("airflow.providers.standard.hooks.package_index.subprocess.run", mock_run)
hook_instance = PackageIndexHook()
if mock_get_connection:
diff --git a/tests/hooks/test_subprocess.py b/providers/tests/standard/hooks/test_subprocess.py
similarity index 95%
rename from tests/hooks/test_subprocess.py
rename to providers/tests/standard/hooks/test_subprocess.py
index 0f625be816..2b2e947335 100644
--- a/tests/hooks/test_subprocess.py
+++ b/providers/tests/standard/hooks/test_subprocess.py
@@ -26,7 +26,7 @@ from unittest.mock import MagicMock
import pytest
-from airflow.hooks.subprocess import SubprocessHook
+from airflow.providers.standard.hooks.subprocess import SubprocessHook
OS_ENV_KEY = "SUBPROCESS_ENV_TEST"
OS_ENV_VAL = "this-is-from-os-environ"
@@ -81,11 +81,11 @@ class TestSubprocessHook:
@mock.patch.dict("os.environ", clear=True)
@mock.patch(
- "airflow.hooks.subprocess.TemporaryDirectory",
+ "airflow.providers.standard.hooks.subprocess.TemporaryDirectory",
return_value=MagicMock(__enter__=MagicMock(return_value="/tmp/airflowtmpcatcat")),
)
@mock.patch(
- "airflow.hooks.subprocess.Popen",
+ "airflow.providers.standard.hooks.subprocess.Popen",
return_value=MagicMock(stdout=MagicMock(readline=MagicMock(side_effect=StopIteration), returncode=0)),
)
def test_should_exec_subprocess(self, mock_popen, mock_temporary_directory):
diff --git a/tests/sensors/test_filesystem.py b/tests/sensors/test_filesystem.py
index 1fb123cfe7..641f2f218f 100644
--- a/tests/sensors/test_filesystem.py
+++ b/tests/sensors/test_filesystem.py
@@ -40,7 +40,7 @@ DEFAULT_DATE = datetime(2015, 1, 1)
@pytest.mark.skip_if_database_isolation_mode # Test is broken in db isolation mode
class TestFileSensor:
def setup_method(self):
- from airflow.hooks.filesystem import FSHook
+ from airflow.providers.standard.hooks.filesystem import FSHook
hook = FSHook()
args = {"owner": "airflow", "start_date": DEFAULT_DATE}