This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 5863c42962 Cache the custom secrets backend so the same instance gets re-used (#25556)
5863c42962 is described below
commit 5863c42962404607013422a40118d8b9f4603f0b
Author: Peter Debelak <[email protected]>
AuthorDate: Sat Aug 6 09:21:22 2022 -0500
Cache the custom secrets backend so the same instance gets re-used (#25556)
* Cache the custom secrets backend so the same instance gets re-used
Fixes #25555
This uses `functools.lru_cache` to re-use the same secrets backend
instance between the `conf` global when it loads configuration from
secrets and uses outside the `configuration` module like variables and
connections. Previously, each fetch of a configuration value from
secrets would use its own secrets backend instance.
Also add unit test to confirm that only one secrets backend instance
gets created.
---
airflow/configuration.py | 9 ++++-
tests/core/test_configuration.py | 81 ++++++++++++++++++++++++++++++++++++++++
2 files changed, 88 insertions(+), 2 deletions(-)
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 139172d0f3..5e84de6ea4 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -1534,7 +1534,6 @@ def ensure_secrets_loaded() -> List[BaseSecretsBackend]:
def get_custom_secret_backend() -> Optional[BaseSecretsBackend]:
"""Get Secret Backend if defined in airflow.cfg"""
secrets_backend_cls = conf.getimport(section='secrets', key='backend')
-
if secrets_backend_cls:
try:
backends: Any = conf.get(section='secrets', key='backend_kwargs', fallback='{}')
@@ -1542,10 +1541,16 @@ def get_custom_secret_backend() -> Optional[BaseSecretsBackend]:
except JSONDecodeError:
alternative_secrets_config_dict = {}
- return secrets_backend_cls(**alternative_secrets_config_dict)
+ return _custom_secrets_backend(secrets_backend_cls, **alternative_secrets_config_dict)
return None
+@functools.lru_cache(maxsize=2)
+def _custom_secrets_backend(secrets_backend_cls, **alternative_secrets_config_dict):
+ """Separate function to create secrets backend instance to allow caching"""
+ return secrets_backend_cls(**alternative_secrets_config_dict)
+
+
def initialize_secrets_backends() -> List[BaseSecretsBackend]:
"""
* import secrets backend classes
diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py
index 15c8bbffc1..1759788654 100644
--- a/tests/core/test_configuration.py
+++ b/tests/core/test_configuration.py
@@ -33,6 +33,7 @@ from airflow import configuration
from airflow.configuration import (
AirflowConfigException,
AirflowConfigParser,
+ _custom_secrets_backend,
conf,
expand_env_var,
get_airflow_config,
@@ -296,6 +297,7 @@ sql_alchemy_conn = airflow
def test_config_raise_exception_from_secret_backend_connection_error(self, mock_hvac):
"""Get Config Value from a Secret Backend"""
+ _custom_secrets_backend.cache_clear()
mock_client = mock.MagicMock()
# mock_client.side_effect = AirflowConfigException
mock_hvac.Client.return_value = mock_client
@@ -322,6 +324,7 @@ sql_alchemy_conn = airflow
),
):
test_conf.get('test', 'sql_alchemy_conn')
+ _custom_secrets_backend.cache_clear()
def test_getboolean(self):
"""Test AirflowConfigParser.getboolean"""
@@ -1297,3 +1300,81 @@ sql_alchemy_conn=sqlite://test
conf.read_dict(dictionary=cfg_dict)
os.environ.clear()
assert conf.get('database', 'sql_alchemy_conn') == f'sqlite:///{HOME_DIR}/airflow/airflow.db'
+
+
+ @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
+ @conf_vars(
+ {
+ ("secrets", "backend"): "airflow.providers.hashicorp.secrets.vault.VaultBackend",
+ ("secrets", "backend_kwargs"): '{"url": "http://127.0.0.1:8200", "token": "token"}',
+ }
+ )
+ def test_config_from_secret_backend_caches_instance(self, mock_hvac):
+ """Get Config Value from a Secret Backend"""
+ _custom_secrets_backend.cache_clear()
+
+ test_config = '''[test]
+sql_alchemy_conn_secret = sql_alchemy_conn
+secret_key_secret = secret_key
+'''
+ test_config_default = '''[test]
+sql_alchemy_conn = airflow
+secret_key = airflow
+'''
+
+ mock_client = mock.MagicMock()
+ mock_hvac.Client.return_value = mock_client
+
+ def fake_read_secret(path, mount_point, version):
+ if path.endswith('sql_alchemy_conn'):
+ return {
+ 'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
+ 'lease_id': '',
+ 'renewable': False,
+ 'lease_duration': 0,
+ 'data': {
+ 'data': {'value': 'fake_conn'},
+ 'metadata': {
+ 'created_time': '2020-03-28T02:10:54.301784Z',
+ 'deletion_time': '',
+ 'destroyed': False,
+ 'version': 1,
+ },
+ },
+ 'wrap_info': None,
+ 'warnings': None,
+ 'auth': None,
+ }
+ if path.endswith('secret_key'):
+ return {
+ 'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
+ 'lease_id': '',
+ 'renewable': False,
+ 'lease_duration': 0,
+ 'data': {
+ 'data': {'value': 'fake_key'},
+ 'metadata': {
+ 'created_time': '2020-03-28T02:10:54.301784Z',
+ 'deletion_time': '',
+ 'destroyed': False,
+ 'version': 1,
+ },
+ },
+ 'wrap_info': None,
+ 'warnings': None,
+ 'auth': None,
+ }
+
+ mock_client.secrets.kv.v2.read_secret_version.side_effect = fake_read_secret
+
+ test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
+ test_conf.read_string(test_config)
+ test_conf.sensitive_config_values = test_conf.sensitive_config_values | {
+ ('test', 'sql_alchemy_conn'),
+ ('test', 'secret_key'),
+ }
+
+ assert 'fake_conn' == test_conf.get('test', 'sql_alchemy_conn')
+ mock_hvac.Client.assert_called_once()
+ assert 'fake_key' == test_conf.get('test', 'secret_key')
+ mock_hvac.Client.assert_called_once()
+ _custom_secrets_backend.cache_clear()