This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 5e1e5fa637 D401 Support - Root Files (#33352)
5e1e5fa637 is described below
commit 5e1e5fa637aa90b92e4b0e1d62c577461ea92369
Author: D. Ferruzzi <[email protected]>
AuthorDate: Sun Aug 13 16:56:43 2023 -0700
D401 Support - Root Files (#33352)
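    For reviewers unfamiliar with the rule: D401 is the pydocstyle check "First line
    should be in imperative mood", so this commit rewrites the opening of each affected
    docstring to start with an imperative verb (e.g. "Returns" becomes "Return") without
    changing behaviour. A minimal sketch of the pattern, based on the
    get_excluded_providers hunk in setup.py below:

        # Before (flagged by D401: first word "Returns" is not imperative):
        #     """Returns packages excluded for the current python version."""
        def get_excluded_providers() -> list[str]:
            """Return packages excluded for the current python version."""
            return []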
---
airflow/__main__.py | 1 -
airflow/configuration.py | 45 ++++++++++++++++++++++---------------------
airflow/policies.py | 8 ++++----
airflow/sentry.py | 4 ++--
airflow/settings.py | 4 ++--
helm_tests/other/test_keda.py | 2 +-
setup.py | 16 +++++++--------
7 files changed, 40 insertions(+), 40 deletions(-)
diff --git a/airflow/__main__.py b/airflow/__main__.py
index 20af0ac274..e49f7e9bdf 100644
--- a/airflow/__main__.py
+++ b/airflow/__main__.py
@@ -38,7 +38,6 @@ from airflow.configuration import write_webserver_configuration_if_needed
def main():
- """Main executable function."""
conf = configuration.conf
if conf.get("core", "security") == "kerberos":
os.environ["KRB5CCNAME"] = conf.get("kerberos", "ccache")
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 8cdc2a1542..b456dae28e 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -87,7 +87,7 @@ def expand_env_var(env_var: str) -> str:
def expand_env_var(env_var: str | None) -> str | None:
"""
- Expands (potentially nested) env vars.
+ Expand (potentially nested) env vars.
Repeat and apply `expandvars` and `expanduser` until
interpolation stops having any effect.
@@ -103,7 +103,7 @@ def expand_env_var(env_var: str | None) -> str | None:
def run_command(command: str) -> str:
- """Runs command and returns stdout."""
+ """Run command and returns stdout."""
process = subprocess.Popen(
shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True
)
@@ -239,7 +239,7 @@ class AirflowConfigParser(ConfigParser):
def _update_defaults_from_string(self, config_string: str):
"""
- The defaults in _default_values are updated based on values in config_string ("ini" format).
+ Update the defaults in _default_values based on values in config_string ("ini" format).
Note that those values are not validated and cannot contain variables because we are using
regular config parser to load them. This method is used to test the config parser in unit tests.
@@ -268,7 +268,7 @@ class AirflowConfigParser(ConfigParser):
def get_default_value(self, section: str, key: str, fallback: Any = None, raw=False, **kwargs) -> Any:
"""
- Retrieves default value from default config parser.
+ Retrieve default value from default config parser.
This will retrieve the default value from the default config parser. Optionally a raw, stored
value can be retrieved by setting skip_interpolation to True. This is useful for example when
@@ -470,7 +470,7 @@ class AirflowConfigParser(ConfigParser):
def get_sections_including_defaults(self) -> list[str]:
"""
- Retrieves all sections from the configuration parser, including sections defined by built-in defaults.
+ Retrieve all sections from the configuration parser, including sections defined by built-in defaults.
:return: list of section names
"""
@@ -484,7 +484,7 @@ class AirflowConfigParser(ConfigParser):
def get_options_including_defaults(self, section: str) -> list[str]:
"""
- Retrieves all possible option from the configuration parser for the section given.
+ Retrieve all possible option from the configuration parser for the section given.
Includes options defined by built-in defaults.
@@ -501,7 +501,7 @@ class AirflowConfigParser(ConfigParser):
def optionxform(self, optionstr: str) -> str:
"""
- This method transforms option names on every read, get, or set operation.
+ Transform option names on every read, get, or set operation.
This changes from the default behaviour of ConfigParser from lower-casing
to instead be case-preserving.
@@ -543,7 +543,7 @@ class AirflowConfigParser(ConfigParser):
section_config_description: dict[str, str],
section_to_write: str,
) -> None:
- """Writes header for configuration section."""
+ """Write header for configuration section."""
file.write(f"[{section_to_write}]\n")
section_description = section_config_description.get("description")
if section_description and include_descriptions:
@@ -564,7 +564,8 @@ class AirflowConfigParser(ConfigParser):
section_to_write: str,
sources_dict: ConfigSourcesType,
) -> tuple[bool, bool]:
- """Writes header for configuration option.
+ """
+ Write header for configuration option.
Returns tuple of (should_continue, needs_separation) where needs_separation should be
set if the option needs additional separation to visually separate it from the next option.
@@ -652,7 +653,7 @@ class AirflowConfigParser(ConfigParser):
**kwargs: Any,
) -> None:
"""
- Writes configuration with comments and examples to a file.
+ Write configuration with comments and examples to a file.
:param file: file to write to
:param section: section of the config to write, defaults to all sections
@@ -1202,7 +1203,7 @@ class AirflowConfigParser(ConfigParser):
def getimport(self, section: str, key: str, **kwargs) -> Any:
"""
- Reads options, imports the full qualified name, and returns the object.
+ Read options, import the full qualified name, and return the object.
In case of failure, it throws an exception with the key and section names
@@ -1246,7 +1247,7 @@ class AirflowConfigParser(ConfigParser):
self, section: str, key: str, fallback: Any = None, **kwargs
) -> datetime.timedelta | None:
"""
- Gets the config value for the given section and key, and converts it into datetime.timedelta object.
+ Get the config value for the given section and key, and convert it into datetime.timedelta object.
If the key is missing, then it is considered as `None`.
@@ -1332,7 +1333,7 @@ class AirflowConfigParser(ConfigParser):
def getsection(self, section: str) -> ConfigOptionsDictType | None:
"""
- Returns the section as a dict.
+ Return the section as a dict.
Values are converted to int, float, bool as required.
@@ -1385,7 +1386,7 @@ class AirflowConfigParser(ConfigParser):
include_secret: bool = True,
) -> ConfigSourcesType:
"""
- Returns the current configuration as an OrderedDict of OrderedDicts.
+ Return the current configuration as an OrderedDict of OrderedDicts.
When materializing current configuration Airflow defaults are
materialized along with user set configs. If any of the `include_*`
@@ -1567,7 +1568,7 @@ class AirflowConfigParser(ConfigParser):
getter_func,
):
"""
- Deletes default configs from current configuration.
+ Delete default configs from current configuration.
An OrderedDict of OrderedDicts, if it would conflict with special sensitive_config_values.
@@ -1772,7 +1773,7 @@ class AirflowConfigParser(ConfigParser):
def load_test_config(self):
"""
- Uses test configuration rather than the configuration coming from airflow defaults.
+ Use test configuration rather than the configuration coming from airflow defaults.
When running tests we use special the unit_test configuration to avoid accidental modifications and
different behaviours when running the tests. Values for those test configuration are stored in
@@ -1796,7 +1797,7 @@ class AirflowConfigParser(ConfigParser):
log.info("Unit test configuration loaded from 'config_unit_tests.cfg'")
def expand_all_configuration_values(self):
- """Expands all configuration values using global and local variables
defined in this module."""
+ """Expand all configuration values using global and local variables
defined in this module."""
all_vars = get_all_expansion_variables()
for section in self.sections():
for key, value in self.items(section):
@@ -1809,7 +1810,7 @@ class AirflowConfigParser(ConfigParser):
self.set(section, key, value.format(**all_vars))
def remove_all_read_configurations(self):
- """Removes all read configurations, leaving only default values in the
config."""
+ """Remove all read configurations, leaving only default values in the
config."""
for section in self.sections():
self.remove_section(section)
@@ -1820,7 +1821,7 @@ class AirflowConfigParser(ConfigParser):
def load_providers_configuration(self):
"""
- Loads configuration for providers.
+ Load configuration for providers.
This should be done after initial configuration have been performed. Initializing and discovering
providers is an expensive operation and cannot be performed when we load configuration for the first
@@ -1931,7 +1932,7 @@ def _generate_fernet_key() -> str:
def create_default_config_parser(configuration_description: dict[str, dict[str, Any]]) -> ConfigParser:
"""
- Creates default config parser based on configuration description.
+ Create default config parser based on configuration description.
It creates ConfigParser with all default values retrieved from the configuration description and
expands all the variables from the global and local variables defined in this module.
@@ -1958,7 +1959,7 @@ def create_default_config_parser(configuration_description: dict[str, dict[str,
def create_pre_2_7_defaults() -> ConfigParser:
"""
- Creates parser using the old defaults from Airflow < 2.7.0.
+ Create parser using the old defaults from Airflow < 2.7.0.
This is used in order to be able to fall-back to those defaults when old version of provider,
not supporting "config contribution" is installed with Airflow 2.7.0+. This "default"
@@ -1996,7 +1997,7 @@ def write_default_airflow_configuration_if_needed() -> AirflowConfigParser:
def load_standard_airflow_configuration(airflow_config_parser: AirflowConfigParser):
"""
- Loads standard airflow configuration.
+ Load standard airflow configuration.
In case it finds that the configuration file is missing, it will create it and write the default
configuration values there, based on defaults passed, and will add the comments and examples
diff --git a/airflow/policies.py b/airflow/policies.py
index 47c3dffcb2..f37703fe2a 100644
--- a/airflow/policies.py
+++ b/airflow/policies.py
@@ -35,7 +35,7 @@ if TYPE_CHECKING:
@local_settings_hookspec
def task_policy(task: BaseOperator) -> None:
"""
- This policy setting allows altering tasks after they are loaded in the DagBag.
+ Allow altering tasks after they are loaded in the DagBag.
It allows administrator to rewire some task's parameters. Alternatively you can raise
``AirflowClusterPolicyViolation`` exception to stop DAG from being executed.
@@ -53,7 +53,7 @@ def task_policy(task: BaseOperator) -> None:
@local_settings_hookspec
def dag_policy(dag: DAG) -> None:
"""
- This policy setting allows altering DAGs after they are loaded in the DagBag.
+ Allow altering DAGs after they are loaded in the DagBag.
It allows administrator to rewire some DAG's parameters.
Alternatively you can raise ``AirflowClusterPolicyViolation`` exception
@@ -71,7 +71,7 @@ def dag_policy(dag: DAG) -> None:
@local_settings_hookspec
def task_instance_mutation_hook(task_instance: TaskInstance) -> None:
"""
- This setting allows altering task instances before being queued by the Airflow scheduler.
+ Allow altering task instances before being queued by the Airflow scheduler.
This could be used, for instance, to modify the task instance during retries.
@@ -108,7 +108,7 @@ def get_airflow_context_vars(context) -> dict[str, str]: # type: ignore[empty-b
@local_settings_hookspec(firstresult=True)
def get_dagbag_import_timeout(dag_file_path: str) -> int | float: # type: ignore[empty-body]
"""
- This setting allows for dynamic control of the DAG file parsing timeout based on the DAG file path.
+ Allow for dynamic control of the DAG file parsing timeout based on the DAG file path.
It is useful when there are a few DAG files requiring longer parsing times, while others do not.
You can control them separately instead of having one value for all DAG files.
diff --git a/airflow/sentry.py b/airflow/sentry.py
index 443063af8a..34133228ca 100644
--- a/airflow/sentry.py
+++ b/airflow/sentry.py
@@ -122,7 +122,7 @@ if conf.getboolean("sentry", "sentry_on", fallback=False):
sentry_sdk.init(integrations=integrations, **sentry_config_opts)
def add_tagging(self, task_instance):
- """Function to add tagging for a task_instance."""
+ """Add tagging for a task_instance."""
dag_run = task_instance.dag_run
task = task_instance.task
@@ -141,7 +141,7 @@ if conf.getboolean("sentry", "sentry_on", fallback=False):
task_instance: TaskInstance,
session: Session | None = None,
) -> None:
- """Function to add breadcrumbs inside of a task_instance."""
+ """Add breadcrumbs inside of a task_instance."""
if session is None:
return
dr = task_instance.get_dagrun(session)
diff --git a/airflow/settings.py b/airflow/settings.py
index 8e18cfcec7..4ba9e80bd7 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -118,7 +118,7 @@ def _get_rich_console(file):
def custom_show_warning(message, category, filename, lineno, file=None, line=None):
- """Custom function to print rich and visible warnings."""
+ """Print rich and visible warnings."""
# Delay imports until we need it
from rich.markup import escape
@@ -432,7 +432,7 @@ def prepare_syspath():
def get_session_lifetime_config():
- """Gets session timeout configs and handles outdated configs gracefully."""
+ """Get session timeout configs and handle outdated configs gracefully."""
session_lifetime_minutes = conf.get("webserver", "session_lifetime_minutes", fallback=None)
session_lifetime_days = conf.get("webserver", "session_lifetime_days", fallback=None)
uses_deprecated_lifetime_configs = session_lifetime_days or conf.get(
diff --git a/helm_tests/other/test_keda.py b/helm_tests/other/test_keda.py
index e171f3f6a9..de801118ee 100644
--- a/helm_tests/other/test_keda.py
+++ b/helm_tests/other/test_keda.py
@@ -83,7 +83,7 @@ class TestKeda:
@staticmethod
def build_query(executor, concurrency=16, queue=None):
- """Builds the query used by KEDA autoscaler to determine how many
workers there should be."""
+ """Build the query used by KEDA autoscaler to determine how many
workers there should be."""
query = (
f"SELECT ceil(COUNT(*)::decimal / {concurrency}) "
"FROM task_instance WHERE (state='running' OR state='queued')"
diff --git a/setup.py b/setup.py
index d0302981c5..7dc2e9f6eb 100644
--- a/setup.py
+++ b/setup.py
@@ -704,7 +704,7 @@ PACKAGES_EXCLUDED_FOR_ALL: list[str] = []
def is_package_excluded(package: str, exclusion_list: list[str]) -> bool:
"""
- Checks if package should be excluded.
+ Check if package should be excluded.
:param package: package name (beginning of it)
:param exclusion_list: list of excluded packages
@@ -715,7 +715,7 @@ def is_package_excluded(package: str, exclusion_list: list[str]) -> bool:
def remove_provider_limits(package: str) -> str:
"""
- Removes the limit for providers in devel_all to account for pre-release and development packages.
+ Remove the limit for providers in devel_all to account for pre-release and development packages.
:param package: package name (beginning of it)
:return: true if package should be excluded
@@ -749,7 +749,7 @@ EXTRAS_DEPENDENCIES["devel_ci"] = devel_ci
def sort_extras_dependencies() -> dict[str, list[str]]:
"""
- The dictionary order remains when keys() are retrieved.
+ Sort dependencies; the dictionary order remains when keys() are retrieved.
Sort both: extras and list of dependencies to make it easier to analyse problems
external packages will be first, then if providers are added they are added at the end of the lists.
@@ -777,7 +777,7 @@ PREINSTALLED_PROVIDERS = [
def get_provider_package_name_from_package_id(package_id: str) -> str:
"""
- Builds the name of provider package out of the package id provided/.
+ Build the name of provider package out of the package id provided.
:param package_id: id of the package (like amazon or microsoft.azure)
:return: full name of package in PyPI
@@ -796,12 +796,12 @@ def get_provider_package_name_from_package_id(package_id: str) -> str:
def get_excluded_providers() -> list[str]:
- """Returns packages excluded for the current python version."""
+ """Return packages excluded for the current python version."""
return []
def get_all_provider_packages() -> str:
- """Returns all provider packages configured in setup.py."""
+ """Return all provider packages configured in setup.py."""
excluded_providers = get_excluded_providers()
return " ".join(
get_provider_package_name_from_package_id(package)
@@ -844,7 +844,7 @@ class AirflowDistribution(Distribution):
def replace_extra_dependencies_with_provider_packages(extra: str, providers: list[str]) -> None:
"""
- Replaces extra dependencies with provider package.
+ Replace extra dependencies with provider package.
The intention here is that when the provider is added as dependency of extra, there is no
need to add the dependencies separately. This is not needed and even harmful, because in
@@ -897,7 +897,7 @@ def replace_extra_dependencies_with_provider_packages(extra: str, providers: lis
def add_provider_packages_to_extra_dependencies(extra: str, providers: list[str]) -> None:
"""
- Adds provider packages as dependencies to extra.
+ Add provider packages as dependencies to extra.
This is used to add provider packages as dependencies to the "bulk" kind of extras.
Those bulk extras do not have the detailed 'extra' dependencies as initial values,