This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new f5132681dc Completed D400 for multiple folders (#27748)
f5132681dc is described below
commit f5132681dcf6bdb03f1edd86bea293a1c262cdad
Author: Dov Benyomin Sohacheski <[email protected]>
AuthorDate: Fri Nov 18 03:22:49 2022 +0200
Completed D400 for multiple folders (#27748)
---
airflow/config_templates/airflow_local_settings.py | 2 +-
.../config_templates/default_webserver_config.py | 2 +-
airflow/configuration.py | 81 ++++++++++++----------
airflow/listeners/events.py | 4 +-
airflow/listeners/listener.py | 2 +-
airflow/sensors/base.py | 12 ++--
airflow/sensors/bash.py | 11 ++-
airflow/sensors/date_time.py | 4 +-
airflow/sensors/external_task.py | 11 +--
airflow/sensors/time_delta.py | 5 +-
airflow/sensors/time_sensor.py | 5 +-
airflow/sensors/weekday.py | 7 +-
airflow/task/task_runner/base_task_runner.py | 9 ++-
airflow/task/task_runner/cgroup_task_runner.py | 9 +--
airflow/task/task_runner/standard_task_runner.py | 2 +-
15 files changed, 92 insertions(+), 74 deletions(-)
diff --git a/airflow/config_templates/airflow_local_settings.py
b/airflow/config_templates/airflow_local_settings.py
index 1cedc1e3df..01edea7520 100644
--- a/airflow/config_templates/airflow_local_settings.py
+++ b/airflow/config_templates/airflow_local_settings.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-"""Airflow logging settings"""
+"""Airflow logging settings."""
from __future__ import annotations
import os
diff --git a/airflow/config_templates/default_webserver_config.py
b/airflow/config_templates/default_webserver_config.py
index 0f3ac255ce..ac999a0dea 100644
--- a/airflow/config_templates/default_webserver_config.py
+++ b/airflow/config_templates/default_webserver_config.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-"""Default configuration for the Airflow webserver"""
+"""Default configuration for the Airflow webserver."""
from __future__ import annotations
import os
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 9908aa55b0..32aa151b4d 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -83,9 +83,10 @@ def expand_env_var(env_var: str) -> str:
def expand_env_var(env_var: str | None) -> str | None:
"""
- Expands (potentially nested) env vars by repeatedly applying
- `expandvars` and `expanduser` until interpolation stops having
- any effect.
+ Expands (potentially nested) env vars.
+
+ Repeatedly apply `expandvars` and `expanduser` until
+ interpolation stops having any effect.
"""
if not env_var:
return env_var
@@ -98,7 +99,7 @@ def expand_env_var(env_var: str | None) -> str | None:
def run_command(command: str) -> str:
- """Runs command and returns stdout"""
+ """Runs command and returns stdout."""
process = subprocess.Popen(
shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE,
close_fds=True
)
@@ -114,7 +115,7 @@ def run_command(command: str) -> str:
def _get_config_value_from_secret_backend(config_key: str) -> str | None:
- """Get Config option values from Secret Backend"""
+ """Get Config option values from Secret Backend."""
try:
secrets_client = get_custom_secret_backend()
if not secrets_client:
@@ -136,7 +137,7 @@ def _default_config_file_path(file_name: str) -> str:
def default_config_yaml() -> list[dict[str, Any]]:
"""
- Read Airflow configs from YAML file
+ Read Airflow configs from YAML file.
:return: Python dictionary containing configs & their info
"""
@@ -159,7 +160,7 @@ SENSITIVE_CONFIG_VALUES = {
class AirflowConfigParser(ConfigParser):
- """Custom Airflow Configparser supporting defaults and deprecated
options"""
+ """Custom Airflow Configparser supporting defaults and deprecated
options."""
# These configuration elements can be fetched as the stdout of commands
# following the "{section}__{name}_cmd" pattern, the idea behind this
@@ -370,8 +371,9 @@ class AirflowConfigParser(ConfigParser):
def _upgrade_auth_backends(self):
"""
- Ensure a custom auth_backends setting contains session,
- which is needed by the UI for ajax queries.
+ Ensure a custom auth_backends setting contains session.
+
+ This is required by the UI for ajax queries.
"""
old_value = self.get("api", "auth_backends", fallback="")
if old_value in ("airflow.api.auth.backend.default", ""):
@@ -396,6 +398,8 @@ class AirflowConfigParser(ConfigParser):
def _upgrade_postgres_metastore_conn(self):
"""
+ Upgrade SQL schemas.
+
As of SQLAlchemy 1.4, schemes `postgres+psycopg2` and `postgres`
must be replaced with `postgresql`.
"""
@@ -421,7 +425,7 @@ class AirflowConfigParser(ConfigParser):
os.environ.pop(old_env_var, None)
def _validate_enums(self):
- """Validate that enum type config has an accepted value"""
+ """Validate that enum type config has an accepted value."""
for (section_key, option_key), enum_options in
self.enums_options.items():
if self.has_option(section_key, option_key):
value = self.get(section_key, option_key)
@@ -433,7 +437,9 @@ class AirflowConfigParser(ConfigParser):
def _validate_config_dependencies(self):
"""
- Validate that config values aren't invalid given other config values
+ Validate the config based on conditions.
+
+ Values are considered invalid when they conflict with other config
values
or system-level limitations and requirements.
"""
is_executor_without_sqlite_support = self.get("core", "executor") not
in (
@@ -521,7 +527,7 @@ class AirflowConfigParser(ConfigParser):
return None
def _get_secret_option(self, section: str, key: str) -> str | None:
- """Get Config option values from Secret Backend"""
+ """Get Config option values from Secret Backend."""
fallback_key = key + "_secret"
if (section, key) in self.sensitive_config_values:
if super().has_option(section, fallback_key):
@@ -756,6 +762,7 @@ class AirflowConfigParser(ConfigParser):
) -> datetime.timedelta | None:
"""
Gets the config value for the given section and key, and converts it
into datetime.timedelta object.
+
If the key is missing, then it is considered as `None`.
:param section: the section from the config
@@ -813,9 +820,10 @@ class AirflowConfigParser(ConfigParser):
def remove_option(self, section: str, option: str, remove_default: bool =
True):
"""
- Remove an option if it exists in config from a file or
- default config. If both of config have the same option, this removes
- the option in both configs unless remove_default=False.
+ Remove an option if it exists in config from a file or default config.
+
+ If both configs have the same option, this removes the option
+ in both configs unless remove_default=False.
"""
if super().has_option(section, option):
super().remove_option(section, option)
@@ -825,8 +833,9 @@ class AirflowConfigParser(ConfigParser):
def getsection(self, section: str) -> ConfigOptionsDictType | None:
"""
- Returns the section as a dict. Values are converted to int, float, bool
- as required.
+ Returns the section as a dict.
+
+ Values are converted to int, float, bool as required.
:param section: section from the config
"""
@@ -1071,8 +1080,9 @@ class AirflowConfigParser(ConfigParser):
getter_func,
):
"""
- Deletes default configs from current configuration (an OrderedDict of
- OrderedDicts) if it would conflict with special
sensitive_config_values.
+ Deletes default configs from current configuration.
+
+ The current configuration is an OrderedDict of OrderedDicts; defaults are
deleted if they would conflict with special
sensitive_config_values.
This is necessary because bare configs take precedence over the command
or secret key equivalents so if the current running config is
@@ -1300,12 +1310,12 @@ class AirflowConfigParser(ConfigParser):
def get_airflow_home() -> str:
- """Get path to Airflow Home"""
+ """Get path to Airflow Home."""
return expand_env_var(os.environ.get("AIRFLOW_HOME", "~/airflow"))
def get_airflow_config(airflow_home) -> str:
- """Get Path to airflow.cfg path"""
+ """Get Path to airflow.cfg path."""
airflow_config_var = os.environ.get("AIRFLOW_CONFIG")
if airflow_config_var is None:
return os.path.join(airflow_home, "airflow.cfg")
@@ -1326,8 +1336,7 @@ def _parameterized_config_from_template(filename) -> str:
def parameterized_config(template) -> str:
"""
- Generates a configuration from the provided template + variables defined in
- current scope
+ Generates configuration from provided template & variables defined in
current scope.
:param template: a config content templated with {{variables}}
"""
@@ -1336,7 +1345,7 @@ def parameterized_config(template) -> str:
def get_airflow_test_config(airflow_home) -> str:
- """Get path to unittests.cfg"""
+ """Get path to unittests.cfg."""
if "AIRFLOW_TEST_CONFIG" not in os.environ:
return os.path.join(airflow_home, "unittests.cfg")
# It will never return None
@@ -1432,7 +1441,7 @@ def initialize_config() -> AirflowConfigParser:
# Historical convenience functions to access config entries
def load_test_config():
- """Historical load_test_config"""
+ """Historical load_test_config."""
warnings.warn(
"Accessing configuration method 'load_test_config' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1444,7 +1453,7 @@ def load_test_config():
def get(*args, **kwargs) -> ConfigType | None:
- """Historical get"""
+ """Historical get."""
warnings.warn(
"Accessing configuration method 'get' directly from the configuration
module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1456,7 +1465,7 @@ def get(*args, **kwargs) -> ConfigType | None:
def getboolean(*args, **kwargs) -> bool:
- """Historical getboolean"""
+ """Historical getboolean."""
warnings.warn(
"Accessing configuration method 'getboolean' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1468,7 +1477,7 @@ def getboolean(*args, **kwargs) -> bool:
def getfloat(*args, **kwargs) -> float:
- """Historical getfloat"""
+ """Historical getfloat."""
warnings.warn(
"Accessing configuration method 'getfloat' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1480,7 +1489,7 @@ def getfloat(*args, **kwargs) -> float:
def getint(*args, **kwargs) -> int:
- """Historical getint"""
+ """Historical getint."""
warnings.warn(
"Accessing configuration method 'getint' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1492,7 +1501,7 @@ def getint(*args, **kwargs) -> int:
def getsection(*args, **kwargs) -> ConfigOptionsDictType | None:
- """Historical getsection"""
+ """Historical getsection."""
warnings.warn(
"Accessing configuration method 'getsection' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1504,7 +1513,7 @@ def getsection(*args, **kwargs) -> ConfigOptionsDictType
| None:
def has_option(*args, **kwargs) -> bool:
- """Historical has_option"""
+ """Historical has_option."""
warnings.warn(
"Accessing configuration method 'has_option' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1516,7 +1525,7 @@ def has_option(*args, **kwargs) -> bool:
def remove_option(*args, **kwargs) -> bool:
- """Historical remove_option"""
+ """Historical remove_option."""
warnings.warn(
"Accessing configuration method 'remove_option' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1528,7 +1537,7 @@ def remove_option(*args, **kwargs) -> bool:
def as_dict(*args, **kwargs) -> ConfigSourcesType:
- """Historical as_dict"""
+ """Historical as_dict."""
warnings.warn(
"Accessing configuration method 'as_dict' directly from the
configuration module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1540,7 +1549,7 @@ def as_dict(*args, **kwargs) -> ConfigSourcesType:
def set(*args, **kwargs) -> None:
- """Historical set"""
+ """Historical set."""
warnings.warn(
"Accessing configuration method 'set' directly from the configuration
module is "
"deprecated. Please access the configuration from the
'configuration.conf' object via "
@@ -1563,7 +1572,7 @@ def ensure_secrets_loaded() -> list[BaseSecretsBackend]:
def get_custom_secret_backend() -> BaseSecretsBackend | None:
- """Get Secret Backend if defined in airflow.cfg"""
+ """Get Secret Backend if defined in airflow.cfg."""
secrets_backend_cls = conf.getimport(section="secrets", key="backend")
if not secrets_backend_cls:
@@ -1587,6 +1596,8 @@ def get_custom_secret_backend() -> BaseSecretsBackend |
None:
def initialize_secrets_backends() -> list[BaseSecretsBackend]:
"""
+ Initialize secrets backend.
+
* import secrets backend classes
* instantiate them and return them in a list
"""
diff --git a/airflow/listeners/events.py b/airflow/listeners/events.py
index ca598d0004..53c113af8e 100644
--- a/airflow/listeners/events.py
+++ b/airflow/listeners/events.py
@@ -71,7 +71,7 @@ def on_task_instance_state_session_flush(session,
flush_context):
def register_task_instance_state_events():
- """Register a task instance state event"""
+ """Register a task instance state event."""
global _is_listening
if not _is_listening:
event.listen(Session, "after_flush",
on_task_instance_state_session_flush)
@@ -79,7 +79,7 @@ def register_task_instance_state_events():
def unregister_task_instance_state_events():
- """Unregister a task instance state event"""
+ """Unregister a task instance state event."""
global _is_listening
event.remove(Session, "after_flush", on_task_instance_state_session_flush)
_is_listening = False
diff --git a/airflow/listeners/listener.py b/airflow/listeners/listener.py
index a53f149807..2f20e67956 100644
--- a/airflow/listeners/listener.py
+++ b/airflow/listeners/listener.py
@@ -66,7 +66,7 @@ class ListenerManager:
def get_listener_manager() -> ListenerManager:
- """Get singleton listener manager"""
+ """Get singleton listener manager."""
global _listener_manager
if not _listener_manager:
_listener_manager = ListenerManager()
diff --git a/airflow/sensors/base.py b/airflow/sensors/base.py
index c64498f115..013edb31b9 100644
--- a/airflow/sensors/base.py
+++ b/airflow/sensors/base.py
@@ -57,6 +57,8 @@ def _is_metadatabase_mysql() -> bool:
class PokeReturnValue:
"""
+ Optional return value for poke methods.
+
Sensors can optionally return an instance of the PokeReturnValue class in
the poke method.
If an XCom value is supplied when the sensor is done, then the XCom value
will be
pushed through the operator return value.
@@ -158,10 +160,7 @@ class BaseSensorOperator(BaseOperator, SkipMixin):
)
def poke(self, context: Context) -> bool | PokeReturnValue:
- """
- Function that the sensors defined while deriving this class should
- override.
- """
+ """Function that sensors deriving this class should
override."""
raise AirflowException("Override me.")
def execute(self, context: Context) -> Any:
@@ -278,8 +277,9 @@ class BaseSensorOperator(BaseOperator, SkipMixin):
def poke_mode_only(cls):
"""
- Class Decorator for child classes of BaseSensorOperator to indicate
- that instances of this class are only safe to use poke mode.
+ Decorate a subclass of BaseSensorOperator with poke.
+
+ Indicate that instances of this class are only safe to use poke mode.
Will decorate all methods in the class to assert they did not change
the mode from 'poke'.
diff --git a/airflow/sensors/bash.py b/airflow/sensors/bash.py
index 1947c22051..f1fcb97139 100644
--- a/airflow/sensors/bash.py
+++ b/airflow/sensors/bash.py
@@ -28,8 +28,9 @@ from airflow.utils.context import Context
class BashSensor(BaseSensorOperator):
"""
- Executes a bash command/script and returns True if and only if the
- return code is 0.
+ Executes a bash command/script.
+
+ Return True if and only if the return code is 0.
:param bash_command: The command, set of commands or reference to a
bash script (must be '.sh') to be executed.
@@ -43,7 +44,6 @@ class BashSensor(BaseSensorOperator):
.. seealso::
For more information on how to use this sensor,take a look at the
guide:
:ref:`howto/operator:BashSensor`
-
"""
template_fields: Sequence[str] = ("bash_command", "env")
@@ -55,10 +55,7 @@ class BashSensor(BaseSensorOperator):
self.output_encoding = output_encoding
def poke(self, context: Context):
- """
- Execute the bash command in a temporary directory
- which will be cleaned afterwards
- """
+ """Execute the bash command in a temporary directory."""
bash_command = self.bash_command
self.log.info("Tmp dir root location: \n %s", gettempdir())
with TemporaryDirectory(prefix="airflowtmp") as tmp_dir:
diff --git a/airflow/sensors/date_time.py b/airflow/sensors/date_time.py
index 838b481902..19168e98f3 100644
--- a/airflow/sensors/date_time.py
+++ b/airflow/sensors/date_time.py
@@ -77,9 +77,9 @@ class DateTimeSensor(BaseSensorOperator):
class DateTimeSensorAsync(DateTimeSensor):
"""
- Waits until the specified datetime, deferring itself to avoid taking up
- a worker slot while it is waiting.
+ Waits until the specified datetime occurs.
+ It defers itself to avoid taking up a worker slot while it is waiting.
It is a drop-in replacement for DateTimeSensor.
:param target_time: datetime after which the job succeeds. (templated)
diff --git a/airflow/sensors/external_task.py b/airflow/sensors/external_task.py
index 3b20531a60..e9573a0671 100644
--- a/airflow/sensors/external_task.py
+++ b/airflow/sensors/external_task.py
@@ -45,6 +45,7 @@ if TYPE_CHECKING:
class ExternalDagLink(BaseOperatorLink):
"""
Operator link for ExternalTaskSensor and ExternalTaskMarker.
+
It allows users to access DAG waited with ExternalTaskSensor or cleared by
ExternalTaskMarker.
"""
@@ -59,14 +60,13 @@ class ExternalDagLink(BaseOperatorLink):
class ExternalTaskSensor(BaseSensorOperator):
"""
- Waits for a different DAG, a task group, or a task in a different DAG to
complete for a
- specific logical date.
+ Waits for a different DAG, task group, or task to complete for a specific
logical date.
If both `external_task_group_id` and `external_task_id` are ``None``
(default), the sensor
waits for the DAG.
Values for `external_task_group_id` and `external_task_id` can't be set at
the same time.
- By default the ExternalTaskSensor will wait for the external task to
+ By default, the ExternalTaskSensor will wait for the external task to
succeed, at which point it will also succeed. However, by default it will
*not* fail if the external task fails, but will continue to check the
status
until the sensor times out (thus giving you time to retry the external task
@@ -287,7 +287,7 @@ class ExternalTaskSensor(BaseSensorOperator):
def get_count(self, dttm_filter, session, states) -> int:
"""
- Get the count of records against dttm filter and states
+ Get the count of records against dttm filter and states.
:param dttm_filter: date time filter for execution date
:param session: airflow session object
@@ -337,6 +337,8 @@ class ExternalTaskSensor(BaseSensorOperator):
def _handle_execution_date_fn(self, context) -> Any:
"""
+ Handle backward compatibility.
+
This function is to handle backwards compatibility with how this
operator was
previously where it only passes the execution date, but also allow for
the newer
implementation to pass all context variables as keyword arguments, to
allow
@@ -359,6 +361,7 @@ class ExternalTaskSensor(BaseSensorOperator):
class ExternalTaskMarker(EmptyOperator):
"""
Use this operator to indicate that a task on a different DAG depends on
this task.
+
When this task is cleared with "Recursive" selected, Airflow will clear
the task on
the other DAG and its downstream tasks recursively. Transitive
dependencies are followed
until the recursion_depth is reached.
diff --git a/airflow/sensors/time_delta.py b/airflow/sensors/time_delta.py
index 6facad52e3..a73d123c3d 100644
--- a/airflow/sensors/time_delta.py
+++ b/airflow/sensors/time_delta.py
@@ -49,8 +49,9 @@ class TimeDeltaSensor(BaseSensorOperator):
class TimeDeltaSensorAsync(TimeDeltaSensor):
"""
- A drop-in replacement for TimeDeltaSensor that defers itself to avoid
- taking up a worker slot while it is waiting.
+ A deferrable drop-in replacement for TimeDeltaSensor.
+
+ It will defer itself to avoid taking up a worker slot while it is waiting.
:param delta: time length to wait after the data interval before
succeeding.
diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py
index c08fa4639a..12b26d06bd 100644
--- a/airflow/sensors/time_sensor.py
+++ b/airflow/sensors/time_sensor.py
@@ -48,8 +48,9 @@ class TimeSensor(BaseSensorOperator):
class TimeSensorAsync(BaseSensorOperator):
"""
- Waits until the specified time of the day, freeing up a worker slot while
- it is waiting.
+ Waits until the specified time of the day.
+
+ This frees up a worker slot while it is waiting.
:param target_time: time after which the job succeeds
diff --git a/airflow/sensors/weekday.py b/airflow/sensors/weekday.py
index fe3bf9be52..1cc3e36ddf 100644
--- a/airflow/sensors/weekday.py
+++ b/airflow/sensors/weekday.py
@@ -29,9 +29,10 @@ from airflow.utils.weekday import WeekDay
class DayOfWeekSensor(BaseSensorOperator):
"""
- Waits until the first specified day of the week. For example, if the
execution
- day of the task is '2018-12-22' (Saturday) and you pass 'FRIDAY', the task
will wait
- until next Friday.
+ Waits until the first specified day of the week.
+
+ For example, if the execution day of the task is '2018-12-22' (Saturday)
+ and you pass 'FRIDAY', the task will wait until next Friday.
**Example** (with single day): ::
diff --git a/airflow/task/task_runner/base_task_runner.py
b/airflow/task/task_runner/base_task_runner.py
index 952fee405a..095c9b7300 100644
--- a/airflow/task/task_runner/base_task_runner.py
+++ b/airflow/task/task_runner/base_task_runner.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-"""Base task runner"""
+"""Base task runner."""
from __future__ import annotations
import os
@@ -42,8 +42,9 @@ PYTHONPATH_VAR = "PYTHONPATH"
class BaseTaskRunner(LoggingMixin):
"""
- Runs Airflow task instances by invoking the `airflow tasks run` command
with raw
- mode enabled in a subprocess.
+ Runs Airflow task instances via CLI.
+
+ Invoke the `airflow tasks run` command with raw mode enabled in a
subprocess.
:param local_task_job: The local task job associated with running the
associated task instance.
@@ -166,6 +167,8 @@ class BaseTaskRunner(LoggingMixin):
def return_code(self, timeout: int = 0) -> int | None:
"""
+ Extract the return code.
+
:return: The return code associated with running the task instance or
None if the task is not yet done.
"""
diff --git a/airflow/task/task_runner/cgroup_task_runner.py
b/airflow/task/task_runner/cgroup_task_runner.py
index 86770c6792..0bd3f616de 100644
--- a/airflow/task/task_runner/cgroup_task_runner.py
+++ b/airflow/task/task_runner/cgroup_task_runner.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-"""Task runner for cgroup to run Airflow task"""
+"""Task runner for cgroup to run Airflow task."""
from __future__ import annotations
import datetime
@@ -33,9 +33,10 @@ from airflow.utils.process_utils import reap_process_group
class CgroupTaskRunner(BaseTaskRunner):
"""
- Runs the raw Airflow task in a cgroup that has containment for memory and
- cpu. It uses the resource requirements defined in the task to construct
- the settings for the cgroup.
+ Runs the raw Airflow task in a cgroup container.
+
+ The cgroup provides containment for memory and cpu. It uses the resource requirements
+ defined in the task to construct the settings for the cgroup.
Cgroup must be mounted first otherwise CgroupTaskRunner
will not be able to work.
diff --git a/airflow/task/task_runner/standard_task_runner.py
b/airflow/task/task_runner/standard_task_runner.py
index 620633e74c..4d2d55e927 100644
--- a/airflow/task/task_runner/standard_task_runner.py
+++ b/airflow/task/task_runner/standard_task_runner.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-"""Standard task runner"""
+"""Standard task runner."""
from __future__ import annotations
import logging