This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 44a752aa96 D401 Support - Secrets to Triggers (Inclusive) (#33338)
44a752aa96 is described below

commit 44a752aa96d5aa4263f4ac1725642084fd612a48
Author: D. Ferruzzi <[email protected]>
AuthorDate: Sun Aug 13 16:19:21 2023 -0700

    D401 Support - Secrets to Triggers (Inclusive) (#33338)
---
 airflow/secrets/cache.py                         | 18 +++++++++++-------
 airflow/secrets/local_filesystem.py              |  6 +++---
 airflow/security/permissions.py                  |  2 +-
 airflow/security/utils.py                        |  6 +++---
 airflow/sensors/base.py                          |  4 ++--
 airflow/sensors/date_time.py                     |  4 ++--
 airflow/sensors/external_task.py                 |  4 ++--
 airflow/sensors/time_delta.py                    |  2 +-
 airflow/sensors/time_sensor.py                   |  2 +-
 airflow/serialization/serde.py                   |  4 ++--
 airflow/serialization/serialized_objects.py      | 20 +++++++++++---------
 airflow/task/task_runner/base_task_runner.py     |  2 +-
 airflow/template/templater.py                    |  5 +++--
 airflow/ti_deps/dep_context.py                   |  2 +-
 airflow/ti_deps/deps/base_ti_dep.py              |  6 +++---
 airflow/ti_deps/deps/dagrun_backfill_dep.py      |  2 +-
 airflow/ti_deps/deps/pool_slots_available_dep.py |  2 +-
 airflow/ti_deps/deps/ready_to_reschedule.py      |  2 +-
 airflow/timetables/interval.py                   |  3 ++-
 airflow/triggers/base.py                         |  4 ++--
 airflow/triggers/external_task.py                | 18 ++++++++----------
 airflow/triggers/file.py                         |  2 +-
 22 files changed, 63 insertions(+), 57 deletions(-)

diff --git a/airflow/secrets/cache.py b/airflow/secrets/cache.py
index c4f4ec5065..25b315efcd 100644
--- a/airflow/secrets/cache.py
+++ b/airflow/secrets/cache.py
@@ -46,7 +46,11 @@ class SecretCache:
 
     @classmethod
     def init(cls):
-        """Initializes the cache, provided the configuration allows it. Safe 
to call several times."""
+        """
+        Initialize the cache, provided the configuration allows it.
+
+        Safe to call several times.
+        """
         if cls._cache is not None:
             return
         use_cache = conf.getboolean(section="secrets", key="use_cache", 
fallback=False)
@@ -62,13 +66,13 @@ class SecretCache:
 
     @classmethod
     def reset(cls):
-        """For test purposes only."""
+        """Use for test purposes only."""
         cls._cache = None
 
     @classmethod
     def get_variable(cls, key: str) -> str | None:
         """
-        Tries to get the value associated with the key from the cache.
+        Try to get the value associated with the key from the cache.
 
         :return: The saved value (which can be None) if present in cache and 
not expired,
             a NotPresent exception otherwise.
@@ -78,7 +82,7 @@ class SecretCache:
     @classmethod
     def get_connection_uri(cls, conn_id: str) -> str:
         """
-        Tries to get the uri associated with the conn_id from the cache.
+        Try to get the uri associated with the conn_id from the cache.
 
         :return: The saved uri if present in cache and not expired,
             a NotPresent exception otherwise.
@@ -101,12 +105,12 @@ class SecretCache:
 
     @classmethod
     def save_variable(cls, key: str, value: str | None):
-        """Saves the value for that key in the cache, if initialized."""
+        """Save the value for that key in the cache, if initialized."""
         cls._save(key, value, cls._VARIABLE_PREFIX)
 
     @classmethod
     def save_connection_uri(cls, conn_id: str, uri: str):
-        """Saves the uri representation for that connection in the cache, if 
initialized."""
+        """Save the uri representation for that connection in the cache, if 
initialized."""
         if uri is None:
             # connections raise exceptions if not present, so we shouldn't 
have any None value to save.
             return
@@ -119,7 +123,7 @@ class SecretCache:
 
     @classmethod
     def invalidate_variable(cls, key: str):
-        """Invalidates (actually removes) the value stored in the cache for 
that Variable."""
+        """Invalidate (actually remove) the value stored in the cache for 
that Variable."""
         if cls._cache is not None:
             # second arg ensures no exception if key is absent
             cls._cache.pop(f"{cls._VARIABLE_PREFIX}{key}", None)
diff --git a/airflow/secrets/local_filesystem.py 
b/airflow/secrets/local_filesystem.py
index 7490c8e25c..742b5c4589 100644
--- a/airflow/secrets/local_filesystem.py
+++ b/airflow/secrets/local_filesystem.py
@@ -46,7 +46,7 @@ if TYPE_CHECKING:
 
 
 def get_connection_parameter_names() -> set[str]:
-    """Returns :class:`airflow.models.connection.Connection` constructor 
parameters."""
+    """Return :class:`airflow.models.connection.Connection` constructor 
parameters."""
     from airflow.models.connection import Connection
 
     return {k for k in signature(Connection.__init__).parameters.keys() if k 
!= "self"}
@@ -186,7 +186,7 @@ def _parse_secret_file(file_path: str) -> dict[str, Any]:
 
 
 def _create_connection(conn_id: str, value: Any):
-    """Creates a connection based on a URL or JSON object."""
+    """Create a connection based on a URL or JSON object."""
     from airflow.models.connection import Connection
 
     if isinstance(value, str):
@@ -243,7 +243,7 @@ def load_variables(file_path: str) -> dict[str, str]:
 
 
 def load_connections(file_path) -> dict[str, list[Any]]:
-    """Deprecated: Please use 
`airflow.secrets.local_filesystem.load_connections_dict`."""
+    """Use `airflow.secrets.local_filesystem.load_connections_dict`; this is 
deprecated."""
     warnings.warn(
         "This function is deprecated. Please use 
`airflow.secrets.local_filesystem.load_connections_dict`.",
         RemovedInAirflow3Warning,
diff --git a/airflow/security/permissions.py b/airflow/security/permissions.py
index 3259b48dc3..a5c862c170 100644
--- a/airflow/security/permissions.py
+++ b/airflow/security/permissions.py
@@ -70,7 +70,7 @@ DAG_ACTIONS = {ACTION_CAN_READ, ACTION_CAN_EDIT, 
ACTION_CAN_DELETE}
 
 
 def resource_name_for_dag(root_dag_id: str) -> str:
-    """Returns the resource name for a DAG id.
+    """Return the resource name for a DAG id.
 
     Note that since a sub-DAG should follow the permission of its
     parent DAG, you should pass ``DagModel.root_dag_id`` to this function,
diff --git a/airflow/security/utils.py b/airflow/security/utils.py
index 139e96a13c..9ad7fc10e2 100644
--- a/airflow/security/utils.py
+++ b/airflow/security/utils.py
@@ -54,7 +54,7 @@ def get_components(principal) -> list[str] | None:
 
 
 def replace_hostname_pattern(components, host=None):
-    """Replaces hostname with the right pattern including lowercase of the 
name."""
+    """Replace hostname with the right pattern including lowercase of the 
name."""
     fqdn = host
     if not fqdn or fqdn == "0.0.0.0":
         fqdn = get_hostname()
@@ -62,7 +62,7 @@ def replace_hostname_pattern(components, host=None):
 
 
 def get_fqdn(hostname_or_ip=None):
-    """Retrieves FQDN - hostname for the IP or hostname."""
+    """Retrieve FQDN - hostname for the IP or hostname."""
     try:
         if hostname_or_ip:
             fqdn = socket.gethostbyaddr(hostname_or_ip)[0]
@@ -77,7 +77,7 @@ def get_fqdn(hostname_or_ip=None):
 
 
 def principal_from_username(username, realm):
-    """Retrieves principal from the user name and realm."""
+    """Retrieve principal from the username and realm."""
     if ("@" not in username) and realm:
         username = f"{username}@{realm}"
 
diff --git a/airflow/sensors/base.py b/airflow/sensors/base.py
index e217727785..53d9b661ed 100644
--- a/airflow/sensors/base.py
+++ b/airflow/sensors/base.py
@@ -199,7 +199,7 @@ class BaseSensorOperator(BaseOperator, SkipMixin):
                 )
 
     def poke(self, context: Context) -> bool | PokeReturnValue:
-        """Function defined by the sensors while deriving this class should 
override."""
+        """Override when deriving this class."""
         raise AirflowException("Override me.")
 
     def execute(self, context: Context) -> Any:
@@ -287,7 +287,7 @@ class BaseSensorOperator(BaseOperator, SkipMixin):
         run_duration: Callable[[], float],
         try_number: int,
     ) -> float:
-        """Using the similar logic which is used for exponential backoff retry 
delay for operators."""
+        """Use the same logic as the exponential backoff retry delay 
for operators."""
         if not self.exponential_backoff:
             return self.poke_interval
 
diff --git a/airflow/sensors/date_time.py b/airflow/sensors/date_time.py
index 19168e98f3..1425028870 100644
--- a/airflow/sensors/date_time.py
+++ b/airflow/sensors/date_time.py
@@ -77,7 +77,7 @@ class DateTimeSensor(BaseSensorOperator):
 
 class DateTimeSensorAsync(DateTimeSensor):
     """
-    Waits until the specified datetime occurs.
+    Wait until the specified datetime occurs.
 
     Deferring itself to avoid taking up a worker slot while it is waiting.
     It is a drop-in replacement for DateTimeSensor.
@@ -92,5 +92,5 @@ class DateTimeSensorAsync(DateTimeSensor):
         )
 
     def execute_complete(self, context, event=None):
-        """Callback for when the trigger fires - returns immediately."""
+        """Execute when the trigger fires - returns immediately."""
         return None
diff --git a/airflow/sensors/external_task.py b/airflow/sensors/external_task.py
index 5e42820ffe..2406f274aa 100644
--- a/airflow/sensors/external_task.py
+++ b/airflow/sensors/external_task.py
@@ -349,7 +349,7 @@ class ExternalTaskSensor(BaseSensorOperator):
             )
 
     def execute_complete(self, context, event=None):
-        """Callback for when the trigger fires - returns immediately."""
+        """Execute when the trigger fires - return immediately."""
         if event["status"] == "success":
             self.log.info("External task %s has executed successfully.", 
self.external_task_id)
             return None
@@ -522,7 +522,7 @@ class ExternalTaskMarker(EmptyOperator):
 
     @classmethod
     def get_serialized_fields(cls):
-        """Serialized ExternalTaskMarker contain exactly these fields + 
templated_fields ."""
+        """Serialize ExternalTaskMarker to contain exactly these fields + 
templated_fields."""
         if not cls.__serialized_fields:
             cls.__serialized_fields = 
frozenset(super().get_serialized_fields() | {"recursion_depth"})
         return cls.__serialized_fields
diff --git a/airflow/sensors/time_delta.py b/airflow/sensors/time_delta.py
index a73d123c3d..1571334757 100644
--- a/airflow/sensors/time_delta.py
+++ b/airflow/sensors/time_delta.py
@@ -67,5 +67,5 @@ class TimeDeltaSensorAsync(TimeDeltaSensor):
         self.defer(trigger=DateTimeTrigger(moment=target_dttm), 
method_name="execute_complete")
 
     def execute_complete(self, context, event=None):
-        """Callback for when the trigger fires - returns immediately."""
+        """Execute when the trigger fires - return immediately."""
         return None
diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py
index 12b26d06bd..7f6809851a 100644
--- a/airflow/sensors/time_sensor.py
+++ b/airflow/sensors/time_sensor.py
@@ -76,5 +76,5 @@ class TimeSensorAsync(BaseSensorOperator):
         )
 
     def execute_complete(self, context, event=None):
-        """Callback for when the trigger fires - returns immediately."""
+        """Execute when the trigger fires - returns immediately."""
         return None
diff --git a/airflow/serialization/serde.py b/airflow/serialization/serde.py
index a9a09d86db..5e5908df90 100644
--- a/airflow/serialization/serde.py
+++ b/airflow/serialization/serde.py
@@ -65,7 +65,7 @@ _builtin_collections = (frozenset, list, set, tuple)  # dict 
is treated speciall
 
 
 def encode(cls: str, version: int, data: T) -> dict[str, str | int | T]:
-    """Encodes o so it can be understood by the deserializer."""
+    """Encode an object so it can be understood by the deserializer."""
     return {CLASSNAME: cls, VERSION: version, DATA: data}
 
 
@@ -274,7 +274,7 @@ def deserialize(o: T | None, full=True, type_hint: Any = 
None) -> object:
 
 
 def _convert(old: dict) -> dict:
-    """Converts an old style serialization to new style."""
+    """Convert an old style serialization to new style."""
     if OLD_TYPE in old and OLD_DATA in old:
         # Return old style dicts directly as they do not need wrapping
         if old[OLD_TYPE] == OLD_DICT:
diff --git a/airflow/serialization/serialized_objects.py 
b/airflow/serialization/serialized_objects.py
index 5feb000edc..17a084f20e 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -364,7 +364,7 @@ class BaseSerialization:
     def serialize_to_json(
         cls, object_to_serialize: BaseOperator | MappedOperator | DAG, 
decorated_fields: set
     ) -> dict[str, Any]:
-        """Serializes an object to JSON."""
+        """Serialize an object to JSON."""
         serialized_object: dict[str, Any] = {}
         keys_to_serialize = object_to_serialize.get_serialized_fields()
         for key in keys_to_serialize:
@@ -394,7 +394,8 @@ class BaseSerialization:
     def serialize(
         cls, var: Any, *, strict: bool = False, use_pydantic_models: bool = 
False
     ) -> Any:  # Unfortunately there is no support for recursive types in mypy
-        """Helper function of depth first search for serialization.
+        """
+        Serialize an object; helper function of depth first search for 
serialization.
 
         The serialization protocol is:
 
@@ -510,7 +511,8 @@ class BaseSerialization:
 
     @classmethod
     def deserialize(cls, encoded_var: Any, use_pydantic_models=False) -> Any:
-        """Helper function of depth first search for deserialization.
+        """
+        Deserialize an object; helper function of depth first search for 
deserialization.
 
         :meta private:
         """
@@ -690,7 +692,7 @@ class DependencyDetector:
 
     @staticmethod
     def detect_task_dependencies(task: Operator) -> list[DagDependency]:
-        """Detects dependencies caused by tasks."""
+        """Detect dependencies caused by tasks."""
         from airflow.operators.trigger_dagrun import TriggerDagRunOperator
         from airflow.sensors.external_task import ExternalTaskSensor
 
@@ -727,7 +729,7 @@ class DependencyDetector:
 
     @staticmethod
     def detect_dag_dependencies(dag: DAG | None) -> Iterable[DagDependency]:
-        """Detects dependencies set directly on the DAG object."""
+        """Detect dependencies set directly on the DAG object."""
         if not dag:
             return
         for x in dag.dataset_triggers:
@@ -826,7 +828,7 @@ class SerializedBaseOperator(BaseOperator, 
BaseSerialization):
 
     @classmethod
     def _serialize_node(cls, op: BaseOperator | MappedOperator, include_deps: 
bool) -> dict[str, Any]:
-        """Serializes operator into a JSON object."""
+        """Serialize operator into a JSON object."""
         serialize_op = cls.serialize_to_json(op, cls._decorated_fields)
         serialize_op["_task_type"] = getattr(op, "_task_type", 
type(op).__name__)
         serialize_op["_task_module"] = getattr(op, "_task_module", 
type(op).__module__)
@@ -1074,7 +1076,7 @@ class SerializedBaseOperator(BaseOperator, 
BaseSerialization):
 
     @classmethod
     def detect_dependencies(cls, op: Operator) -> set[DagDependency]:
-        """Detects between DAG dependencies for the operator."""
+        """Detect between-DAG dependencies for the operator."""
 
         def get_custom_dep() -> list[DagDependency]:
             """
@@ -1270,7 +1272,7 @@ class SerializedDAG(DAG, BaseSerialization):
 
     @classmethod
     def serialize_dag(cls, dag: DAG) -> dict:
-        """Serializes a DAG into a JSON object."""
+        """Serialize a DAG into a JSON object."""
         try:
             serialized_dag = cls.serialize_to_json(dag, cls._decorated_fields)
 
@@ -1404,7 +1406,7 @@ class TaskGroupSerialization(BaseSerialization):
 
     @classmethod
     def serialize_task_group(cls, task_group: TaskGroup) -> dict[str, Any] | 
None:
-        """Serializes TaskGroup into a JSON object."""
+        """Serialize TaskGroup into a JSON object."""
         if not task_group:
             return None
 
diff --git a/airflow/task/task_runner/base_task_runner.py 
b/airflow/task/task_runner/base_task_runner.py
index 64523b17c5..c1045280dc 100644
--- a/airflow/task/task_runner/base_task_runner.py
+++ b/airflow/task/task_runner/base_task_runner.py
@@ -178,7 +178,7 @@ class BaseTaskRunner(LoggingMixin):
         raise NotImplementedError()
 
     def on_finish(self) -> None:
-        """A callback that should be called when this is done running."""
+        """Execute when this is done running."""
         if self._cfg_path and os.path.isfile(self._cfg_path):
             if self.run_as_user:
                 subprocess.call(["sudo", "rm", self._cfg_path], close_fds=True)
diff --git a/airflow/template/templater.py b/airflow/template/templater.py
index 83c4e763f8..2d2033696b 100644
--- a/airflow/template/templater.py
+++ b/airflow/template/templater.py
@@ -56,14 +56,15 @@ class Templater(LoggingMixin):
         return SandboxedEnvironment(cache_size=0)
 
     def prepare_template(self) -> None:
-        """Hook triggered after the templated fields get replaced by their 
content.
+        """
+        Execute after the templated fields get replaced by their content.
 
         If you need your object to alter the content of the file before the
         template is rendered, it should override this method to do so.
         """
 
     def resolve_template_files(self) -> None:
-        """Getting the content of files for template_field / template_ext."""
+        """Get the content of files for template_field / template_ext."""
         if self.template_ext:
             for field in self.template_fields:
                 content = getattr(self, field, None)
diff --git a/airflow/ti_deps/dep_context.py b/airflow/ti_deps/dep_context.py
index 5a19c5bfc7..bd4c0db461 100644
--- a/airflow/ti_deps/dep_context.py
+++ b/airflow/ti_deps/dep_context.py
@@ -85,7 +85,7 @@ class DepContext:
 
     def ensure_finished_tis(self, dag_run: DagRun, session: Session) -> 
list[TaskInstance]:
         """
-        Ensures finished_tis is populated if it's currently None, which allows 
running tasks without dag_run.
+        Ensure finished_tis is populated if it's currently None, which allows 
running tasks without dag_run.
 
          :param dag_run: The DagRun for which to find finished tasks
          :return: A list of all the finished tasks of this DAG and 
execution_date
diff --git a/airflow/ti_deps/deps/base_ti_dep.py 
b/airflow/ti_deps/deps/base_ti_dep.py
index b4b34ae444..ef18f3fcdf 100644
--- a/airflow/ti_deps/deps/base_ti_dep.py
+++ b/airflow/ti_deps/deps/base_ti_dep.py
@@ -90,7 +90,7 @@ class BaseTIDep:
         dep_context: DepContext | None = None,
     ) -> Iterator[TIDepStatus]:
         """
-        Wrapper around the private _get_dep_statuses method.
+        Wrap around the private _get_dep_statuses method.
 
         Contains some global checks for all dependencies.
 
@@ -113,7 +113,7 @@ class BaseTIDep:
     @provide_session
     def is_met(self, ti: TaskInstance, session: Session, dep_context: 
DepContext | None = None) -> bool:
         """
-        Returns whether a dependency is met for a given task instance.
+        Return whether a dependency is met for a given task instance.
 
         A dependency is considered met if all the dependency statuses it 
reports are passing.
 
@@ -132,7 +132,7 @@ class BaseTIDep:
         dep_context: DepContext | None = None,
     ) -> Iterator[str]:
         """
-        Returns an iterable of strings that explain why this dependency wasn't 
met.
+        Return an iterable of strings that explain why this dependency wasn't 
met.
 
         :param ti: the task instance to see if this dependency is met for
         :param session: database session
diff --git a/airflow/ti_deps/deps/dagrun_backfill_dep.py 
b/airflow/ti_deps/deps/dagrun_backfill_dep.py
index d33e1af593..6d0a1efad2 100644
--- a/airflow/ti_deps/deps/dagrun_backfill_dep.py
+++ b/airflow/ti_deps/deps/dagrun_backfill_dep.py
@@ -32,7 +32,7 @@ class DagRunNotBackfillDep(BaseTIDep):
     @provide_session
     def _get_dep_statuses(self, ti, session, dep_context=None):
         """
-        Determines if the DagRun is valid for scheduling from scheduler.
+        Determine if the DagRun is valid for scheduling from scheduler.
 
         :param ti: the task instance to get the dependency status for
         :param session: database session
diff --git a/airflow/ti_deps/deps/pool_slots_available_dep.py 
b/airflow/ti_deps/deps/pool_slots_available_dep.py
index e095cfb68e..6263dc2ed1 100644
--- a/airflow/ti_deps/deps/pool_slots_available_dep.py
+++ b/airflow/ti_deps/deps/pool_slots_available_dep.py
@@ -31,7 +31,7 @@ class PoolSlotsAvailableDep(BaseTIDep):
     @provide_session
     def _get_dep_statuses(self, ti, session, dep_context=None):
         """
-        Determines if the pool task instance is in has available slots.
+        Determine if the pool task instance has available slots.
 
         :param ti: the task instance to get the dependency status for
         :param session: database session
diff --git a/airflow/ti_deps/deps/ready_to_reschedule.py 
b/airflow/ti_deps/deps/ready_to_reschedule.py
index 4fca6f5538..0eaa52c1eb 100644
--- a/airflow/ti_deps/deps/ready_to_reschedule.py
+++ b/airflow/ti_deps/deps/ready_to_reschedule.py
@@ -36,7 +36,7 @@ class ReadyToRescheduleDep(BaseTIDep):
     @provide_session
     def _get_dep_statuses(self, ti, session, dep_context):
         """
-        Determines whether a task is ready to be rescheduled.
+        Determine whether a task is ready to be rescheduled.
 
         Only tasks in NONE state with at least one row in task_reschedule 
table are
         handled by this dependency class, otherwise this dependency is 
considered as passed.
diff --git a/airflow/timetables/interval.py b/airflow/timetables/interval.py
index 50478c6f75..27e128ff52 100644
--- a/airflow/timetables/interval.py
+++ b/airflow/timetables/interval.py
@@ -185,7 +185,8 @@ class DeltaDataIntervalTimetable(_DataIntervalTimetable):
         return cls(datetime.timedelta(seconds=delta))
 
     def __eq__(self, other: Any) -> bool:
-        """The offset should match.
+        """
+        Return if the offsets match.
 
         This is only for testing purposes and should not be relied on 
otherwise.
         """
diff --git a/airflow/triggers/base.py b/airflow/triggers/base.py
index 87dd12f317..4612c4edbf 100644
--- a/airflow/triggers/base.py
+++ b/airflow/triggers/base.py
@@ -50,7 +50,7 @@ class BaseTrigger(abc.ABC, LoggingMixin):
     @abc.abstractmethod
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """
-        Returns the information needed to reconstruct this Trigger.
+        Return the information needed to reconstruct this Trigger.
 
         :return: Tuple of (class path, keyword arguments needed to 
re-instantiate).
         """
@@ -59,7 +59,7 @@ class BaseTrigger(abc.ABC, LoggingMixin):
     @abc.abstractmethod
     async def run(self) -> AsyncIterator[TriggerEvent]:
         """
-        Runs the trigger in an asynchronous context.
+        Run the trigger in an asynchronous context.
 
         The trigger should yield an Event whenever it wants to fire off
         an event, and return None if it is finished. Single-event triggers
diff --git a/airflow/triggers/external_task.py 
b/airflow/triggers/external_task.py
index f179cba259..74742fcccb 100644
--- a/airflow/triggers/external_task.py
+++ b/airflow/triggers/external_task.py
@@ -69,7 +69,7 @@ class TaskStateTrigger(BaseTrigger):
         self._timeout_sec = 60
 
     def serialize(self) -> tuple[str, dict[str, typing.Any]]:
-        """Serializes TaskStateTrigger arguments and classpath."""
+        """Serialize TaskStateTrigger arguments and classpath."""
         return (
             "airflow.triggers.external_task.TaskStateTrigger",
             {
@@ -84,10 +84,11 @@ class TaskStateTrigger(BaseTrigger):
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
-        Checks periodically in the database to see if the dag exists and is in 
the running state. If found,
-        wait until the task specified will reach one of the expected states. 
If dag with specified name was
-        not in the running state after _timeout_sec seconds after starting 
execution process of the trigger,
-        terminate with status 'timeout'.
+        Check periodically in the database to see if the dag exists and is in 
the running state.
+
+        If found, wait until the task specified will reach one of the expected 
states.
+        If dag with specified name was not in the running state after 
_timeout_sec seconds
+        after starting execution process of the trigger, terminate with status 
'timeout'.
         """
         while True:
             try:
@@ -167,7 +168,7 @@ class DagStateTrigger(BaseTrigger):
         self.poll_interval = poll_interval
 
     def serialize(self) -> tuple[str, dict[str, typing.Any]]:
-        """Serializes DagStateTrigger arguments and classpath."""
+        """Serialize DagStateTrigger arguments and classpath."""
         return (
             "airflow.triggers.external_task.DagStateTrigger",
             {
@@ -179,10 +180,7 @@ class DagStateTrigger(BaseTrigger):
         )
 
     async def run(self) -> typing.AsyncIterator[TriggerEvent]:
-        """
-        Checks periodically in the database to see if the dag run exists, and 
has
-        hit one of the states yet, or not.
-        """
+        """Check the database to see if the dag run exists, and has hit one of 
the states yet, or not."""
         while True:
             # mypy confuses typing here
             num_dags = await self.count_dags()  # type: ignore[call-arg]
diff --git a/airflow/triggers/file.py b/airflow/triggers/file.py
index 4128b09814..12b2d0e827 100644
--- a/airflow/triggers/file.py
+++ b/airflow/triggers/file.py
@@ -48,7 +48,7 @@ class FileTrigger(BaseTrigger):
         self.poll_interval = poll_interval
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
-        """Serializes FileTrigger arguments and classpath."""
+        """Serialize FileTrigger arguments and classpath."""
         return (
             "airflow.triggers.file.FileTrigger",
             {

Reply via email to