This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 5066844513 D400 first line should end with period batch02 (#25268)
5066844513 is described below
commit 50668445137e4037bb4a3b652bec22e53d1eddd7
Author: Edith Puclla <[email protected]>
AuthorDate: Thu Sep 8 19:57:53 2022 -0500
D400 first line should end with period batch02 (#25268)
---
airflow/executors/executor_loader.py | 4 +---
.../versions/0038_1_10_2_add_sm_dag_index.py | 2 +-
.../versions/0108_2_3_0_default_dag_view_grid.py | 2 +-
.../amazon/aws/transfers/redshift_to_s3.py | 2 +-
.../providers/databricks/hooks/databricks_base.py | 24 +++++++++++++---------
.../google/cloud/hooks/cloud_memorystore.py | 19 +++++++++--------
airflow/providers/mysql/hooks/mysql.py | 22 ++++++++++++++------
airflow/providers/qubole/sensors/qubole.py | 12 ++++++-----
airflow/utils/process_utils.py | 15 +++++++++-----
docs/apache-airflow/migrations-ref.rst | 4 ++--
10 files changed, 63 insertions(+), 43 deletions(-)
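For context: D400 is the pydocstyle check "First line should end with a period". A minimal sketch of what the check flags and what satisfies it (the function below is illustrative, not taken from the diff):

    def bulk_load_bad(table, tmp_file):
        """Loads a tab-delimited file into a database table"""  # D400 violation: no trailing period

    def bulk_load_good(table, tmp_file):
        """Load a tab-delimited file into a database table."""  # summary line ends with a period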
diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py
index 723060db0f..153b8ed243 100644
--- a/airflow/executors/executor_loader.py
+++ b/airflow/executors/executor_loader.py
@@ -65,7 +65,7 @@ class ExecutorLoader:
@classmethod
def get_default_executor(cls) -> "BaseExecutor":
- """Creates a new instance of the configured executor if none exists
and returns it"""
+ """Creates a new instance of the configured executor if none exists
and returns it."""
if cls._default_executor is not None:
return cls._default_executor
@@ -134,7 +134,6 @@ class ExecutorLoader:
@classmethod
def __load_celery_kubernetes_executor(cls) -> "BaseExecutor":
- """:return: an instance of CeleryKubernetesExecutor"""
celery_executor = import_string(cls.executors[CELERY_EXECUTOR])()
kubernetes_executor = import_string(cls.executors[KUBERNETES_EXECUTOR])()
@@ -143,7 +142,6 @@ class ExecutorLoader:
@classmethod
def __load_local_kubernetes_executor(cls) -> "BaseExecutor":
- """:return: an instance of LocalKubernetesExecutor"""
local_executor = import_string(cls.executors[LOCAL_EXECUTOR])()
kubernetes_executor = import_string(cls.executors[KUBERNETES_EXECUTOR])()
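The two removed one-liners above documented thin wrappers around import_string, Airflow's dotted-path import helper. A hedged sketch of that loading pattern (the registry mapping below is illustrative; only import_string itself is the real Airflow utility):

    from airflow.utils.module_loading import import_string

    # Illustrative registry; ExecutorLoader keeps the real one in cls.executors.
    executors = {"LocalExecutor": "airflow.executors.local_executor.LocalExecutor"}

    executor_cls = import_string(executors["LocalExecutor"])  # resolve the dotted path to a class
    executor = executor_cls()  # instantiate it, as the __load_*_kubernetes_executor methods do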
diff --git a/airflow/migrations/versions/0038_1_10_2_add_sm_dag_index.py b/airflow/migrations/versions/0038_1_10_2_add_sm_dag_index.py
index a4bf9a03e1..fd2403f89c 100644
--- a/airflow/migrations/versions/0038_1_10_2_add_sm_dag_index.py
+++ b/airflow/migrations/versions/0038_1_10_2_add_sm_dag_index.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-"""Merge migrations Heads
+"""Merge migrations Heads.
Revision ID: 03bc53e68815
Revises: 0a2a5b66e19d, bf00311e1990
diff --git a/airflow/migrations/versions/0108_2_3_0_default_dag_view_grid.py b/airflow/migrations/versions/0108_2_3_0_default_dag_view_grid.py
index 03a9f43c3a..1cf18e0d35 100644
--- a/airflow/migrations/versions/0108_2_3_0_default_dag_view_grid.py
+++ b/airflow/migrations/versions/0108_2_3_0_default_dag_view_grid.py
@@ -16,7 +16,7 @@
# specific language governing permissions and limitations
# under the License.
-"""Update dag.default_view to grid
+"""Update dag.default_view to grid.
Revision ID: b1b348e02d07
Revises: 75d5ed6c2b43
diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
index f5a3cd9bf6..b09044dd71 100644
--- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -29,7 +29,7 @@ if TYPE_CHECKING:
class RedshiftToS3Operator(BaseOperator):
"""
- Executes an UNLOAD command to s3 as a CSV with headers
+ Execute an UNLOAD command to s3 as a CSV with headers.
.. seealso::
For more information on how to use this operator, take a look at the guide:
diff --git a/airflow/providers/databricks/hooks/databricks_base.py b/airflow/providers/databricks/hooks/databricks_base.py
index 1f8680e41e..5a30960353 100644
--- a/airflow/providers/databricks/hooks/databricks_base.py
+++ b/airflow/providers/databricks/hooks/databricks_base.py
@@ -169,8 +169,7 @@ class BaseDatabricksHook(BaseHook):
@staticmethod
def _parse_host(host: str) -> str:
"""
- The purpose of this function is to be robust to improper connections
- settings provided by users, specifically in the host field.
+ This function is resistant to incorrect connection settings provided by users, in the host field.
For example -- when users supply ``https://xx.cloud.databricks.com`` as the
host, we must strip out the protocol to get the host.::
@@ -195,21 +194,23 @@ class BaseDatabricksHook(BaseHook):
def _get_retry_object(self) -> Retrying:
"""
- Instantiates a retry object
+ Instantiate a retry object.
:return: instance of Retrying class
"""
return Retrying(**self.retry_args)
def _a_get_retry_object(self) -> AsyncRetrying:
"""
- Instantiates an async retry object
+ Instantiate an async retry object.
:return: instance of AsyncRetrying class
"""
return AsyncRetrying(**self.retry_args)
def _get_aad_token(self, resource: str) -> str:
"""
- Function to get AAD token for given resource. Supports managed identity or service principal auth
+ Function to get AAD token for given resource.
+
+ Supports managed identity or service principal auth.
:param resource: resource to issue token to
:return: AAD token, or raise an exception
"""
@@ -340,7 +341,7 @@ class BaseDatabricksHook(BaseHook):
def _get_aad_headers(self) -> dict:
"""
- Fills AAD headers if necessary (SPN is outside of the workspace)
+ Fill AAD headers if necessary (SPN is outside of the workspace).
:return: dictionary with filled AAD headers
"""
headers = {}
@@ -369,7 +370,8 @@ class BaseDatabricksHook(BaseHook):
@staticmethod
def _is_aad_token_valid(aad_token: dict) -> bool:
"""
- Utility function to check AAD token hasn't expired yet
+ Utility function to check AAD token hasn't expired yet.
+
:param aad_token: dict with properties of AAD token
:return: true if token is valid, false otherwise
:rtype: bool
@@ -382,7 +384,7 @@ class BaseDatabricksHook(BaseHook):
@staticmethod
def _check_azure_metadata_service() -> None:
"""
- Check for Azure Metadata Service
+ Check for Azure Metadata Service.
https://docs.microsoft.com/en-us/azure/virtual-machines/linux/instance-metadata-service
"""
try:
@@ -472,7 +474,7 @@ class BaseDatabricksHook(BaseHook):
wrap_http_errors: bool = True,
):
"""
- Utility function to perform an API call with retries
+ Utility function to perform an API call with retries.
:param endpoint_info: Tuple of method and endpoint
:param json: Parameters for this API call.
@@ -617,7 +619,9 @@ class BaseDatabricksHook(BaseHook):
class _TokenAuth(AuthBase):
"""
- Helper class for requests Auth field. AuthBase requires you to implement the __call__
+ Helper class for requests Auth field.
+
+ AuthBase requires you to implement the ``__call__``
magic function.
"""
diff --git a/airflow/providers/google/cloud/hooks/cloud_memorystore.py b/airflow/providers/google/cloud/hooks/cloud_memorystore.py
index aa4ab4eb6c..112495b830 100644
--- a/airflow/providers/google/cloud/hooks/cloud_memorystore.py
+++ b/airflow/providers/google/cloud/hooks/cloud_memorystore.py
@@ -90,7 +90,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
@staticmethod
def _append_label(instance: Instance, key: str, val: str) -> Instance:
"""
- Append labels to provided Instance type
+ Append labels to provided Instance type.
Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current airflow version string follows semantic versioning spec: x.y.z).
@@ -275,6 +275,8 @@ class CloudMemorystoreHook(GoogleBaseHook):
metadata: Sequence[Tuple[str, str]] = (),
):
"""
+ Failover of the primary node to current replica node.
+
Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
Memorystore for Redis instance.
@@ -392,8 +394,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
metadata: Sequence[Tuple[str, str]] = (),
):
"""
- Lists all Redis instances owned by a project in either the specified location (region) or all
- locations.
+ List Redis instances owned by a project at the specified location (region) or all locations.
:param location: The location of the Cloud Memorystore instance (for example europe-west1)
@@ -528,7 +529,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
@staticmethod
def _append_label(instance: cloud_memcache.Instance, key: str, val: str) -> cloud_memcache.Instance:
"""
- Append labels to provided Instance type
+ Append labels to provided Instance type.
Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current airflow version string follows semantic versioning spec: x.y.z).
@@ -752,8 +753,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
metadata: Sequence[Tuple[str, str]] = (),
):
"""
- Lists all Memcached instances owned by a project in either the specified location (region) or all
- locations.
+ List Memcached instances owned by a project at the specified location (region) or all locations.
:param location: The location of the Cloud Memorystore instance (for example europe-west1)
@@ -851,9 +851,10 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
metadata: Sequence[Tuple[str, str]] = (),
):
"""
- Updates the defined Memcached Parameters for an existing Instance. This method only stages the
- parameters, it must be followed by apply_parameters to apply the parameters to nodes of
- the Memcached Instance.
+ Update the defined Memcached Parameters for an existing Instance.
+
+ This method only stages the parameters, it must be followed by apply_parameters
+ to apply the parameters to nodes of the Memcached Instance.
:param update_mask: Required. Mask of fields to update.
If a dict is provided, it must be of the same form as the protobuf message
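The _append_label docstrings above note that labels must match ``[a-z]([-a-z0-9]*[a-z0-9])?`` while Airflow versions are semver (x.y.z). A hedged sketch of the normalisation that implies (the helper name is hypothetical):

    import re

    LABEL_RE = re.compile(r"^[a-z]([-a-z0-9]*[a-z0-9])?$")

    def version_to_label(version: str) -> str:
        # "2.3.4" contains dots and starts with a digit, both invalid for the
        # label regex, so map dots to dashes and prefix a letter.
        label = "v" + version.replace(".", "-")
        assert LABEL_RE.match(label), f"not a valid label: {label}"
        return label

    print(version_to_label("2.3.4"))  # v2-3-4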
diff --git a/airflow/providers/mysql/hooks/mysql.py b/airflow/providers/mysql/hooks/mysql.py
index 586cfb5b8c..508ae6c56c 100644
--- a/airflow/providers/mysql/hooks/mysql.py
+++ b/airflow/providers/mysql/hooks/mysql.py
@@ -61,8 +61,10 @@ class MySqlHook(DbApiHook):
def set_autocommit(self, conn: MySQLConnectionTypes, autocommit: bool) -> None:
"""
- The MySQLdb (mysqlclient) client uses an `autocommit` method rather
- than an `autocommit` property to set the autocommit setting
+ Set *autocommit*.
+
+ *mysqlclient* uses an *autocommit* method rather than an *autocommit*
+ property, so we need to override this to support it.
:param conn: connection to set autocommit setting
:param autocommit: autocommit setting
@@ -75,8 +77,10 @@ class MySqlHook(DbApiHook):
def get_autocommit(self, conn: MySQLConnectionTypes) -> bool:
"""
- The MySQLdb (mysqlclient) client uses a `get_autocommit` method
- rather than an `autocommit` property to get the autocommit setting
+ Whether *autocommit* is active.
+
+ *mysqlclient* uses a *get_autocommit* method rather than an *autocommit*
+ property, so we need to override this to support it.
:param conn: connection to get autocommit setting from.
:return: connection autocommit setting
@@ -146,6 +150,8 @@ class MySqlHook(DbApiHook):
def get_conn(self) -> MySQLConnectionTypes:
"""
+ Connection to a MySQL database.
+
Establishes a connection to a mysql database
by extracting the connection configuration from the Airflow connection.
@@ -174,7 +180,7 @@ class MySqlHook(DbApiHook):
raise ValueError('Unknown MySQL client name provided!')
def bulk_load(self, table: str, tmp_file: str) -> None:
- """Loads a tab-delimited file into a database table"""
+ """Load a tab-delimited file into a database table."""
conn = self.get_conn()
cur = conn.cursor()
cur.execute(
@@ -187,7 +193,7 @@ class MySqlHook(DbApiHook):
conn.close()
def bulk_dump(self, table: str, tmp_file: str) -> None:
- """Dumps a database table into a tab-delimited file"""
+ """Dump a database table into a tab-delimited file."""
conn = self.get_conn()
cur = conn.cursor()
cur.execute(
@@ -202,6 +208,8 @@ class MySqlHook(DbApiHook):
@staticmethod
def _serialize_cell(cell: object, conn: Optional[Connection] = None) -> object:
"""
+ Convert argument to a literal.
+
The package MySQLdb converts an argument to a literal
when passing those separately to execute. Hence, this method does nothing.
@@ -214,6 +222,8 @@ class MySqlHook(DbApiHook):
def get_iam_token(self, conn: Connection) -> Tuple[str, int]:
"""
+ Retrieve a temporary password to connect to MySQL.
+
Uses AWSHook to retrieve a temporary password to connect to MySQL
Port is required. If none is provided, default 3306 is used
"""
diff --git a/airflow/providers/qubole/sensors/qubole.py b/airflow/providers/qubole/sensors/qubole.py
index 1d1fdb62ed..09acc58d54 100644
--- a/airflow/providers/qubole/sensors/qubole.py
+++ b/airflow/providers/qubole/sensors/qubole.py
@@ -29,7 +29,7 @@ if TYPE_CHECKING:
class QuboleSensor(BaseSensorOperator):
- """Base class for all Qubole Sensors"""
+ """Base class for all Qubole Sensors."""
template_fields: Sequence[str] = ('data', 'qubole_conn_id')
@@ -68,8 +68,9 @@ class QuboleSensor(BaseSensorOperator):
class QuboleFileSensor(QuboleSensor):
"""
- Wait for a file or folder to be present in cloud storage
- and check for its presence via QDS APIs
+ Wait for a file or folder to be present in cloud storage.
+
+ Check for file or folder presence via QDS APIs.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
@@ -92,8 +93,9 @@ class QuboleFileSensor(QuboleSensor):
class QubolePartitionSensor(QuboleSensor):
"""
- Wait for a Hive partition to show up in QHS (Qubole Hive Service)
- and check for its presence via QDS APIs
+ Wait for a Hive partition to show up in QHS (Qubole Hive Service).
+
+ Check for Hive partition presence via QDS APIs.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
diff --git a/airflow/utils/process_utils.py b/airflow/utils/process_utils.py
index 68d74ebe1e..4c5b8c019b 100644
--- a/airflow/utils/process_utils.py
+++ b/airflow/utils/process_utils.py
@@ -16,7 +16,7 @@
# specific language governing permissions and limitations
# under the License.
#
-"""Utilities for running or stopping processes"""
+"""Utilities for running or stopping processes."""
import errno
import logging
import os
@@ -56,6 +56,8 @@ def reap_process_group(
timeout: int = DEFAULT_TIME_TO_WAIT_AFTER_SIGTERM,
) -> Dict[int, int]:
"""
+ Send sig (SIGTERM) to the process group of pid.
+
Tries really hard to terminate all processes in the group (including grandchildren). Will send
sig (SIGTERM) to the process group of pid. If any process is alive after timeout
a SIGKILL will be sent.
@@ -158,7 +160,7 @@ def reap_process_group(
def execute_in_subprocess(cmd: List[str], cwd: Optional[str] = None) -> None:
"""
- Execute a process and stream output to logger
+ Execute a process and stream output to logger.
:param cmd: command and arguments to run
:param cwd: Current working directory passed to the Popen constructor
"""
@@ -167,7 +169,7 @@ def execute_in_subprocess(cmd: List[str], cwd: Optional[str] = None) -> None:
def execute_in_subprocess_with_kwargs(cmd: List[str], **kwargs) -> None:
"""
- Execute a process and stream output to logger
+ Execute a process and stream output to logger.
:param cmd: command and arguments to run
@@ -190,6 +192,8 @@ def execute_in_subprocess_with_kwargs(cmd: List[str], **kwargs) -> None:
def execute_interactive(cmd: List[str], **kwargs) -> None:
"""
+ Run the new command as a subprocess.
+
Runs the new command as a subprocess and ensures that the terminal's state is restored to its original
state after the process is completed e.g. if the subprocess hides the cursor, it will be restored after
the process is completed.
@@ -271,8 +275,9 @@ def kill_child_processes_by_pids(pids_to_kill: List[int], timeout: int = 5) -> N
@contextmanager
def patch_environ(new_env_variables: Dict[str, str]) -> Generator[None, None, None]:
"""
- Sets environment variables in context. After leaving the context, it restores its original state.
+ Set environment variables in context.
+ After leaving the context, it restores its original state.
:param new_env_variables: Environment variables to set
"""
current_env_state = {key: os.environ.get(key) for key in new_env_variables.keys()}
@@ -316,7 +321,7 @@ def check_if_pidfile_process_is_running(pid_file: str, process_name: str):
def set_new_process_group() -> None:
"""
- Tries to set current process to a new process group
+ Try to set current process to a new process group.
That makes it easy to kill all sub-process of this at the OS-level,
rather than having to iterate the child processes.
If current process was spawned by system call ``exec()`` then keep current process group
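The patch_environ docstring above promises restoration of the environment on context exit. A minimal sketch of that save-and-restore pattern, assuming nothing beyond the standard library (not the utility's exact code):

    import os
    from contextlib import contextmanager

    @contextmanager
    def patch_environ(new_env_variables):
        saved = {key: os.environ.get(key) for key in new_env_variables}  # None marks "was unset"
        os.environ.update(new_env_variables)
        try:
            yield
        finally:
            for key, old in saved.items():
                if old is None:
                    os.environ.pop(key, None)  # variable did not exist before entering
                else:
                    os.environ[key] = old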
diff --git a/docs/apache-airflow/migrations-ref.rst b/docs/apache-airflow/migrations-ref.rst
index f8b1f14dbe..fa1606946f 100644
--- a/docs/apache-airflow/migrations-ref.rst
+++ b/docs/apache-airflow/migrations-ref.rst
@@ -58,7 +58,7 @@ Here's the list of all the Database Migrations that are executed via when you ru
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
| ``1de7bc13c950`` | ``b1b348e02d07`` | ``2.3.1`` | Add index for ``event`` column in ``log`` table. |
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
-| ``b1b348e02d07`` | ``75d5ed6c2b43`` | ``2.3.0`` | Update dag.default_view to grid |
+| ``b1b348e02d07`` | ``75d5ed6c2b43`` | ``2.3.0`` | Update dag.default_view to grid. |
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
| ``75d5ed6c2b43`` | ``909884dea523`` | ``2.3.0`` | Add map_index to Log. |
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
@@ -216,7 +216,7 @@ Here's the list of all the Database Migrations that are executed via when you ru
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
| ``41f5f12752f8`` | ``03bc53e68815`` | ``1.10.2`` | Add superuser field |
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
-| ``03bc53e68815`` (merge_point) | ``0a2a5b66e19d``, | ``1.10.2`` | Merge migrations Heads |
+| ``03bc53e68815`` (merge_point) | ``0a2a5b66e19d``, | ``1.10.2`` | Merge migrations Heads. |
| | ``bf00311e1990`` | | |
+---------------------------------+-------------------+-------------------+--------------------------------------------------------------+
| ``0a2a5b66e19d`` | ``9635ae0956e7`` | ``1.10.2`` | Add ``task_reschedule`` table |