This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new b1ad017cee pydocstyle D202 added (#24221)
b1ad017cee is described below
commit b1ad017cee66f5e042144cc7baa2d44b23b47c4f
Author: Bowrna <[email protected]>
AuthorDate: Tue Jun 7 19:31:47 2022 +0530
pydocstyle D202 added (#24221)
---
.pre-commit-config.yaml | 2 +-
airflow/cli/commands/dag_command.py | 1 -
airflow/models/dag.py | 1 -
airflow/models/mappedoperator.py | 1 -
airflow/providers/amazon/aws/hooks/quicksight.py | 2 --
airflow/providers/amazon/aws/hooks/s3.py | 1 -
airflow/providers/amazon/aws/hooks/sts.py | 1 -
airflow/providers/amazon/aws/sensors/rds.py | 1 -
airflow/providers/cncf/kubernetes/hooks/kubernetes.py | 1 -
airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py | 1 -
airflow/providers/google/cloud/hooks/bigquery.py | 1 -
airflow/providers/google/cloud/hooks/looker.py | 2 --
airflow/providers/hashicorp/secrets/vault.py | 1 -
airflow/utils/db.py | 2 --
airflow/utils/process_utils.py | 1 -
15 files changed, 1 insertion(+), 18 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ccb3114704..d1dcb808ce 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -242,7 +242,7 @@ repos:
name: Run pydocstyle
args:
- --convention=pep257
- - --add-ignore=D100,D102,D103,D104,D105,D107,D202,D205,D400,D401
+ - --add-ignore=D100,D102,D103,D104,D105,D107,D205,D400,D401
exclude: |
(?x)
^tests/.*\.py$|
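For context: D202 is pydocstyle's "No blank lines allowed after function docstring" check. Removing it from the --add-ignore list above turns the check on, which is why every hunk below deletes a blank line sitting between a docstring and the first statement. A minimal, hypothetical illustration of the rule (not taken from this commit):

    def violates_d202() -> int:
        """Return a constant."""

        return 42  # the blank line after the docstring triggers D202


    def satisfies_d202() -> int:
        """Return a constant."""
        return 42  # compliant once the blank line is removed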
diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py
index a06c5a7069..a219c94da9 100644
--- a/airflow/cli/commands/dag_command.py
+++ b/airflow/cli/commands/dag_command.py
@@ -253,7 +253,6 @@ def dag_state(args, session=NEW_SESSION):
>>> airflow dags state a_dag_with_conf_passed 2015-01-01T00:00:00.000000
failed, {"name": "bob", "age": "42"}
"""
-
dag = DagModel.get_dagmodel(args.dag_id, session=session)
if not dag:
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index ece03bafe8..4e4278b2ee 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -1976,7 +1976,6 @@ class DAG(LoggingMixin):
:param include_direct_upstream: Include all tasks directly upstream of matched
and downstream (if include_downstream = True) tasks
"""
-
from airflow.models.baseoperator import BaseOperator
from airflow.models.mappedoperator import MappedOperator
diff --git a/airflow/models/mappedoperator.py b/airflow/models/mappedoperator.py
index fe18a97cc1..ba7328d2f7 100644
--- a/airflow/models/mappedoperator.py
+++ b/airflow/models/mappedoperator.py
@@ -843,7 +843,6 @@ class MappedOperator(AbstractOperator):
:return: None if upstream tasks are not complete yet, or else total number of mapped TIs this task
should have
"""
-
lengths = self._get_map_lengths(run_id, session=session)
expansion_kwargs = self._get_expansion_kwargs()
diff --git a/airflow/providers/amazon/aws/hooks/quicksight.py b/airflow/providers/amazon/aws/hooks/quicksight.py
index a7e90c36cf..a11ad2781b 100644
--- a/airflow/providers/amazon/aws/hooks/quicksight.py
+++ b/airflow/providers/amazon/aws/hooks/quicksight.py
@@ -72,7 +72,6 @@ class QuickSightHook(AwsBaseHook):
having Ingestion ARN, HTTP status, ingestion ID and ingestion status.
:rtype: Dict
"""
-
self.log.info("Creating QuickSight Ingestion for data set id %s.",
data_set_id)
quicksight_client = self.get_conn()
try:
@@ -136,7 +135,6 @@ class QuickSightHook(AwsBaseHook):
will check the status of QuickSight Ingestion
:return: response of describe_ingestion call after Ingestion is done
"""
-
sec = 0
status = self.get_status(aws_account_id, data_set_id, ingestion_id)
while status in self.NON_TERMINAL_STATES and status != target_state:
diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py
index e7e9f2de50..fd130a5bdd 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -162,7 +162,6 @@ class S3Hook(AwsBaseHook):
:return: the parsed bucket name and key
:rtype: tuple of str
"""
-
if bucket is None:
return S3Hook.parse_s3_url(key)
diff --git a/airflow/providers/amazon/aws/hooks/sts.py b/airflow/providers/amazon/aws/hooks/sts.py
index aff787ee5d..78ecad74d9 100644
--- a/airflow/providers/amazon/aws/hooks/sts.py
+++ b/airflow/providers/amazon/aws/hooks/sts.py
@@ -33,7 +33,6 @@ class StsHook(AwsBaseHook):
def get_account_number(self) -> str:
"""Get the account Number"""
-
try:
return self.get_conn().get_caller_identity()['Account']
except Exception as general_error:
diff --git a/airflow/providers/amazon/aws/sensors/rds.py b/airflow/providers/amazon/aws/sensors/rds.py
index 54ee50875e..264f3e7fe1 100644
--- a/airflow/providers/amazon/aws/sensors/rds.py
+++ b/airflow/providers/amazon/aws/sensors/rds.py
@@ -56,7 +56,6 @@ class RdsBaseSensor(BaseSensorOperator):
def _check_item(self, item_type: str, item_name: str) -> bool:
"""Get certain item from `_describe_item()` and check its status"""
-
try:
items = self._describe_item(item_type, item_name)
except ClientError:
diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
index e15dce67ef..ad7762ed4f 100644
--- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -180,7 +180,6 @@ class KubernetesHook(BaseHook):
def get_conn(self) -> Any:
"""Returns kubernetes api session for use with requests"""
-
in_cluster = self._coalesce_param(
self.in_cluster, self.conn_extras.get("extra__kubernetes__in_cluster") or None
)
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
index 056d6585ab..3b4366dca9 100644
--- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -593,7 +593,6 @@ class KubernetesPodOperator(BaseOperator):
When we find values there that we need to apply on the hook, we patch special
hook attributes here.
"""
-
# default for enable_tcp_keepalive is True; patch if False
if conf.getboolean('kubernetes', 'enable_tcp_keepalive') is False:
hook._deprecated_core_disable_tcp_keepalive = True
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index d4f54f56ce..70795efd65 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -361,7 +361,6 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
:param exists_ok: If ``True``, ignore "already exists" errors when creating the table.
:return: Created table
"""
-
_table_resource: Dict[str, Any] = {}
if self.location:
diff --git a/airflow/providers/google/cloud/hooks/looker.py b/airflow/providers/google/cloud/hooks/looker.py
index 845211260b..425b167956 100644
--- a/airflow/providers/google/cloud/hooks/looker.py
+++ b/airflow/providers/google/cloud/hooks/looker.py
@@ -185,7 +185,6 @@ class LookerHook(BaseHook):
def get_looker_sdk(self):
"""Returns Looker SDK client for Looker API 4.0."""
-
conn = self.get_connection(self.looker_conn_id)
settings = LookerApiSettings(conn)
@@ -214,7 +213,6 @@ class LookerApiSettings(api_settings.ApiSettings):
Overrides the default logic of getting connection settings. Fetches
the connection settings from Airflow's connection object.
"""
-
config = {}
if self.conn.host is None:
diff --git a/airflow/providers/hashicorp/secrets/vault.py b/airflow/providers/hashicorp/secrets/vault.py
index 4ff25d71b4..52b019eeac 100644
--- a/airflow/providers/hashicorp/secrets/vault.py
+++ b/airflow/providers/hashicorp/secrets/vault.py
@@ -175,7 +175,6 @@ class VaultBackend(BaseSecretsBackend, LoggingMixin):
:rtype: str
:return: The connection uri retrieved from the secret
"""
-
# Since VaultBackend implements `get_connection`, `get_conn_uri` is not used. So we
# don't need to implement (or direct users to use) method `get_conn_value` instead
warnings.warn(
diff --git a/airflow/utils/db.py b/airflow/utils/db.py
index c3262318c1..e8790de823 100644
--- a/airflow/utils/db.py
+++ b/airflow/utils/db.py
@@ -861,7 +861,6 @@ def reflect_tables(tables: List[Union[Base, str]], session):
This function gets the current state of each table in the set of models provided and returns
a SqlAlchemy metadata object containing them.
"""
-
import sqlalchemy.schema
metadata = sqlalchemy.schema.MetaData(session.bind)
@@ -1173,7 +1172,6 @@ def _move_duplicate_data_to_new_table(
building the DELETE FROM join condition.
:param target_table_name: name of the table in which to park the duplicate rows
"""
-
bind = session.get_bind()
dialect_name = bind.dialect.name
query = (
diff --git a/airflow/utils/process_utils.py b/airflow/utils/process_utils.py
index d547f2c0de..4ae1f43980 100644
--- a/airflow/utils/process_utils.py
+++ b/airflow/utils/process_utils.py
@@ -320,7 +320,6 @@ def set_new_process_group() -> None:
rather than having to iterate the child processes.
If current process spawn by system call ``exec()`` than keep current process group
"""
-
if os.getpid() == os.getsid(0):
# If PID = SID than process a session leader, and it is not possible to change process group
return