This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new b11525702c Use literal dict instead of calling dict() in providers (#33761)
b11525702c is described below
commit b11525702c72cb53034aa29ccd6d0e1161ac475c
Author: Hussein Awala <[email protected]>
AuthorDate: Sat Aug 26 09:27:13 2023 +0200
Use literal dict instead of calling dict() in providers (#33761)
---
.../providers/amazon/aws/hooks/redshift_data.py | 2 +-
.../kubernetes/executors/kubernetes_executor.py | 2 +-
airflow/providers/cncf/kubernetes/operators/pod.py | 8 +-
.../providers/databricks/hooks/databricks_base.py | 12 +-
airflow/providers/docker/decorators/docker.py | 18 +-
.../providers/elasticsearch/hooks/elasticsearch.py | 14 +-
airflow/providers/exasol/hooks/exasol.py | 12 +-
airflow/providers/google/ads/hooks/ads.py | 4 +-
.../cloud/example_dags/example_cloud_sql_query.py | 52 ++--
airflow/providers/google/cloud/hooks/bigtable.py | 10 +-
.../cloud/hooks/cloud_storage_transfer_service.py | 2 +-
.../providers/google/cloud/hooks/compute_ssh.py | 2 +-
airflow/providers/google/cloud/hooks/dataflow.py | 12 +-
airflow/providers/google/cloud/hooks/dlp.py | 270 +++++++++---------
airflow/providers/google/cloud/hooks/os_login.py | 10 +-
.../providers/google/cloud/operators/cloud_sql.py | 303 +++++++++++----------
.../providers/google/cloud/operators/compute.py | 72 ++---
.../providers/google/cloud/operators/functions.py | 86 +++---
.../triggers/cloud_storage_transfer_service.py | 2 +-
.../google/cloud/utils/field_validator.py | 26 +-
airflow/providers/postgres/hooks/postgres.py | 14 +-
airflow/providers/snowflake/hooks/snowflake.py | 4 +-
airflow/providers/ssh/hooks/ssh.py | 38 +--
23 files changed, 491 insertions(+), 484 deletions(-)
diff --git a/airflow/providers/amazon/aws/hooks/redshift_data.py
b/airflow/providers/amazon/aws/hooks/redshift_data.py
index 852b1e5c68..4e16729a81 100644
--- a/airflow/providers/amazon/aws/hooks/redshift_data.py
+++ b/airflow/providers/amazon/aws/hooks/redshift_data.py
@@ -188,7 +188,7 @@ class RedshiftDataHook(AwsGenericHook["RedshiftDataAPIServiceClient"]):
pk_columns = []
token = ""
while True:
- kwargs = dict(Id=stmt_id)
+ kwargs = {"Id": stmt_id}
if token:
kwargs["NextToken"] = token
response = self.conn.get_statement_result(**kwargs)
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index ac11622216..33161097c7 100644
--- a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -246,7 +246,7 @@ class KubernetesExecutor(BaseExecutor):
if ti.map_index >= 0:
# Old tasks _couldn't_ be mapped, so we don't have to worry about compat
base_label_selector += f",map_index={ti.map_index}"
- kwargs = dict(label_selector=base_label_selector)
+ kwargs = {"label_selector": base_label_selector}
if self.kube_config.kube_client_request_args:
kwargs.update(**self.kube_config.kube_client_request_args)
diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py
b/airflow/providers/cncf/kubernetes/operators/pod.py
index 28d603e0ea..a0e8b4cd0f 100644
--- a/airflow/providers/cncf/kubernetes/operators/pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/pod.py
@@ -852,10 +852,10 @@ class KubernetesPodOperator(BaseOperator):
def on_kill(self) -> None:
if self.pod:
pod = self.pod
- kwargs = dict(
- name=pod.metadata.name,
- namespace=pod.metadata.namespace,
- )
+ kwargs = {
+ "name": pod.metadata.name,
+ "namespace": pod.metadata.namespace,
+ }
if self.termination_grace_period is not None:
kwargs.update(grace_period_seconds=self.termination_grace_period)
self.client.delete_namespaced_pod(**kwargs)
diff --git a/airflow/providers/databricks/hooks/databricks_base.py
b/airflow/providers/databricks/hooks/databricks_base.py
index 6d0d929b5d..f77fe8bcda 100644
--- a/airflow/providers/databricks/hooks/databricks_base.py
+++ b/airflow/providers/databricks/hooks/databricks_base.py
@@ -121,12 +121,12 @@ class BaseDatabricksHook(BaseHook):
self.retry_args["retry"] = retry_if_exception(self._retryable_error)
self.retry_args["after"] = my_after_func
else:
- self.retry_args = dict(
- stop=stop_after_attempt(self.retry_limit),
- wait=wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
- retry=retry_if_exception(self._retryable_error),
- after=my_after_func,
- )
+ self.retry_args = {
+ "stop": stop_after_attempt(self.retry_limit),
+ "wait": wait_exponential(min=self.retry_delay,
max=(2**retry_limit)),
+ "retry": retry_if_exception(self._retryable_error),
+ "after": my_after_func,
+ }
@cached_property
def databricks_conn(self) -> Connection:
diff --git a/airflow/providers/docker/decorators/docker.py
b/airflow/providers/docker/decorators/docker.py
index bb7849f1b2..beb93fe113 100644
--- a/airflow/providers/docker/decorators/docker.py
+++ b/airflow/providers/docker/decorators/docker.py
@@ -112,15 +112,15 @@ class _DockerDecoratedOperator(DecoratedOperator, DockerOperator):
self.pickling_library.dump({"args": self.op_args, "kwargs": self.op_kwargs}, file)
py_source = self.get_python_source()
write_python_script(
- jinja_context=dict(
- op_args=self.op_args,
- op_kwargs=self.op_kwargs,
- pickling_library=self.pickling_library.__name__,
- python_callable=self.python_callable.__name__,
- python_callable_source=py_source,
- expect_airflow=self.expect_airflow,
- string_args_global=False,
- ),
+ jinja_context={
+ "op_args": self.op_args,
+ "op_kwargs": self.op_kwargs,
+ "pickling_library": self.pickling_library.__name__,
+ "python_callable": self.python_callable.__name__,
+ "python_callable_source": py_source,
+ "expect_airflow": self.expect_airflow,
+ "string_args_global": False,
+ },
filename=script_filename,
)
diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py
b/airflow/providers/elasticsearch/hooks/elasticsearch.py
index 13b2573093..b504215966 100644
--- a/airflow/providers/elasticsearch/hooks/elasticsearch.py
+++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py
@@ -92,13 +92,13 @@ class ElasticsearchSQLHook(DbApiHook):
conn_id = getattr(self, self.conn_name_attr)
conn = self.connection or self.get_connection(conn_id)
- conn_args = dict(
- host=conn.host,
- port=conn.port,
- user=conn.login or None,
- password=conn.password or None,
- scheme=conn.schema or "http",
- )
+ conn_args = {
+ "host": conn.host,
+ "port": conn.port,
+ "user": conn.login or None,
+ "password": conn.password or None,
+ "scheme": conn.schema or "http",
+ }
if conn.extra_dejson.get("http_compress", False):
conn_args["http_compress"] = bool(["http_compress"])
diff --git a/airflow/providers/exasol/hooks/exasol.py
b/airflow/providers/exasol/hooks/exasol.py
index 7a03fdd097..ed71205ebc 100644
--- a/airflow/providers/exasol/hooks/exasol.py
+++ b/airflow/providers/exasol/hooks/exasol.py
@@ -56,12 +56,12 @@ class ExasolHook(DbApiHook):
def get_conn(self) -> ExaConnection:
conn_id = getattr(self, self.conn_name_attr)
conn = self.get_connection(conn_id)
- conn_args = dict(
- dsn=f"{conn.host}:{conn.port}",
- user=conn.login,
- password=conn.password,
- schema=self.schema or conn.schema,
- )
+ conn_args = {
+ "dsn": f"{conn.host}:{conn.port}",
+ "user": conn.login,
+ "password": conn.password,
+ "schema": self.schema or conn.schema,
+ }
# check for parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ["compression", "encryption", "json_lib",
"client_name"]:
diff --git a/airflow/providers/google/ads/hooks/ads.py
b/airflow/providers/google/ads/hooks/ads.py
index fd68d1ae53..dbeb42f1da 100644
--- a/airflow/providers/google/ads/hooks/ads.py
+++ b/airflow/providers/google/ads/hooks/ads.py
@@ -224,7 +224,9 @@ class GoogleAdsHook(BaseHook):
iterators = []
for client_id in client_ids:
- iterator = service.search(request=dict(customer_id=client_id, query=query, page_size=page_size))
+ iterator = service.search(
+ request={"customer_id": client_id, "query": query,
"page_size": page_size}
+ )
iterators.append(iterator)
self.log.info("Fetched Google Ads Iterators")
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py
index 870da1653c..c3ec7b8d1a 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py
@@ -100,19 +100,19 @@ def get_absolute_path(path):
return os.path.join(HOME_DIR, path)
-postgres_kwargs = dict(
- user=quote_plus(GCSQL_POSTGRES_USER),
- password=quote_plus(GCSQL_POSTGRES_PASSWORD),
- public_port=GCSQL_POSTGRES_PUBLIC_PORT,
- public_ip=quote_plus(GCSQL_POSTGRES_PUBLIC_IP),
- project_id=quote_plus(GCP_PROJECT_ID),
- location=quote_plus(GCP_REGION),
- instance=quote_plus(GCSQL_POSTGRES_INSTANCE_NAME_QUERY),
- database=quote_plus(GCSQL_POSTGRES_DATABASE_NAME),
- client_cert_file=quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_CERT_FILE)),
- client_key_file=quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_KEY_FILE)),
- server_ca_file=quote_plus(get_absolute_path(GCSQL_POSTGRES_SERVER_CA_FILE)),
-)
+postgres_kwargs = {
+ "user": quote_plus(GCSQL_POSTGRES_USER),
+ "password": quote_plus(GCSQL_POSTGRES_PASSWORD),
+ "public_port": GCSQL_POSTGRES_PUBLIC_PORT,
+ "public_ip": quote_plus(GCSQL_POSTGRES_PUBLIC_IP),
+ "project_id": quote_plus(GCP_PROJECT_ID),
+ "location": quote_plus(GCP_REGION),
+ "instance": quote_plus(GCSQL_POSTGRES_INSTANCE_NAME_QUERY),
+ "database": quote_plus(GCSQL_POSTGRES_DATABASE_NAME),
+ "client_cert_file":
quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_CERT_FILE)),
+ "client_key_file":
quote_plus(get_absolute_path(GCSQL_POSTGRES_CLIENT_KEY_FILE)),
+ "server_ca_file":
quote_plus(get_absolute_path(GCSQL_POSTGRES_SERVER_CA_FILE)),
+}
# The connections below are created using one of the standard approaches - via
environment
# variables named AIRFLOW_CONN_* . The connections can also be created in the
database
@@ -166,19 +166,19 @@ os.environ["AIRFLOW_CONN_PUBLIC_POSTGRES_TCP_SSL"] = (
"sslrootcert={server_ca_file}".format(**postgres_kwargs)
)
-mysql_kwargs = dict(
- user=quote_plus(GCSQL_MYSQL_USER),
- password=quote_plus(GCSQL_MYSQL_PASSWORD),
- public_port=GCSQL_MYSQL_PUBLIC_PORT,
- public_ip=quote_plus(GCSQL_MYSQL_PUBLIC_IP),
- project_id=quote_plus(GCP_PROJECT_ID),
- location=quote_plus(GCP_REGION),
- instance=quote_plus(GCSQL_MYSQL_INSTANCE_NAME_QUERY),
- database=quote_plus(GCSQL_MYSQL_DATABASE_NAME),
- client_cert_file=quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_CERT_FILE)),
- client_key_file=quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_KEY_FILE)),
- server_ca_file=quote_plus(get_absolute_path(GCSQL_MYSQL_SERVER_CA_FILE)),
-)
+mysql_kwargs = {
+ "user": quote_plus(GCSQL_MYSQL_USER),
+ "password": quote_plus(GCSQL_MYSQL_PASSWORD),
+ "public_port": GCSQL_MYSQL_PUBLIC_PORT,
+ "public_ip": quote_plus(GCSQL_MYSQL_PUBLIC_IP),
+ "project_id": quote_plus(GCP_PROJECT_ID),
+ "location": quote_plus(GCP_REGION),
+ "instance": quote_plus(GCSQL_MYSQL_INSTANCE_NAME_QUERY),
+ "database": quote_plus(GCSQL_MYSQL_DATABASE_NAME),
+ "client_cert_file":
quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_CERT_FILE)),
+ "client_key_file":
quote_plus(get_absolute_path(GCSQL_MYSQL_CLIENT_KEY_FILE)),
+ "server_ca_file":
quote_plus(get_absolute_path(GCSQL_MYSQL_SERVER_CA_FILE)),
+}
# MySQL: connect via proxy over TCP (specific proxy version)
os.environ["AIRFLOW_CONN_PROXY_MYSQL_TCP"] = (
diff --git a/airflow/providers/google/cloud/hooks/bigtable.py
b/airflow/providers/google/cloud/hooks/bigtable.py
index d01ab2d8ee..888d8d0236 100644
--- a/airflow/providers/google/cloud/hooks/bigtable.py
+++ b/airflow/providers/google/cloud/hooks/bigtable.py
@@ -148,11 +148,11 @@ class BigtableHook(GoogleBaseHook):
instance_labels,
)
- cluster_kwargs = dict(
- cluster_id=main_cluster_id,
- location_id=main_cluster_zone,
- default_storage_type=cluster_storage_type,
- )
+ cluster_kwargs = {
+ "cluster_id": main_cluster_id,
+ "location_id": main_cluster_zone,
+ "default_storage_type": cluster_storage_type,
+ }
if instance_type != enums.Instance.Type.DEVELOPMENT and cluster_nodes:
cluster_kwargs["serve_nodes"] = cluster_nodes
clusters = [instance.cluster(**cluster_kwargs)]
diff --git
a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
index 61d9225e89..ec452153f1 100644
--- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
@@ -517,7 +517,7 @@ class CloudDataTransferServiceAsyncHook(GoogleBaseAsyncHook):
"""
client = self.get_conn()
jobs_list_request = ListTransferJobsRequest(
- filter=json.dumps(dict(project_id=self.project_id, job_names=job_names))
+ filter=json.dumps({"project_id": self.project_id, "job_names": job_names})
)
return await client.list_transfer_jobs(request=jobs_list_request)
diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py
b/airflow/providers/google/cloud/hooks/compute_ssh.py
index 0486a3614b..0619d79974 100644
--- a/airflow/providers/google/cloud/hooks/compute_ssh.py
+++ b/airflow/providers/google/cloud/hooks/compute_ssh.py
@@ -314,7 +314,7 @@ class ComputeEngineSSHHook(SSHHook):
item["value"] = keys
break
else:
- new_dict = dict(key="ssh-keys", value=keys)
+ new_dict = {"key": "ssh-keys", "value": keys}
metadata["items"] = [new_dict]
self._compute_hook.set_instance_metadata(
diff --git a/airflow/providers/google/cloud/hooks/dataflow.py
b/airflow/providers/google/cloud/hooks/dataflow.py
index 559164a2d0..3d4f8d75f7 100644
--- a/airflow/providers/google/cloud/hooks/dataflow.py
+++ b/airflow/providers/google/cloud/hooks/dataflow.py
@@ -1236,12 +1236,12 @@ class AsyncDataflowHook(GoogleBaseAsyncHook):
client = await self.initialize_client(JobsV1Beta3AsyncClient)
request = GetJobRequest(
- dict(
- project_id=project_id,
- job_id=job_id,
- view=job_view,
- location=location,
- )
+ {
+ "project_id": project_id,
+ "job_id": job_id,
+ "view": job_view,
+ "location": location,
+ }
)
job = await client.get_job(
diff --git a/airflow/providers/google/cloud/hooks/dlp.py
b/airflow/providers/google/cloud/hooks/dlp.py
index 4795057989..d65d884558 100644
--- a/airflow/providers/google/cloud/hooks/dlp.py
+++ b/airflow/providers/google/cloud/hooks/dlp.py
@@ -148,9 +148,9 @@ class CloudDLPHook(GoogleBaseHook):
name = DlpServiceClient.dlp_job_path(project_id, dlp_job_id)
client.cancel_dlp_job(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -195,11 +195,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.create_deidentify_template(
- request=dict(
- parent=parent,
- deidentify_template=deidentify_template,
- template_id=template_id,
- ),
+ request={
+ "parent": parent,
+ "deidentify_template": deidentify_template,
+ "template_id": template_id,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -242,12 +242,12 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
job = client.create_dlp_job(
- request=dict(
- parent=parent,
- inspect_job=inspect_job,
- risk_job=risk_job,
- job_id=job_id,
- ),
+ request={
+ "parent": parent,
+ "inspect_job": inspect_job,
+ "risk_job": risk_job,
+ "job_id": job_id,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -321,11 +321,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.create_inspect_template(
- request=dict(
- parent=parent,
- inspect_template=inspect_template,
- template_id=template_id,
- ),
+ request={
+ "parent": parent,
+ "inspect_template": inspect_template,
+ "template_id": template_id,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -360,11 +360,11 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
return client.create_job_trigger(
- request=dict(
- parent=parent,
- job_trigger=job_trigger,
- trigger_id=trigger_id,
- ),
+ request={
+ "parent": parent,
+ "job_trigger": job_trigger,
+ "trigger_id": trigger_id,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -410,11 +410,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.create_stored_info_type(
- request=dict(
- parent=parent,
- config=config,
- stored_info_type_id=stored_info_type_id,
- ),
+ request={
+ "parent": parent,
+ "config": config,
+ "stored_info_type_id": stored_info_type_id,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -461,14 +461,14 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
return client.deidentify_content(
- request=dict(
- parent=parent,
- deidentify_config=deidentify_config,
- inspect_config=inspect_config,
- item=item,
- inspect_template_name=inspect_template_name,
- deidentify_template_name=deidentify_template_name,
- ),
+ request={
+ "parent": parent,
+ "deidentify_config": deidentify_config,
+ "inspect_config": inspect_config,
+ "item": item,
+ "inspect_template_name": inspect_template_name,
+ "deidentify_template_name": deidentify_template_name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -509,9 +509,9 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
client.delete_deidentify_template(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -550,9 +550,9 @@ class CloudDLPHook(GoogleBaseHook):
name = DlpServiceClient.dlp_job_path(project_id, dlp_job_id)
client.delete_dlp_job(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -599,9 +599,9 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
client.delete_inspect_template(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -637,9 +637,9 @@ class CloudDLPHook(GoogleBaseHook):
name = DlpServiceClient.job_trigger_path(project_id, job_trigger_id)
client.delete_job_trigger(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -686,9 +686,9 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
client.delete_stored_info_type(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -735,9 +735,9 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.get_deidentify_template(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -773,9 +773,9 @@ class CloudDLPHook(GoogleBaseHook):
name = DlpServiceClient.dlp_job_path(project_id, dlp_job_id)
return client.get_dlp_job(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -822,9 +822,9 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.get_inspect_template(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -860,9 +860,9 @@ class CloudDLPHook(GoogleBaseHook):
name = DlpServiceClient.job_trigger_path(project_id, job_trigger_id)
return client.get_job_trigger(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -909,9 +909,9 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.get_stored_info_type(
- request=dict(
- name=name,
- ),
+ request={
+ "name": name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -950,12 +950,12 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
return client.inspect_content(
- request=dict(
- parent=parent,
- inspect_config=inspect_config,
- item=item,
- inspect_template_name=inspect_template_name,
- ),
+ request={
+ "parent": parent,
+ "inspect_config": inspect_config,
+ "item": item,
+ "inspect_template_name": inspect_template_name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1003,11 +1003,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
results = client.list_deidentify_templates(
- request=dict(
- parent=parent,
- page_size=page_size,
- order_by=order_by,
- ),
+ request={
+ "parent": parent,
+ "page_size": page_size,
+ "order_by": order_by,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1050,13 +1050,13 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
results = client.list_dlp_jobs(
- request=dict(
- parent=parent,
- filter=results_filter,
- page_size=page_size,
- type_=job_type,
- order_by=order_by,
- ),
+ request={
+ "parent": parent,
+ "filter": results_filter,
+ "page_size": page_size,
+ "type_": job_type,
+ "order_by": order_by,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1088,10 +1088,10 @@ class CloudDLPHook(GoogleBaseHook):
client = self.get_conn()
return client.list_info_types(
- request=dict(
- language_code=language_code,
- filter=results_filter,
- ),
+ request={
+ "language_code": language_code,
+ "filter": results_filter,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1139,11 +1139,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
results = client.list_inspect_templates(
- request=dict(
- parent=parent,
- page_size=page_size,
- order_by=order_by,
- ),
+ request={
+ "parent": parent,
+ "page_size": page_size,
+ "order_by": order_by,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1183,12 +1183,12 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
results = client.list_job_triggers(
- request=dict(
- parent=parent,
- page_size=page_size,
- order_by=order_by,
- filter=results_filter,
- ),
+ request={
+ "parent": parent,
+ "page_size": page_size,
+ "order_by": order_by,
+ "filter": results_filter,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1237,11 +1237,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
results = client.list_stored_info_types(
- request=dict(
- parent=parent,
- page_size=page_size,
- order_by=order_by,
- ),
+ request={
+ "parent": parent,
+ "page_size": page_size,
+ "order_by": order_by,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1285,13 +1285,13 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
return client.redact_image(
- request=dict(
- parent=parent,
- inspect_config=inspect_config,
- image_redaction_configs=image_redaction_configs,
- include_findings=include_findings,
- byte_item=byte_item,
- ),
+ request={
+ "parent": parent,
+ "inspect_config": inspect_config,
+ "image_redaction_configs": image_redaction_configs,
+ "include_findings": include_findings,
+ "byte_item": byte_item,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1336,14 +1336,14 @@ class CloudDLPHook(GoogleBaseHook):
parent = DlpServiceClient.common_project_path(project_id)
return client.reidentify_content(
- request=dict(
- parent=parent,
- reidentify_config=reidentify_config,
- inspect_config=inspect_config,
- item=item,
- inspect_template_name=inspect_template_name,
- reidentify_template_name=reidentify_template_name,
- ),
+ request={
+ "parent": parent,
+ "reidentify_config": reidentify_config,
+ "inspect_config": inspect_config,
+ "item": item,
+ "inspect_template_name": inspect_template_name,
+ "reidentify_template_name": reidentify_template_name,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1394,11 +1394,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.update_deidentify_template(
- request=dict(
- name=name,
- deidentify_template=deidentify_template,
- update_mask=update_mask,
- ),
+ request={
+ "name": name,
+ "deidentify_template": deidentify_template,
+ "update_mask": update_mask,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1448,11 +1448,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.update_inspect_template(
- request=dict(
- name=name,
- inspect_template=inspect_template,
- update_mask=update_mask,
- ),
+ request={
+ "name": name,
+ "inspect_template": inspect_template,
+ "update_mask": update_mask,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
@@ -1552,11 +1552,11 @@ class CloudDLPHook(GoogleBaseHook):
raise AirflowException("Please provide either organization_id or
project_id.")
return client.update_stored_info_type(
- request=dict(
- name=name,
- config=config,
- update_mask=update_mask,
- ),
+ request={
+ "name": name,
+ "config": config,
+ "update_mask": update_mask,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
diff --git a/airflow/providers/google/cloud/hooks/os_login.py
b/airflow/providers/google/cloud/hooks/os_login.py
index 9a355d74d9..ceb8ff7438 100644
--- a/airflow/providers/google/cloud/hooks/os_login.py
+++ b/airflow/providers/google/cloud/hooks/os_login.py
@@ -92,11 +92,11 @@ class OSLoginHook(GoogleBaseHook):
"""
conn = self.get_conn()
return conn.import_ssh_public_key(
- request=dict(
- parent=f"users/{user}",
- ssh_public_key=ssh_public_key,
- project_id=project_id,
- ),
+ request={
+ "parent": f"users/{user}",
+ "ssh_public_key": ssh_public_key,
+ "project_id": project_id,
+ },
retry=retry,
timeout=timeout,
metadata=metadata,
diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py
b/airflow/providers/google/cloud/operators/cloud_sql.py
index 61275dbc29..3ddf366217 100644
--- a/airflow/providers/google/cloud/operators/cloud_sql.py
+++ b/airflow/providers/google/cloud/operators/cloud_sql.py
@@ -44,175 +44,180 @@ SETTINGS = "settings"
SETTINGS_VERSION = "settingsVersion"
CLOUD_SQL_CREATE_VALIDATION: Sequence[dict] = [
- dict(name="name", allow_empty=False),
- dict(
- name="settings",
- type="dict",
- fields=[
- dict(name="tier", allow_empty=False),
- dict(
- name="backupConfiguration",
- type="dict",
- fields=[
- dict(name="binaryLogEnabled", optional=True),
- dict(name="enabled", optional=True),
- dict(name="replicationLogArchivingEnabled", optional=True),
- dict(name="startTime", allow_empty=False, optional=True),
+ {"name": "name", "allow_empty": False},
+ {
+ "name": "settings",
+ "type": "dict",
+ "fields": [
+ {"name": "tier", "allow_empty": False},
+ {
+ "name": "backupConfiguration",
+ "type": "dict",
+ "fields": [
+ {"name": "binaryLogEnabled", "optional": True},
+ {"name": "enabled", "optional": True},
+ {"name": "replicationLogArchivingEnabled", "optional": True},
+ {"name": "startTime", "allow_empty": False, "optional": True},
],
- optional=True,
- ),
- dict(name="activationPolicy", allow_empty=False, optional=True),
- dict(name="authorizedGaeApplications", type="list", optional=True),
- dict(name="crashSafeReplicationEnabled", optional=True),
- dict(name="dataDiskSizeGb", optional=True),
- dict(name="dataDiskType", allow_empty=False, optional=True),
- dict(name="databaseFlags", type="list", optional=True),
- dict(
- name="ipConfiguration",
- type="dict",
- fields=[
- dict(
- name="authorizedNetworks",
- type="list",
- fields=[
- dict(name="expirationTime", optional=True),
- dict(name="name", allow_empty=False, optional=True),
- dict(name="value", allow_empty=False, optional=True),
+ "optional": True,
+ },
+ {"name": "activationPolicy", "allow_empty": False, "optional": True},
+ {"name": "authorizedGaeApplications", "type": "list", "optional": True},
+ {"name": "crashSafeReplicationEnabled", "optional": True},
+ {"name": "dataDiskSizeGb", "optional": True},
+ {"name": "dataDiskType", "allow_empty": False, "optional": True},
+ {"name": "databaseFlags", "type": "list", "optional": True},
+ {
+ "name": "ipConfiguration",
+ "type": "dict",
+ "fields": [
+ {
+ "name": "authorizedNetworks",
+ "type": "list",
+ "fields": [
+ {"name": "expirationTime", "optional": True},
+ {"name": "name", "allow_empty": False, "optional": True},
+ {"name": "value", "allow_empty": False, "optional": True},
],
- optional=True,
- ),
- dict(name="ipv4Enabled", optional=True),
- dict(name="privateNetwork", allow_empty=False, optional=True),
- dict(name="requireSsl", optional=True),
+ "optional": True,
+ },
+ {"name": "ipv4Enabled", "optional": True},
+ {"name": "privateNetwork", "allow_empty": False, "optional": True},
+ {"name": "requireSsl", "optional": True},
],
- optional=True,
- ),
- dict(
- name="locationPreference",
- type="dict",
- fields=[
- dict(name="followGaeApplication", allow_empty=False, optional=True),
- dict(name="zone", allow_empty=False, optional=True),
+ "optional": True,
+ },
+ {
+ "name": "locationPreference",
+ "type": "dict",
+ "fields": [
+ {"name": "followGaeApplication", "allow_empty": False, "optional": True},
+ {"name": "zone", "allow_empty": False, "optional": True},
],
- optional=True,
- ),
- dict(
- name="maintenanceWindow",
- type="dict",
- fields=[
- dict(name="hour", optional=True),
- dict(name="day", optional=True),
- dict(name="updateTrack", allow_empty=False, optional=True),
+ "optional": True,
+ },
+ {
+ "name": "maintenanceWindow",
+ "type": "dict",
+ "fields": [
+ {"name": "hour", "optional": True},
+ {"name": "day", "optional": True},
+ {"name": "updateTrack", "allow_empty": False, "optional": True},
],
- optional=True,
- ),
- dict(name="pricingPlan", allow_empty=False, optional=True),
- dict(name="replicationType", allow_empty=False, optional=True),
- dict(name="storageAutoResize", optional=True),
- dict(name="storageAutoResizeLimit", optional=True),
- dict(name="userLabels", type="dict", optional=True),
+ "optional": True,
+ },
+ {"name": "pricingPlan", "allow_empty": False, "optional": True},
+ {"name": "replicationType", "allow_empty": False, "optional": True},
+ {"name": "storageAutoResize", "optional": True},
+ {"name": "storageAutoResizeLimit", "optional": True},
+ {"name": "userLabels", "type": "dict", "optional": True},
],
- ),
- dict(name="databaseVersion", allow_empty=False, optional=True),
- dict(name="failoverReplica", type="dict", fields=[dict(name="name", allow_empty=False)], optional=True),
- dict(name="masterInstanceName", allow_empty=False, optional=True),
- dict(name="onPremisesConfiguration", type="dict", optional=True),
- dict(name="region", allow_empty=False, optional=True),
- dict(
- name="replicaConfiguration",
- type="dict",
- fields=[
- dict(name="failoverTarget", optional=True),
- dict(
- name="mysqlReplicaConfiguration",
- type="dict",
- fields=[
- dict(name="caCertificate", allow_empty=False, optional=True),
- dict(name="clientCertificate", allow_empty=False, optional=True),
- dict(name="clientKey", allow_empty=False, optional=True),
- dict(name="connectRetryInterval", optional=True),
- dict(name="dumpFilePath", allow_empty=False, optional=True),
- dict(name="masterHeartbeatPeriod", optional=True),
- dict(name="password", allow_empty=False, optional=True),
- dict(name="sslCipher", allow_empty=False, optional=True),
- dict(name="username", allow_empty=False, optional=True),
- dict(name="verifyServerCertificate", optional=True),
+ },
+ {"name": "databaseVersion", "allow_empty": False, "optional": True},
+ {
+ "name": "failoverReplica",
+ "type": "dict",
+ "fields": [{"name": "name", "allow_empty": False}],
+ "optional": True,
+ },
+ {"name": "masterInstanceName", "allow_empty": False, "optional": True},
+ {"name": "onPremisesConfiguration", "type": "dict", "optional": True},
+ {"name": "region", "allow_empty": False, "optional": True},
+ {
+ "name": "replicaConfiguration",
+ "type": "dict",
+ "fields": [
+ {"name": "failoverTarget", "optional": True},
+ {
+ "name": "mysqlReplicaConfiguration",
+ "type": "dict",
+ "fields": [
+ {"name": "caCertificate", "allow_empty": False, "optional": True},
+ {"name": "clientCertificate", "allow_empty": False, "optional": True},
+ {"name": "clientKey", "allow_empty": False, "optional": True},
+ {"name": "connectRetryInterval", "optional": True},
+ {"name": "dumpFilePath", "allow_empty": False, "optional": True},
+ {"name": "masterHeartbeatPeriod", "optional": True},
+ {"name": "password", "allow_empty": False, "optional": True},
+ {"name": "sslCipher", "allow_empty": False, "optional": True},
+ {"name": "username", "allow_empty": False, "optional": True},
+ {"name": "verifyServerCertificate", "optional": True},
],
- optional=True,
- ),
+ "optional": True,
+ },
],
- optional=True,
- ),
+ "optional": True,
+ },
]
CLOUD_SQL_EXPORT_VALIDATION = [
- dict(
- name="exportContext",
- type="dict",
- fields=[
- dict(name="fileType", allow_empty=False),
- dict(name="uri", allow_empty=False),
- dict(name="databases", optional=True, type="list"),
- dict(
- name="sqlExportOptions",
- type="dict",
- optional=True,
- fields=[
- dict(name="tables", optional=True, type="list"),
- dict(name="schemaOnly", optional=True),
- dict(
- name="mysqlExportOptions",
- type="dict",
- optional=True,
- fields=[dict(name="masterData")],
- ),
+ {
+ "name": "exportContext",
+ "type": "dict",
+ "fields": [
+ {"name": "fileType", "allow_empty": False},
+ {"name": "uri", "allow_empty": False},
+ {"name": "databases", "optional": True, "type": "list"},
+ {
+ "name": "sqlExportOptions",
+ "type": "dict",
+ "optional": True,
+ "fields": [
+ {"name": "tables", "optional": True, "type": "list"},
+ {"name": "schemaOnly", "optional": True},
+ {
+ "name": "mysqlExportOptions",
+ "type": "dict",
+ "optional": True,
+ "fields": [{"name": "masterData"}],
+ },
],
- ),
- dict(
- name="csvExportOptions",
- type="dict",
- optional=True,
- fields=[
- dict(name="selectQuery"),
- dict(name="escapeCharacter", optional=True),
- dict(name="quoteCharacter", optional=True),
- dict(name="fieldsTerminatedBy", optional=True),
- dict(name="linesTerminatedBy", optional=True),
+ },
+ {
+ "name": "csvExportOptions",
+ "type": "dict",
+ "optional": True,
+ "fields": [
+ {"name": "selectQuery"},
+ {"name": "escapeCharacter", "optional": True},
+ {"name": "quoteCharacter", "optional": True},
+ {"name": "fieldsTerminatedBy", "optional": True},
+ {"name": "linesTerminatedBy", "optional": True},
],
- ),
- dict(name="offload", optional=True),
+ },
+ {"name": "offload", "optional": True},
],
- )
+ }
]
CLOUD_SQL_IMPORT_VALIDATION = [
- dict(
- name="importContext",
- type="dict",
- fields=[
- dict(name="fileType", allow_empty=False),
- dict(name="uri", allow_empty=False),
- dict(name="database", optional=True, allow_empty=False),
- dict(name="importUser", optional=True),
- dict(
- name="csvImportOptions",
- type="dict",
- optional=True,
- fields=[dict(name="table"), dict(name="columns", type="list", optional=True)],
- ),
+ {
+ "name": "importContext",
+ "type": "dict",
+ "fields": [
+ {"name": "fileType", "allow_empty": False},
+ {"name": "uri", "allow_empty": False},
+ {"name": "database", "optional": True, "allow_empty": False},
+ {"name": "importUser", "optional": True},
+ {
+ "name": "csvImportOptions",
+ "type": "dict",
+ "optional": True,
+ "fields": [{"name": "table"}, {"name": "columns", "type": "list", "optional": True}],
+ },
],
- )
+ }
]
CLOUD_SQL_DATABASE_CREATE_VALIDATION = [
- dict(name="instance", allow_empty=False),
- dict(name="name", allow_empty=False),
- dict(name="project", allow_empty=False),
+ {"name": "instance", "allow_empty": False},
+ {"name": "name", "allow_empty": False},
+ {"name": "project", "allow_empty": False},
]
CLOUD_SQL_DATABASE_PATCH_VALIDATION = [
- dict(name="instance", optional=True),
- dict(name="name", optional=True),
- dict(name="project", optional=True),
- dict(name="etag", optional=True),
- dict(name="charset", optional=True),
- dict(name="collation", optional=True),
+ {"name": "instance", "optional": True},
+ {"name": "name", "optional": True},
+ {"name": "project", "optional": True},
+ {"name": "etag", "optional": True},
+ {"name": "charset", "optional": True},
+ {"name": "collation", "optional": True},
]
diff --git a/airflow/providers/google/cloud/operators/compute.py b/airflow/providers/google/cloud/operators/compute.py
index 2adb5b8439..4386798e9e 100644
--- a/airflow/providers/google/cloud/operators/compute.py
+++ b/airflow/providers/google/cloud/operators/compute.py
@@ -661,7 +661,7 @@ class ComputeEngineStopInstanceOperator(ComputeEngineBaseOperator):
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
- dict(name="machineType", regexp="^.+$"),
+ {"name": "machineType", "regexp": "^.+$"},
]
@@ -768,44 +768,44 @@ class ComputeEngineSetMachineTypeOperator(ComputeEngineBaseOperator):
GCE_INSTANCE_TEMPLATE_VALIDATION_PATCH_SPECIFICATION: list[dict[str, Any]] = [
- dict(name="name", regexp="^.+$"),
- dict(name="description", optional=True),
- dict(
- name="properties",
- type="dict",
- optional=True,
- fields=[
- dict(name="description", optional=True),
- dict(name="tags", optional=True, fields=[dict(name="items", optional=True)]),
- dict(name="machineType", optional=True),
- dict(name="canIpForward", optional=True),
- dict(name="networkInterfaces", optional=True),  # not validating deeper
- dict(name="disks", optional=True),  # not validating the array deeper
- dict(
- name="metadata",
- optional=True,
- fields=[
- dict(name="fingerprint", optional=True),
- dict(name="items", optional=True),
- dict(name="kind", optional=True),
+ {"name": "name", "regexp": "^.+$"},
+ {"name": "description", "optional": True},
+ {
+ "name": "properties",
+ "type": "dict",
+ "optional": True,
+ "fields": [
+ {"name": "description", "optional": True},
+ {"name": "tags", "optional": True, "fields": [{"name": "items", "optional": True}]},
+ {"name": "machineType", "optional": True},
+ {"name": "canIpForward", "optional": True},
+ {"name": "networkInterfaces", "optional": True},  # not validating deeper
+ {"name": "disks", "optional": True},  # not validating the array deeper
+ {
+ "name": "metadata",
+ "optional": True,
+ "fields": [
+ {"name": "fingerprint", "optional": True},
+ {"name": "items", "optional": True},
+ {"name": "kind", "optional": True},
],
- ),
- dict(name="serviceAccounts", optional=True),  # not validating deeper
- dict(
- name="scheduling",
- optional=True,
- fields=[
- dict(name="onHostMaintenance", optional=True),
- dict(name="automaticRestart", optional=True),
- dict(name="preemptible", optional=True),
- dict(name="nodeAffinities", optional=True),  # not validating deeper
+ },
+ {"name": "serviceAccounts", "optional": True},  # not validating deeper
+ {
+ "name": "scheduling",
+ "optional": True,
+ "fields": [
+ {"name": "onHostMaintenance", "optional": True},
+ {"name": "automaticRestart", "optional": True},
+ {"name": "preemptible", "optional": True},
+ {"name": "nodeAffinities", "optional": True},  # not validating deeper
],
- ),
- dict(name="labels", optional=True),
- dict(name="guestAccelerators", optional=True),  # not validating deeper
- dict(name="minCpuPlatform", optional=True),
+ },
+ {"name": "labels", "optional": True},
+ {"name": "guestAccelerators", "optional": True},  # not validating deeper
+ {"name": "minCpuPlatform", "optional": True},
],
- ),
+ },
]
GCE_INSTANCE_FIELDS_TO_SANITIZE = [
diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py
index f58be0a017..5591465f01 100644
--- a/airflow/providers/google/cloud/operators/functions.py
+++ b/airflow/providers/google/cloud/operators/functions.py
@@ -51,54 +51,54 @@ def _validate_max_instances(value):
CLOUD_FUNCTION_VALIDATION: list[dict[str, Any]] = [
- dict(name="name", regexp="^.+$"),
- dict(name="description", regexp="^.+$", optional=True),
- dict(name="entryPoint", regexp=r"^.+$", optional=True),
- dict(name="runtime", regexp=r"^.+$", optional=True),
- dict(name="timeout", regexp=r"^.+$", optional=True),
- dict(name="availableMemoryMb", custom_validation=_validate_available_memory_in_mb, optional=True),
- dict(name="labels", optional=True),
- dict(name="environmentVariables", optional=True),
- dict(name="network", regexp=r"^.+$", optional=True),
- dict(name="maxInstances", optional=True, custom_validation=_validate_max_instances),
- dict(
- name="source_code",
- type="union",
- fields=[
- dict(name="sourceArchiveUrl", regexp=r"^.+$"),
- dict(name="sourceRepositoryUrl", regexp=r"^.+$", api_version="v1beta2"),
- dict(name="sourceRepository", type="dict", fields=[dict(name="url", regexp=r"^.+$")]),
- dict(name="sourceUploadUrl"),
+ {"name": "name", "regexp": "^.+$"},
+ {"name": "description", "regexp": "^.+$", "optional": True},
+ {"name": "entryPoint", "regexp": r"^.+$", "optional": True},
+ {"name": "runtime", "regexp": r"^.+$", "optional": True},
+ {"name": "timeout", "regexp": r"^.+$", "optional": True},
+ {"name": "availableMemoryMb", "custom_validation": _validate_available_memory_in_mb, "optional": True},
+ {"name": "labels", "optional": True},
+ {"name": "environmentVariables", "optional": True},
+ {"name": "network", "regexp": r"^.+$", "optional": True},
+ {"name": "maxInstances", "optional": True, "custom_validation": _validate_max_instances},
+ {
+ "name": "source_code",
+ "type": "union",
+ "fields": [
+ {"name": "sourceArchiveUrl", "regexp": r"^.+$"},
+ {"name": "sourceRepositoryUrl", "regexp": r"^.+$", "api_version": "v1beta2"},
+ {"name": "sourceRepository", "type": "dict", "fields": [{"name": "url", "regexp": r"^.+$"}]},
+ {"name": "sourceUploadUrl"},
],
- ),
- dict(
- name="trigger",
- type="union",
- fields=[
- dict(
- name="httpsTrigger",
- type="dict",
- fields=[
+ },
+ {
+ "name": "trigger",
+ "type": "union",
+ "fields": [
+ {
+ "name": "httpsTrigger",
+ "type": "dict",
+ "fields": [
# This dict should be empty at input (url is added at output)
],
- ),
- dict(
- name="eventTrigger",
- type="dict",
- fields=[
- dict(name="eventType", regexp=r"^.+$"),
- dict(name="resource", regexp=r"^.+$"),
- dict(name="service", regexp=r"^.+$", optional=True),
- dict(
- name="failurePolicy",
- type="dict",
- optional=True,
- fields=[dict(name="retry", type="dict", optional=True)],
- ),
+ },
+ {
+ "name": "eventTrigger",
+ "type": "dict",
+ "fields": [
+ {"name": "eventType", "regexp": r"^.+$"},
+ {"name": "resource", "regexp": r"^.+$"},
+ {"name": "service", "regexp": r"^.+$", "optional": True},
+ {
+ "name": "failurePolicy",
+ "type": "dict",
+ "optional": True,
+ "fields": [{"name": "retry", "type": "dict", "optional": True}],
+ },
],
- ),
+ },
],
- ),
+ },
]
diff --git a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
index 2ea2bed9d8..c27e6ba507 100644
--- a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
@@ -97,7 +97,7 @@ class CloudStorageTransferServiceCreateJobsTrigger(BaseTrigger):
)
return
except (GoogleAPIError, AirflowException) as ex:
- yield TriggerEvent(dict(status="error", message=str(ex)))
+ yield TriggerEvent({"status": "error", "message": str(ex)})
return
jobs_total = len(self.job_names)
diff --git a/airflow/providers/google/cloud/utils/field_validator.py b/airflow/providers/google/cloud/utils/field_validator.py
index 1e853d44ed..87aee5d7af 100644
--- a/airflow/providers/google/cloud/utils/field_validator.py
+++ b/airflow/providers/google/cloud/utils/field_validator.py
@@ -158,20 +158,20 @@ def _int_greater_than_zero(value):
EXAMPLE_VALIDATION_SPECIFICATION = [
- dict(name="name", allow_empty=False),
- dict(name="description", allow_empty=False, optional=True),
- dict(name="availableMemoryMb", custom_validation=_int_greater_than_zero, optional=True),
- dict(name="labels", optional=True, type="dict"),
- dict(
- name="an_union",
- type="union",
- fields=[
- dict(name="variant_1", regexp=r"^.+$"),
- dict(name="variant_2", regexp=r"^.+$", api_version="v1beta2"),
- dict(name="variant_3", type="dict", fields=[dict(name="url", regexp=r"^.+$")]),
- dict(name="variant_4"),
+ {"name": "name", "allow_empty": False},
+ {"name": "description", "allow_empty": False, "optional": True},
+ {"name": "availableMemoryMb", "custom_validation": _int_greater_than_zero, "optional": True},
+ {"name": "labels", "optional": True, "type": "dict"},
+ {
+ "name": "an_union",
+ "type": "union",
+ "fields": [
+ {"name": "variant_1", "regexp": r"^.+$"},
+ {"name": "variant_2", "regexp": r"^.+$", "api_version": "v1beta2"},
+ {"name": "variant_3", "type": "dict", "fields": [{"name": "url", "regexp": r"^.+$"}]},
+ {"name": "variant_4"},
],
- ),
+ },
]
diff --git a/airflow/providers/postgres/hooks/postgres.py b/airflow/providers/postgres/hooks/postgres.py
index 87edd3c87c..b6b214e990 100644
--- a/airflow/providers/postgres/hooks/postgres.py
+++ b/airflow/providers/postgres/hooks/postgres.py
@@ -127,13 +127,13 @@ class PostgresHook(DbApiHook):
if conn.extra_dejson.get("iam", False):
conn.login, conn.password, conn.port = self.get_iam_token(conn)
- conn_args = dict(
- host=conn.host,
- user=conn.login,
- password=conn.password,
- dbname=self.database or conn.schema,
- port=conn.port,
- )
+ conn_args = {
+ "host": conn.host,
+ "user": conn.login,
+ "password": conn.password,
+ "dbname": self.database or conn.schema,
+ "port": conn.port,
+ }
raw_cursor = conn.extra_dejson.get("cursor", False)
if raw_cursor:
conn_args["cursor_factory"] = self._get_cursor(raw_cursor)
diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/airflow/providers/snowflake/hooks/snowflake.py
index 3cb0c2a704..e42a0b1b57 100644
--- a/airflow/providers/snowflake/hooks/snowflake.py
+++ b/airflow/providers/snowflake/hooks/snowflake.py
@@ -312,11 +312,11 @@ class SnowflakeHook(DbApiHook):
engine_kwargs = engine_kwargs or {}
conn_params = self._get_conn_params()
if "insecure_mode" in conn_params:
- engine_kwargs.setdefault("connect_args", dict())
+ engine_kwargs.setdefault("connect_args", {})
engine_kwargs["connect_args"]["insecure_mode"] = True
for key in ["session_parameters", "private_key"]:
if conn_params.get(key):
- engine_kwargs.setdefault("connect_args", dict())
+ engine_kwargs.setdefault("connect_args", {})
engine_kwargs["connect_args"][key] = conn_params[key]
return create_engine(self._conn_params_to_sqlalchemy_uri(conn_params), **engine_kwargs)
diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py
index 68fa875993..d643b014e4 100644
--- a/airflow/providers/ssh/hooks/ssh.py
+++ b/airflow/providers/ssh/hooks/ssh.py
@@ -314,16 +314,16 @@ class SSHHook(BaseHook):
f"[{self.remote_host}]:{self.port}",
self.host_key.get_name(), self.host_key
)
- connect_kwargs: dict[str, Any] = dict(
- hostname=self.remote_host,
- username=self.username,
- timeout=self.conn_timeout,
- compress=self.compress,
- port=self.port,
- sock=self.host_proxy,
- look_for_keys=self.look_for_keys,
- banner_timeout=self.banner_timeout,
- )
+ connect_kwargs: dict[str, Any] = {
+ "hostname": self.remote_host,
+ "username": self.username,
+ "timeout": self.conn_timeout,
+ "compress": self.compress,
+ "port": self.port,
+ "sock": self.host_proxy,
+ "look_for_keys": self.look_for_keys,
+ "banner_timeout": self.banner_timeout,
+ }
if self.password:
password = self.password.strip()
@@ -397,15 +397,15 @@ class SSHHook(BaseHook):
else:
local_bind_address = ("localhost",)
- tunnel_kwargs = dict(
- ssh_port=self.port,
- ssh_username=self.username,
- ssh_pkey=self.key_file or self.pkey,
- ssh_proxy=self.host_proxy,
- local_bind_address=local_bind_address,
- remote_bind_address=(remote_host, remote_port),
- logger=self.log,
- )
+ tunnel_kwargs = {
+ "ssh_port": self.port,
+ "ssh_username": self.username,
+ "ssh_pkey": self.key_file or self.pkey,
+ "ssh_proxy": self.host_proxy,
+ "local_bind_address": local_bind_address,
+ "remote_bind_address": (remote_host, remote_port),
+ "logger": self.log,
+ }
if self.password:
password = self.password.strip()