This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 353b1482d3 remove unnecessary and rewrite it using list in providers (#33763)
353b1482d3 is described below
commit 353b1482d3720c7e962022f25c7e5d3e105ed4f0
Author: Hussein Awala <[email protected]>
AuthorDate: Sat Aug 26 09:19:42 2023 +0200
remove unnecessary and rewrite it using list in providers (#33763)
---
airflow/providers/amazon/aws/sensors/s3.py | 2 +-
airflow/providers/elasticsearch/log/es_response.py | 2 +-
airflow/providers/google/cloud/transfers/sql_to_gcs.py | 2 +-
.../providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py | 2 +-
airflow/providers/oracle/transfers/oracle_to_oracle.py | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/airflow/providers/amazon/aws/sensors/s3.py b/airflow/providers/amazon/aws/sensors/s3.py
index 704fcd95d9..0afa2cf704 100644
--- a/airflow/providers/amazon/aws/sensors/s3.py
+++ b/airflow/providers/amazon/aws/sensors/s3.py
@@ -120,7 +120,7 @@ class S3KeySensor(BaseSensorOperator):
return False
# Reduce the set of metadata to size only
- files = list(map(lambda f: {"Size": f["Size"]}, key_matches))
+ files = [{"Size": f["Size"]} for f in key_matches]
else:
obj = self.hook.head_object(key, bucket_name)
if obj is None:
diff --git a/airflow/providers/elasticsearch/log/es_response.py b/airflow/providers/elasticsearch/log/es_response.py
index e39496deff..ce11c715aa 100644
--- a/airflow/providers/elasticsearch/log/es_response.py
+++ b/airflow/providers/elasticsearch/log/es_response.py
@@ -38,7 +38,7 @@ class AttributeList:
return _wrap(val)
def __iter__(self):
- return map(lambda i: _wrap(i), self._l_)
+ return (_wrap(i) for i in self._l_)
def __bool__(self):
return bool(self._l_)
diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
index e696487f65..c1b90a1bff 100644
--- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
@@ -235,7 +235,7 @@ class BaseSQLToGCSOperator(BaseOperator):
names in GCS, and values are file handles to local files that
contain the data for the GCS objects.
"""
- org_schema = list(map(lambda schema_tuple: schema_tuple[0], cursor.description))
+ org_schema = [schema_tuple[0] for schema_tuple in cursor.description]
schema = [column for column in org_schema if column not in self.exclude_columns]
col_type_dict = self._get_col_type_dict()
diff --git a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
index f891e080a4..7a73e74835 100644
--- a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
+++ b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
@@ -87,7 +87,7 @@ class OracleToAzureDataLakeOperator(BaseOperator):
quotechar=self.quotechar,
quoting=self.quoting,
)
- csv_writer.writerow(map(lambda field: field[0], cursor.description))
+ csv_writer.writerow(field[0] for field in cursor.description)
csv_writer.writerows(cursor)
csvfile.flush()
diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py
index 192df8c78d..ad66b2530f 100644
--- a/airflow/providers/oracle/transfers/oracle_to_oracle.py
+++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py
@@ -69,7 +69,7 @@ class OracleToOracleOperator(BaseOperator):
cursor = src_conn.cursor()
self.log.info("Querying data from source: %s", self.oracle_source_conn_id)
cursor.execute(self.source_sql, self.source_sql_params)
- target_fields = list(map(lambda field: field[0], cursor.description))
+ target_fields = [field[0] for field in cursor.description]
rows_total = 0
for rows in iter(lambda: cursor.fetchmany(self.rows_chunk), []):