This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 2a00f18ebb E731: replace lambda by a def method in Airflow providers (#33757)
2a00f18ebb is described below

commit 2a00f18ebb7f0e286955a946844c14b72fcc3b05
Author: Hussein Awala <[email protected]>
AuthorDate: Sat Aug 26 09:15:13 2023 +0200

    E731: replace lambda by a def method in Airflow providers (#33757)
---
 airflow/providers/apache/hive/macros/hive.py         | 13 ++++++++++---
 airflow/providers/google/cloud/operators/dataflow.py |  4 +++-
 airflow/providers/ssh/hooks/ssh.py                   |  7 ++++---
 3 files changed, 17 insertions(+), 7 deletions(-)

diff --git a/airflow/providers/apache/hive/macros/hive.py b/airflow/providers/apache/hive/macros/hive.py
index 2b083e20fa..71e0661697 100644
--- a/airflow/providers/apache/hive/macros/hive.py
+++ b/airflow/providers/apache/hive/macros/hive.py
@@ -61,9 +61,16 @@ def _closest_date(target_dt, date_list, before_target=None) -> datetime.date | N
     :param before_target: closest before or after the target
     :returns: The closest date
     """
-    time_before = lambda d: target_dt - d if d <= target_dt else datetime.timedelta.max
-    time_after = lambda d: d - target_dt if d >= target_dt else datetime.timedelta.max
-    any_time = lambda d: target_dt - d if d < target_dt else d - target_dt
+
+    def time_before(d):
+        return target_dt - d if d <= target_dt else datetime.timedelta.max
+
+    def time_after(d):
+        return d - target_dt if d >= target_dt else datetime.timedelta.max
+
+    def any_time(d):
+        return target_dt - d if d < target_dt else d - target_dt
+
     if before_target is None:
         return min(date_list, key=any_time).date()
     if before_target:
diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py
index e6710c9f9a..c9daa16966 100644
--- a/airflow/providers/google/cloud/operators/dataflow.py
+++ b/airflow/providers/google/cloud/operators/dataflow.py
@@ -1188,7 +1188,9 @@ class DataflowCreatePythonJobOperator(GoogleCloudBaseOperator):
         pipeline_options.update(self.options)
 
         # Convert argument names from lowerCamelCase to snake case.
-        camel_to_snake = lambda name: re.sub(r"[A-Z]", lambda x: "_" + x.group(0).lower(), name)
+        def camel_to_snake(name):
+            return re.sub("[A-Z]", lambda x: "_" + x.group(0).lower(), name)
+
         formatted_pipeline_options = {camel_to_snake(key): pipeline_options[key] for key in pipeline_options}
 
         def set_current_job_id(job_id):
diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py
index 6273ebd0e1..68fa875993 100644
--- a/airflow/providers/ssh/hooks/ssh.py
+++ b/airflow/providers/ssh/hooks/ssh.py
@@ -338,9 +338,10 @@ class SSHHook(BaseHook):
         if self.disabled_algorithms:
             connect_kwargs.update(disabled_algorithms=self.disabled_algorithms)
 
-        log_before_sleep = lambda retry_state: self.log.info(
-            "Failed to connect. Sleeping before retry attempt %d", 
retry_state.attempt_number
-        )
+        def log_before_sleep(retry_state):
+            return self.log.info(
+                "Failed to connect. Sleeping before retry attempt %d", 
retry_state.attempt_number
+            )
 
         for attempt in Retrying(
             reraise=True,

Reply via email to