This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 00e5976bde remove soft_fail part2 (#41727)
00e5976bde is described below

commit 00e5976bde0f4a5e41c5cd25f111084e159f9c23
Author: raphaelauv <[email protected]>
AuthorDate: Sun Aug 25 18:26:15 2024 +0200

    remove soft_fail part2 (#41727)
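
    Second batch of soft_fail shim removals: every block deleted here caught an
    error raised inside poke() and re-raised it as AirflowSkipException when the
    sensor was created with soft_fail=True. Per the TODO dropped from the HTTP
    sensor, the shim was only needed while the minimum supported Airflow version
    was 2.7.1 or lower, and the providers' floor has since moved past that (the
    amazon entry in generated/provider_dependencies.json below already pins
    apache-airflow>=2.8.0), so the duplicated handling can go.
    
    A minimal sketch of the pattern being removed is below; MySensor and its
    _do_check() helper are hypothetical and exist only to illustrate the shim,
    they are not code from this repository:
    
        from airflow.exceptions import AirflowSkipException
        from airflow.sensors.base import BaseSensorOperator
    
    
        class MySensor(BaseSensorOperator):
            """Hypothetical sensor used only to illustrate the removed shim."""
    
            def poke(self, context) -> bool:
                try:
                    # Provider-specific check (FTP mod-time lookup, HTTP
                    # response check, trigger construction, ...).
                    return self._do_check()
                except ConnectionError as e:
                    # The shim deleted across the sensors below: turn a
                    # poke-time failure into a skip when soft_fail=True.
                    if self.soft_fail:
                        raise AirflowSkipException from e
                    raise
    
            def _do_check(self) -> bool:
                # Hypothetical stand-in for the real provider check.
                raise ConnectionError("transient failure")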
    
    Co-authored-by: raphaelauv <[email protected]>
---
 airflow/providers/amazon/provider.yaml    |  1 -
 airflow/providers/ftp/sensors/ftp.py      |  3 ---
 airflow/providers/http/sensors/http.py    |  6 +-----
 airflow/sensors/time_delta.py             |  9 ++-------
 generated/provider_dependencies.json      |  1 -
 tests/providers/ftp/sensors/test_ftp.py   |  5 ++---
 tests/providers/http/sensors/test_http.py | 29 +----------------------------
 7 files changed, 6 insertions(+), 48 deletions(-)

diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml
index 074a8afa8c..54d36ca420 100644
--- a/airflow/providers/amazon/provider.yaml
+++ b/airflow/providers/amazon/provider.yaml
@@ -94,7 +94,6 @@ dependencies:
   - apache-airflow-providers-common-compat>=1.1.0
   - apache-airflow-providers-common-sql>=1.3.1
   - apache-airflow-providers-http
-  - apache-airflow-providers-common-compat>=1.1.0
   # We should update minimum version of boto3 and here regularly to avoid `pip` backtracking with the number
   # of candidates to consider. Make sure to configure boto3 version here as well as in all the tools below
   # in the `devel-dependencies` section to be the same minimum version.
diff --git a/airflow/providers/ftp/sensors/ftp.py b/airflow/providers/ftp/sensors/ftp.py
index 847cf76353..1ab56c56c0 100644
--- a/airflow/providers/ftp/sensors/ftp.py
+++ b/airflow/providers/ftp/sensors/ftp.py
@@ -21,7 +21,6 @@ import ftplib  # nosec: B402
 import re
 from typing import TYPE_CHECKING, Sequence
 
-from airflow.exceptions import AirflowSkipException
 from airflow.providers.ftp.hooks.ftp import FTPHook, FTPSHook
 from airflow.sensors.base import BaseSensorOperator
 
@@ -83,8 +82,6 @@ class FTPSensor(BaseSensorOperator):
                 if (error_code != 550) and (
                     self.fail_on_transient_errors or (error_code not in self.transient_errors)
                 ):
-                    if self.soft_fail:
-                        raise AirflowSkipException from e
                     raise e
 
                 return False
diff --git a/airflow/providers/http/sensors/http.py b/airflow/providers/http/sensors/http.py
index 3691764333..33b5e1d4de 100644
--- a/airflow/providers/http/sensors/http.py
+++ b/airflow/providers/http/sensors/http.py
@@ -21,7 +21,7 @@ from datetime import timedelta
 from typing import TYPE_CHECKING, Any, Callable, Sequence
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowSkipException
+from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
 from airflow.providers.http.triggers.http import HttpSensorTrigger
 from airflow.sensors.base import BaseSensorOperator
@@ -151,10 +151,6 @@ class HttpSensor(BaseSensorOperator):
         except AirflowException as exc:
             if str(exc).startswith(self.response_error_codes_allowlist):
                 return False
-            # TODO: remove this if block when min_airflow_version is set to higher than 2.7.1
-            if self.soft_fail:
-                raise AirflowSkipException from exc
-
             raise exc
 
         return True
diff --git a/airflow/sensors/time_delta.py b/airflow/sensors/time_delta.py
index dc78a0e33b..1cf3861a54 100644
--- a/airflow/sensors/time_delta.py
+++ b/airflow/sensors/time_delta.py
@@ -22,7 +22,6 @@ from time import sleep
 from typing import TYPE_CHECKING, Any, NoReturn
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowSkipException
 from airflow.sensors.base import BaseSensorOperator
 from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
 from airflow.utils import timezone
@@ -80,12 +79,8 @@ class TimeDeltaSensorAsync(TimeDeltaSensor):
         if timezone.utcnow() > target_dttm:
             # If the target datetime is in the past, return immediately
             return True
-        try:
-            trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
-        except (TypeError, ValueError) as e:
-            if self.soft_fail:
-                raise AirflowSkipException("Skipping due to soft_fail is set to True.") from e
-            raise
+
+        trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
 
         self.defer(trigger=trigger, method_name="execute_complete")
 
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index 4d93d34f7c..3d0e7841c5 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -29,7 +29,6 @@
     "deps": [
       "PyAthena>=3.0.10",
       "apache-airflow-providers-common-compat>=1.1.0",
-      "apache-airflow-providers-common-compat>=1.1.0",
       "apache-airflow-providers-common-sql>=1.3.1",
       "apache-airflow-providers-http",
       "apache-airflow>=2.8.0",
diff --git a/tests/providers/ftp/sensors/test_ftp.py b/tests/providers/ftp/sensors/test_ftp.py
index 107b232371..0a71fbd594 100644
--- a/tests/providers/ftp/sensors/test_ftp.py
+++ b/tests/providers/ftp/sensors/test_ftp.py
@@ -22,7 +22,6 @@ from unittest import mock
 
 import pytest
 
-from airflow.exceptions import AirflowSkipException
 from airflow.providers.ftp.hooks.ftp import FTPHook
 from airflow.providers.ftp.sensors.ftp import FTPSensor
 
@@ -72,13 +71,13 @@ class TestFTPSensor:
 
     @mock.patch("airflow.providers.ftp.sensors.ftp.FTPHook", spec=FTPHook)
     def test_poke_fail_on_transient_error_and_skip(self, mock_hook):
-        op = FTPSensor(path="foobar.json", ftp_conn_id="bob_ftp", task_id="test_task", soft_fail=True)
+        op = FTPSensor(path="foobar.json", ftp_conn_id="bob_ftp", task_id="test_task")
 
         mock_hook.return_value.__enter__.return_value.get_mod_time.side_effect = error_perm(
             "434: Host unavailable"
         )
 
-        with pytest.raises(AirflowSkipException):
+        with pytest.raises(error_perm):
             op.execute(None)
 
     @mock.patch("airflow.providers.ftp.sensors.ftp.FTPHook", spec=FTPHook)
diff --git a/tests/providers/http/sensors/test_http.py b/tests/providers/http/sensors/test_http.py
index 2b499a1d68..e19a4c929a 100644
--- a/tests/providers/http/sensors/test_http.py
+++ b/tests/providers/http/sensors/test_http.py
@@ -23,7 +23,7 @@ from unittest.mock import patch
 import pytest
 import requests
 
-from airflow.exceptions import AirflowException, AirflowSensorTimeout, AirflowSkipException, TaskDeferred
+from airflow.exceptions import AirflowException, AirflowSensorTimeout, TaskDeferred
 from airflow.models.dag import DAG
 from airflow.providers.http.operators.http import HttpOperator
 from airflow.providers.http.sensors.http import HttpSensor
@@ -65,33 +65,6 @@ class TestHttpSensor:
         with pytest.raises(AirflowException, match="AirflowException raised here!"):
             task.execute(context={})
 
-    @patch("airflow.providers.http.hooks.http.requests.Session.send")
-    def test_poke_exception_with_soft_fail(self, mock_session_send, create_task_of_operator):
-        """
-        Exception occurs in poke function should be skipped if soft_fail is True.
-        """
-        response = requests.Response()
-        response.status_code = 200
-        mock_session_send.return_value = response
-
-        def resp_check(_):
-            raise AirflowException("AirflowException raised here!")
-
-        task = create_task_of_operator(
-            HttpSensor,
-            dag_id="http_sensor_poke_exception",
-            task_id="http_sensor_poke_exception",
-            http_conn_id="http_default",
-            endpoint="",
-            request_params={},
-            response_check=resp_check,
-            timeout=5,
-            poke_interval=1,
-            soft_fail=True,
-        )
-        with pytest.raises(AirflowSkipException):
-            task.execute(context={})
-
     @patch("airflow.providers.http.hooks.http.requests.Session.send")
     def test_poke_continues_for_http_500_with_extra_options_check_response_false(
         self,
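
For context on where soft_fail is actually set by users, here is a minimal, hypothetical DAG snippet (dag_id, connection id, endpoint and timings are made up and not part of this commit). soft_fail remains an argument of BaseSensorOperator, documented there as marking the task SKIPPED on failure; this change only removes the duplicated per-sensor re-implementation of that conversion inside poke():

    from datetime import datetime

    from airflow.models.dag import DAG
    from airflow.providers.http.sensors.http import HttpSensor

    with DAG(dag_id="soft_fail_example", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        wait_for_endpoint = HttpSensor(
            task_id="wait_for_endpoint",
            http_conn_id="http_default",
            endpoint="health",
            poke_interval=30,
            timeout=300,
            # soft_fail is consumed by the base sensor machinery; after this
            # change the provider sensors no longer raise AirflowSkipException
            # themselves (see the diff above).
            soft_fail=True,
        )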
