This is an automated email from the ASF dual-hosted git repository.
ferruzzi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new ba71910de05 Enable PT011 rule to provider tests (#57528)
ba71910de05 is described below
commit ba71910de05b51de050460838b06ca40a532a597
Author: Xch1 <[email protected]>
AuthorDate: Fri Oct 31 01:46:30 2025 +0800
Enable PT011 rule to provider tests (#57528)
Signed-off-by: Xch1 <[email protected]>
---
.../spark/tests/unit/apache/spark/hooks/test_spark_submit.py | 7 +++++--
.../pagerduty/tests/unit/pagerduty/hooks/test_pagerduty_events.py | 7 +++++--
task-sdk/tests/task_sdk/definitions/test_dag.py | 5 +++--
3 files changed, 13 insertions(+), 6 deletions(-)
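Editor's note (not part of the commit): ruff's PT011 rule ("pytest-raises-too-broad") flags pytest.raises used with a broad exception such as ValueError and no match argument, since a bare check can also pass on an unrelated error. Below is a minimal sketch of the pattern the diff adopts, built around a hypothetical parse_port helper rather than any code in this commit:

import pytest


def parse_port(value: str) -> int:
    # Hypothetical helper, used only to illustrate the PT011 pattern.
    port = int(value)
    if not 0 < port < 65536:
        raise ValueError(f"Port out of range: {port}")
    return port


def test_parse_port_out_of_range():
    # pytest.raises(ValueError) alone would also pass if int() failed on
    # non-numeric input; pinning the message keeps the test specific.
    with pytest.raises(ValueError, match="Port out of range"):
        parse_port("70000")

The match pattern only has to be found somewhere in the exception text, so a stable fragment of the message is usually enough.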
diff --git a/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py b/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py
index e19a7f73611..70a595a1827 100644
--- a/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py
+++ b/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py
@@ -536,11 +536,14 @@ class TestSparkSubmitHook:
             SparkSubmitHook(conn_id="spark_binary_set", spark_binary="another-custom-spark-submit")

     def test_resolve_connection_spark_binary_extra_not_allowed_runtime_error(self):
-        with pytest.raises(ValueError):
+        with pytest.raises(
+            ValueError,
+            match="Please make sure your spark binary is one of the allowed ones and that it is available on the PATH",
+        ):
             SparkSubmitHook(conn_id="spark_custom_binary_set")

     def test_resolve_connection_spark_home_not_allowed_runtime_error(self):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="The `spark-home` extra is not allowed any more"):
             SparkSubmitHook(conn_id="spark_home_set")

     def test_resolve_connection_spark_binary_default_value_override(self):
diff --git a/providers/pagerduty/tests/unit/pagerduty/hooks/test_pagerduty_events.py b/providers/pagerduty/tests/unit/pagerduty/hooks/test_pagerduty_events.py
index f2665bf5151..6dd218327b9 100644
--- a/providers/pagerduty/tests/unit/pagerduty/hooks/test_pagerduty_events.py
+++ b/providers/pagerduty/tests/unit/pagerduty/hooks/test_pagerduty_events.py
@@ -69,11 +69,14 @@ class TestPrepareEventData:
         assert even_data == exp_event_data

     def test_prepare_event_data_invalid_action(self):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="Event action must be one of: trigger, acknowledge, resolve"):
             prepare_event_data(summary="test", severity="error", action="should_raise")

     def test_prepare_event_missing_dedup_key(self):
-        with pytest.raises(ValueError):
+        with pytest.raises(
+            ValueError,
+            match="The dedup_key property is required for action=acknowledge events, and it must be a string",
+        ):
             prepare_event_data(summary="test", severity="error", action="acknowledge")
diff --git a/task-sdk/tests/task_sdk/definitions/test_dag.py b/task-sdk/tests/task_sdk/definitions/test_dag.py
index 6e6ef551d07..d711f300ce8 100644
--- a/task-sdk/tests/task_sdk/definitions/test_dag.py
+++ b/task-sdk/tests/task_sdk/definitions/test_dag.py
@@ -480,9 +480,10 @@ def test_continuous_schedule_interval_limits_max_active_runs(max_active_runs):
 def test_continuous_schedule_interval_limits_max_active_runs_error():
-    with pytest.raises(ValueError) as ctx:
+    with pytest.raises(
+        ValueError, match="Invalid max_active_runs: ContinuousTimetable requires max_active_runs <= 1"
+    ):
         DAG(dag_id="continuous", schedule="@continuous", max_active_runs=2)
-    assert str(ctx.value) == "Invalid max_active_runs: ContinuousTimetable requires max_active_runs <= 1"


 class TestDagDecorator:
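Editor's note (not part of the commit): as I understand pytest, the match argument is applied with re.search against str(excinfo.value), so the exact-equality assertion removed from test_dag.py becomes a regex containment check. A small self-contained illustration, escaping the expected text to keep the comparison literal:

import re

import pytest


def test_match_is_a_regex_search():
    expected = "Invalid max_active_runs: ContinuousTimetable requires max_active_runs <= 1"
    # re.escape keeps the pattern literal; match= only needs a partial hit
    # anywhere in str(exc), it is not an equality check.
    with pytest.raises(ValueError, match=re.escape(expected)):
        raise ValueError(expected)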