This is an automated email from the ASF dual-hosted git repository.

ash pushed a commit to branch task-sdk-first-code
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 7c1ae9fb342db0e3fb28aaa8212b1492a051a24a
Author: Ash Berlin-Taylor <[email protected]>
AuthorDate: Tue Oct 29 15:29:10 2024 +0000

    Fix tests [skip ci]
---
 providers/tests/google/cloud/operators/test_bigquery.py | 4 ++--
 providers/tests/google/cloud/operators/test_dataflow.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/providers/tests/google/cloud/operators/test_bigquery.py b/providers/tests/google/cloud/operators/test_bigquery.py
index a3beddb9d87..8d6f4de47ff 100644
--- a/providers/tests/google/cloud/operators/test_bigquery.py
+++ b/providers/tests/google/cloud/operators/test_bigquery.py
@@ -2578,7 +2578,7 @@ class TestBigQueryValueCheckOperator:
         """
         Assert the exception if require param not pass to BigQueryValueCheckOperator with deferrable=True
         """
-        with pytest.raises(AirflowException) as missing_param:
+        with pytest.raises(TypeError) as missing_param:
             BigQueryValueCheckOperator(deferrable=True, **kwargs)
         assert missing_param.value.args[0] == expected
 
@@ -2590,7 +2590,7 @@ class TestBigQueryValueCheckOperator:
             "missing keyword arguments 'sql', 'pass_value'",
             "missing keyword arguments 'pass_value', 'sql'",
         )
-        with pytest.raises(AirflowException) as missing_param:
+        with pytest.raises(TypeError) as missing_param:
             BigQueryValueCheckOperator(deferrable=True, kwargs={})
         assert missing_param.value.args[0] in (expected, expected1)
 
diff --git a/providers/tests/google/cloud/operators/test_dataflow.py b/providers/tests/google/cloud/operators/test_dataflow.py
index 4263d3300f1..96e0621add8 100644
--- a/providers/tests/google/cloud/operators/test_dataflow.py
+++ b/providers/tests/google/cloud/operators/test_dataflow.py
@@ -1077,7 +1077,7 @@ class TestDataflowRunPipelineOperator:
             "location": TEST_LOCATION,
             "gcp_conn_id": GCP_CONN_ID,
         }
-        with pytest.raises(AirflowException):
+        with pytest.raises(TypeError, match="missing keyword argument"):
             DataflowRunPipelineOperator(**init_kwargs).execute(mock.MagicMock()).return_value = {
                 "error": {"message": "example error"}
             }
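
For context, the assertions above now expect TypeError because missing required constructor arguments appear to surface as "missing keyword arguments" rather than being wrapped in AirflowException. A minimal sketch of that pattern is below; FakeValueCheckOperator is hypothetical and not the actual Airflow Task SDK or operator code, and the real operators produce their own "missing keyword arguments ..." wording that the tests match on.

    # Hypothetical illustration only -- not the Airflow Task SDK implementation.
    # If required fields are keyword-only parameters, constructing the object
    # without them raises TypeError natively, which is the exception class the
    # updated tests now expect.
    class FakeValueCheckOperator:
        def __init__(self, *, sql, pass_value, deferrable=False):
            self.sql = sql
            self.pass_value = pass_value
            self.deferrable = deferrable

    try:
        FakeValueCheckOperator(deferrable=True)
    except TypeError as err:
        # Python's own wording here is e.g.:
        # "__init__() missing 2 required keyword-only arguments: 'sql' and 'pass_value'"
        print(err)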
