This is an automated email from the ASF dual-hosted git repository.

ash pushed a commit to branch task-sdk-first-code
in repository https://gitbox.apache.org/repos/asf/airflow.git
commit 3e9f21b5335b54f4bff6a31e079dcd221efd251d
Author: Ash Berlin-Taylor <[email protected]>
AuthorDate: Tue Oct 29 16:12:31 2024 +0000

    fix-non db tests
---
 providers/tests/amazon/aws/operators/test_batch.py        |  2 +-
 .../tests/google/cloud/operators/test_cloud_build.py      |  4 ++--
 providers/tests/google/cloud/operators/test_compute.py    |  8 ++++----
 providers/tests/google/cloud/operators/test_dataproc.py   |  6 +++---
 .../google/cloud/operators/test_kubernetes_engine.py      | 16 ++++++++++------
 .../tests/google/cloud/operators/test_speech_to_text.py   | 10 ++--------
 providers/tests/google/cloud/sensors/test_dataproc.py     |  2 +-
 .../tests/google/cloud/transfers/test_gcs_to_bigquery.py  |  7 ++++---
 providers/tests/salesforce/operators/test_bulk.py         |  4 ++--
 tests/serialization/test_dag_serialization.py             |  5 +++++
 10 files changed, 34 insertions(+), 30 deletions(-)

diff --git a/providers/tests/amazon/aws/operators/test_batch.py b/providers/tests/amazon/aws/operators/test_batch.py
index 1389099e444..0c14c256edb 100644
--- a/providers/tests/amazon/aws/operators/test_batch.py
+++ b/providers/tests/amazon/aws/operators/test_batch.py
@@ -441,7 +441,7 @@ class TestBatchOperator:
         client_mock().submit_job.assert_called_once_with(**expected_args)
 
     def test_cant_set_old_and_new_override_param(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="Invalid arguments were passed"):
             _ = BatchOperator(
                 task_id="task",
                 job_name=JOB_NAME,
diff --git a/providers/tests/google/cloud/operators/test_cloud_build.py b/providers/tests/google/cloud/operators/test_cloud_build.py
index 3bcc8ac66aa..958190c942a 100644
--- a/providers/tests/google/cloud/operators/test_cloud_build.py
+++ b/providers/tests/google/cloud/operators/test_cloud_build.py
@@ -134,7 +134,7 @@ class TestCloudBuildOperator:
     @mock.patch(CLOUD_BUILD_HOOK_PATH)
     def test_create_build_with_missing_build(self, mock_hook):
         mock_hook.return_value.create_build_without_waiting_for_result.return_value = Build()
-        with pytest.raises(AirflowException, match="missing keyword argument 'build'"):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'build'"):
             CloudBuildCreateBuildOperator(task_id="id")
 
     @pytest.mark.parametrize(
@@ -479,7 +479,7 @@ def test_async_create_build_error_event_should_throw_exception():
 @mock.patch(CLOUD_BUILD_HOOK_PATH)
 def test_async_create_build_with_missing_build_should_throw_exception(mock_hook):
     mock_hook.return_value.create_build.return_value = Build()
-    with pytest.raises(AirflowException, match="missing keyword argument 'build'"):
+    with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'build'"):
         CloudBuildCreateBuildOperator(task_id="id")
 
 
diff --git a/providers/tests/google/cloud/operators/test_compute.py b/providers/tests/google/cloud/operators/test_compute.py
index fac74bae483..fcfb4e9d48d 100644
--- a/providers/tests/google/cloud/operators/test_compute.py
+++ b/providers/tests/google/cloud/operators/test_compute.py
@@ -349,7 +349,7 @@ class TestGceInstanceInsertFromTemplate:
         )
 
     def test_insert_instance_from_template_should_throw_ex_when_missing_source_instance_template(self):
-        with pytest.raises(AirflowException, match=r"missing keyword argument 'source_instance_template'"):
+        with pytest.raises(TypeError, match=r"missing keyword argument 'source_instance_template'"):
             ComputeEngineInsertInstanceFromTemplateOperator(
                 project_id=GCP_PROJECT_ID,
                 body=GCP_INSTANCE_BODY_FROM_TEMPLATE,
@@ -360,7 +360,7 @@ class TestGceInstanceInsertFromTemplate:
         )
 
     def test_insert_instance_from_template_should_throw_ex_when_missing_body(self):
-        with pytest.raises(AirflowException, match=r"missing keyword argument 'body'"):
+        with pytest.raises(TypeError, match=r"missing keyword argument 'body'"):
             ComputeEngineInsertInstanceFromTemplateOperator(
                 project_id=GCP_PROJECT_ID,
                 source_instance_template=SOURCE_INSTANCE_TEMPLATE,
@@ -910,7 +910,7 @@ class TestGceTemplateInsert:
         )
 
     def test_insert_template_should_throw_ex_when_missing_body(self):
-        with pytest.raises(AirflowException, match=r"missing keyword argument 'body'"):
+        with pytest.raises((TypeError, AirflowException), match=r"missing keyword argument 'body'"):
             ComputeEngineInsertInstanceTemplateOperator(
                 task_id=TASK_ID,
                 project_id=GCP_PROJECT_ID,
@@ -1552,7 +1552,7 @@ class TestGceInstanceGroupManagerInsert:
         )
 
     def test_insert_igm_should_throw_ex_when_missing_body(self):
-        with pytest.raises(AirflowException, match=r"missing keyword argument 'body'"):
+        with pytest.raises((TypeError, AirflowException), match=r"missing keyword argument 'body'"):
             ComputeEngineInsertInstanceGroupManagerOperator(
                 zone=GCE_ZONE,
                 task_id=TASK_ID,
diff --git a/providers/tests/google/cloud/operators/test_dataproc.py b/providers/tests/google/cloud/operators/test_dataproc.py
index babe432a2fe..2ec6ceb9bab 100644
--- a/providers/tests/google/cloud/operators/test_dataproc.py
+++ b/providers/tests/google/cloud/operators/test_dataproc.py
@@ -1577,7 +1577,7 @@ class TestDataprocSubmitJobOperator(DataprocJobTestBase):
         )
 
     def test_missing_region_parameter(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'region'"):
             DataprocSubmitJobOperator(
                 task_id=TASK_ID,
                 project_id=GCP_PROJECT,
@@ -1692,7 +1692,7 @@ class TestDataprocUpdateClusterOperator(DataprocClusterTestBase):
         )
 
     def test_missing_region_parameter(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'region'"):
             DataprocUpdateClusterOperator(
                 task_id=TASK_ID,
                 cluster_name=CLUSTER_NAME,
@@ -2678,7 +2678,7 @@ class TestDataprocCreateWorkflowTemplateOperator:
         )
 
     def test_missing_region_parameter(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'region'"):
             DataprocCreateWorkflowTemplateOperator(
                 task_id=TASK_ID,
                 gcp_conn_id=GCP_CONN_ID,
diff --git a/providers/tests/google/cloud/operators/test_kubernetes_engine.py b/providers/tests/google/cloud/operators/test_kubernetes_engine.py
index f0f42745c1c..3127b5d89ca 100644
--- a/providers/tests/google/cloud/operators/test_kubernetes_engine.py
+++ b/providers/tests/google/cloud/operators/test_kubernetes_engine.py
@@ -227,7 +227,7 @@ class TestGoogleCloudPlatformContainerOperator:
 
     @mock.patch(GKE_HOOK_PATH)
     def test_create_execute_error_location(self, mock_hook):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'location'"):
             GKECreateClusterOperator(
                 project_id=TEST_GCP_PROJECT_ID, body=PROJECT_BODY, task_id=PROJECT_TASK_ID
             )
@@ -270,14 +270,14 @@ class TestGoogleCloudPlatformContainerOperator:
 
     @mock.patch(GKE_HOOK_PATH)
     def test_delete_execute_error_cluster_name(self, mock_hook):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'name'"):
             GKEDeleteClusterOperator(
                 project_id=TEST_GCP_PROJECT_ID, location=PROJECT_LOCATION, task_id=PROJECT_TASK_ID
             )
 
     @mock.patch(GKE_HOOK_PATH)
     def test_delete_execute_error_location(self, mock_hook):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'location'"):
             GKEDeleteClusterOperator(
                 project_id=TEST_GCP_PROJECT_ID, name=CLUSTER_NAME, task_id=PROJECT_TASK_ID
             )
@@ -1270,7 +1270,7 @@ class TestGKEStartKueueJobOperator:
         fetch_cluster_info_mock.assert_called_once()
 
     def test_config_file_throws_error(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'queue_name'"):
             GKEStartKueueJobOperator(
                 project_id=TEST_GCP_PROJECT_ID,
                 location=PROJECT_LOCATION,
@@ -1478,7 +1478,9 @@ class TestGKESuspendJobOperator:
         )
 
     def test_config_file_throws_error(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises(
+            (TypeError, AirflowException), match="Invalid arguments were passed to .*\n.*'config_file'"
+        ):
             GKESuspendJobOperator(
                 project_id=TEST_GCP_PROJECT_ID,
                 location=PROJECT_LOCATION,
@@ -1586,7 +1588,9 @@ class TestGKEResumeJobOperator:
         )
 
     def test_config_file_throws_error(self):
-        with pytest.raises(AirflowException):
+        with pytest.raises(
+            (TypeError, AirflowException), match="Invalid arguments were passed to .*\n.*'config_file'"
+        ):
             GKEResumeJobOperator(
                 project_id=TEST_GCP_PROJECT_ID,
                 location=PROJECT_LOCATION,
diff --git a/providers/tests/google/cloud/operators/test_speech_to_text.py b/providers/tests/google/cloud/operators/test_speech_to_text.py
index 1d7fa9ca37f..155658976f3 100644
--- a/providers/tests/google/cloud/operators/test_speech_to_text.py
+++ b/providers/tests/google/cloud/operators/test_speech_to_text.py
@@ -59,26 +59,20 @@ class TestCloudSpeechToTextRecognizeSpeechOperator:
     def test_missing_config(self, mock_hook):
         mock_hook.return_value.recognize_speech.return_value = True
 
-        with pytest.raises(AirflowException) as ctx:
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'config'"):
             CloudSpeechToTextRecognizeSpeechOperator(
                 project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, audio=AUDIO, task_id="id"
             ).execute(context={"task_instance": Mock()})
-
-        err = ctx.value
-        assert "config" in str(err)
         mock_hook.assert_not_called()
 
     @patch("airflow.providers.google.cloud.operators.speech_to_text.CloudSpeechToTextHook")
     def test_missing_audio(self, mock_hook):
         mock_hook.return_value.recognize_speech.return_value = True
 
-        with pytest.raises(AirflowException) as ctx:
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'audio'"):
             CloudSpeechToTextRecognizeSpeechOperator(
                 project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, config=CONFIG, task_id="id"
             ).execute(context={"task_instance": Mock()})
-
-        err = ctx.value
-        assert "audio" in str(err)
         mock_hook.assert_not_called()
 
     @patch("airflow.providers.google.cloud.operators.speech_to_text.FileDetailsLink.persist")
diff --git a/providers/tests/google/cloud/sensors/test_dataproc.py b/providers/tests/google/cloud/sensors/test_dataproc.py
index 669a9a09f2a..5fea3e8d3ba 100644
--- a/providers/tests/google/cloud/sensors/test_dataproc.py
+++ b/providers/tests/google/cloud/sensors/test_dataproc.py
@@ -132,7 +132,7 @@ class TestDataprocJobSensor:
 
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
     def test_missing_region(self, mock_hook):
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'region'"):
             DataprocJobSensor(
                 task_id=TASK_ID,
                 project_id=GCP_PROJECT,
diff --git a/providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py b/providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py
index 05ef254cb43..d8a6c550c51 100644
--- a/providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py
+++ b/providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py
@@ -906,7 +906,7 @@ class TestGCSToBigQueryOperator:
         ]
         hook.return_value.generate_job_id.return_value = REAL_JOB_ID
         hook.return_value.split_tablename.return_value = (PROJECT_ID, DATASET, TABLE)
-        with pytest.raises(AirflowException, match=r"missing keyword argument 'bucket'"):
+        with pytest.raises((TypeError, AirflowException), match=r"missing keyword argument 'bucket'"):
             GCSToBigQueryOperator(
                 task_id=TASK_ID,
                 source_objects=TEST_SOURCE_OBJECTS,
@@ -926,7 +926,7 @@ class TestGCSToBigQueryOperator:
         ]
         hook.return_value.generate_job_id.return_value = REAL_JOB_ID
         hook.return_value.split_tablename.return_value = (PROJECT_ID, DATASET, TABLE)
-        with pytest.raises(AirflowException, match=r"missing keyword argument 'source_objects'"):
+        with pytest.raises((TypeError, AirflowException), match=r"missing keyword argument 'source_objects'"):
             GCSToBigQueryOperator(
                 task_id=TASK_ID,
                 destination_project_dataset_table=TEST_EXPLICIT_DEST,
@@ -947,7 +947,8 @@ class TestGCSToBigQueryOperator:
         hook.return_value.generate_job_id.return_value = REAL_JOB_ID
         hook.return_value.split_tablename.return_value = (PROJECT_ID, DATASET, TABLE)
         with pytest.raises(
-            AirflowException, match=r"missing keyword argument 'destination_project_dataset_table'"
+            (TypeError, AirflowException),
+            match=r"missing keyword argument 'destination_project_dataset_table'",
         ):
             GCSToBigQueryOperator(
                 task_id=TASK_ID,
diff --git a/providers/tests/salesforce/operators/test_bulk.py b/providers/tests/salesforce/operators/test_bulk.py
index a28cafc73a1..3ec701ecf41 100644
--- a/providers/tests/salesforce/operators/test_bulk.py
+++ b/providers/tests/salesforce/operators/test_bulk.py
@@ -33,7 +33,7 @@ class TestSalesforceBulkOperator:
         """
         Test execute missing operation
         """
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'operation'"):
             SalesforceBulkOperator(
                 task_id="no_missing_operation_arg",
                 object_name="Account",
@@ -52,7 +52,7 @@ class TestSalesforceBulkOperator:
         """
         Test execute missing object_name
         """
-        with pytest.raises(AirflowException):
+        with pytest.raises((TypeError, AirflowException), match="missing keyword argument 'object_name'"):
             SalesforceBulkOperator(
                 task_id="no_object_name_arg",
                 operation="insert",
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index 783b04152fd..0111cc669cd 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -1258,7 +1258,12 @@ class TestStringifiedDAGs:
             "max_active_tis_per_dag": None,
             "max_active_tis_per_dagrun": None,
             "max_retry_delay": None,
+            "on_execute_callback": None,
             "on_failure_fail_dagrun": False,
+            "on_failure_callback": None,
+            "on_retry_callback": None,
+            "on_skipped_callback": None,
+            "on_success_callback": None,
             "outlets": [],
             "owner": "airflow",
             "params": {},
