This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 165d5d51178 Bring back providers compatibility checks (#52398)
165d5d51178 is described below

commit 165d5d511781a029cf4ee51d57fd70b5912a181b
Author: Jarek Potiuk <[email protected]>
AuthorDate: Sat Jun 28 20:04:16 2025 +0200

    Bring back providers compatibility checks (#52398)
    
    The compatibility checks were accidentally removed in #52072. This
    commit brings them back:
    
    * Python 3.10
    * do not add cloudant (it was not working for Python 3.9)
---
 dev/breeze/src/airflow_breeze/global_constants.py   | 21 ++++++++++++++++++++-
 .../google/cloud/operators/test_bigquery_dts.py     | 12 ++----------
 .../unit/google/cloud/operators/test_datacatalog.py | 14 ++++++++++++++
 .../unit/google/cloud/operators/test_functions.py   |  4 ++++
 4 files changed, 40 insertions(+), 11 deletions(-)

diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index 224a6d3be56..afa0f4dae3b 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -732,7 +732,26 @@ DEFAULT_EXTRAS = [
     # END OF EXTRAS LIST UPDATED BY PRE COMMIT
 ]
 
-PROVIDERS_COMPATIBILITY_TESTS_MATRIX: list[dict[str, str | list[str]]] = []
+PROVIDERS_COMPATIBILITY_TESTS_MATRIX: list[dict[str, str | list[str]]] = [
+    {
+        "python-version": "3.10",
+        "airflow-version": "2.10.5",
+        "remove-providers": "common.messaging fab git keycloak",
+        "run-tests": "true",
+    },
+    {
+        "python-version": "3.10",
+        "airflow-version": "2.11.0",
+        "remove-providers": "common.messaging fab git keycloak",
+        "run-tests": "true",
+    },
+    {
+        "python-version": "3.10",
+        "airflow-version": "3.0.2",
+        "remove-providers": "",
+        "run-tests": "true",
+    },
+]
 
 # Number of slices for low dep tests
 NUMBER_OF_LOW_DEP_SLICES = 5
diff --git a/providers/google/tests/unit/google/cloud/operators/test_bigquery_dts.py b/providers/google/tests/unit/google/cloud/operators/test_bigquery_dts.py
index a4df65dc3c4..9c5bca9376c 100644
--- a/providers/google/tests/unit/google/cloud/operators/test_bigquery_dts.py
+++ b/providers/google/tests/unit/google/cloud/operators/test_bigquery_dts.py
@@ -29,8 +29,6 @@ from airflow.providers.google.cloud.operators.bigquery_dts import (
     BigQueryDeleteDataTransferConfigOperator,
 )
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
 PROJECT_ID = "id"
 
 TRANSFER_CONFIG = {
@@ -73,10 +71,7 @@ class TestBigQueryCreateDataTransferOperator:
             retry=DEFAULT,
             timeout=None,
         )
-        if AIRFLOW_V_3_0_PLUS:
-            ti.xcom_push.assert_called_with(key="transfer_config_id", value="1a2b3c")
-        else:
-            ti.xcom_push.assert_called_with(key="transfer_config_id", value="1a2b3c", execution_date=None)
+        ti.xcom_push.assert_called_with(key="transfer_config_id", value="1a2b3c")
 
         assert "secret_access_key" not in return_value.get("params", {})
         assert "access_key_id" not in return_value.get("params", {})
@@ -131,10 +126,7 @@ class TestBigQueryDataTransferServiceStartTransferRunsOperator:
             retry=DEFAULT,
             timeout=None,
         )
-        if AIRFLOW_V_3_0_PLUS:
-            ti.xcom_push.assert_called_with(key="run_id", value="123")
-        else:
-            ti.xcom_push.assert_called_with(key="run_id", value="123", execution_date=None)
+        ti.xcom_push.assert_called_with(key="run_id", value="123")
 
     @mock.patch(
         f"{OPERATOR_MODULE_PATH}.BiqQueryDataTransferServiceHook",
diff --git a/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py b/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py
index da5b420edb3..f4cf88a478d 100644
--- a/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py
+++ b/providers/google/tests/unit/google/cloud/operators/test_datacatalog.py
@@ -51,6 +51,8 @@ from airflow.providers.google.cloud.operators.datacatalog import (
     CloudDataCatalogUpdateTagTemplateOperator,
 )
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 if TYPE_CHECKING:
     from google.api_core.gapic_v1.method import _MethodDefault
 
@@ -163,6 +165,8 @@ class TestCloudDataCatalogCreateEntryOperator:
             )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = task  # type: ignore[assignment]
         result = task.execute(context=mock_context)  # type: ignore[arg-type]
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -205,6 +209,8 @@ class TestCloudDataCatalogCreateEntryOperator:
             )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = task  # type: ignore[assignment]
         result = task.execute(context=mock_context)  # type: ignore[arg-type]
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -257,6 +263,8 @@ class TestCloudDataCatalogCreateEntryGroupOperator:
             )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = task  # type: ignore[assignment]
         result = task.execute(context=mock_context)  # type: ignore[arg-type]
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -301,6 +309,8 @@ class TestCloudDataCatalogCreateTagOperator:
             )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = task  # type: ignore[assignment]
         result = task.execute(context=mock_context)  # type: ignore[arg-type]
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -345,6 +355,8 @@ class TestCloudDataCatalogCreateTagTemplateOperator:
             )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = task  # type: ignore[assignment]
         result = task.execute(context=mock_context)  # type: ignore[arg-type]
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -388,6 +400,8 @@ class TestCloudDataCatalogCreateTagTemplateFieldOperator:
             )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = task  # type: ignore[assignment]
         result = task.execute(context=mock_context)  # type: ignore[arg-type]
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
diff --git a/providers/google/tests/unit/google/cloud/operators/test_functions.py b/providers/google/tests/unit/google/cloud/operators/test_functions.py
index 47b3e4ebde6..6d1b48ec2f4 100644
--- a/providers/google/tests/unit/google/cloud/operators/test_functions.py
+++ b/providers/google/tests/unit/google/cloud/operators/test_functions.py
@@ -32,6 +32,8 @@ from airflow.providers.google.cloud.operators.functions import (
 )
 from airflow.version import version
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 EMPTY_CONTENT = b""
 MOCK_RESP_404 = httplib2.Response({"status": 404})
 
@@ -716,6 +718,8 @@ class TestGcfFunctionInvokeOperator:
         )
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti}
+        if not AIRFLOW_V_3_0_PLUS:
+            mock_context["task"] = op
         op.execute(mock_context)
 
         mock_gcf_hook.assert_called_once_with(

Reply via email to