This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 756011a55f5 AIP-82: implement Google Pub/Sub message queue provider 
(#56445)
756011a55f5 is described below

commit 756011a55f5f3ff3e9b1d463343d250fdbc04188
Author: Deji Ibrahim <[email protected]>
AuthorDate: Wed Jan 21 22:47:18 2026 +0100

    AIP-82: implement Google Pub/Sub message queue provider (#56445)
    
    * AIP-82: implement Google Pub/Sub message queue provider
    
    * fix: mypy checks
    
    * fix: spell checks
    
    * fix: tests
    
    * switch to scheme based configuration
    
    * add system tests, update docs
    
    * use proposed naming convention for folders/files
    
    * handle 2.11 compatibility
    
    * limit lower dependency bound
---
 dev/breeze/tests/test_selective_checks.py          |  8 +-
 providers/google/docs/index.rst                    |  2 +
 providers/google/docs/message-queues/index.rst     | 70 ++++++++++++++++
 providers/google/provider.yaml                     |  3 +
 providers/google/pyproject.toml                    |  4 +
 .../providers/google/cloud/triggers/pubsub.py      | 10 ++-
 .../providers/google/event_scheduling/__init__.py  | 16 ++++
 .../google/event_scheduling/events/__init__.py     | 16 ++++
 .../google/event_scheduling/events/pubsub.py       | 61 ++++++++++++++
 .../airflow/providers/google/get_provider_info.py  |  3 +
 .../system/google/event_scheduling/__init__.py     | 16 ++++
 .../example_event_schedule_pubsub.py               | 96 ++++++++++++++++++++++
 .../tests/unit/google/event_scheduling/__init__.py | 16 ++++
 .../google/event_scheduling/events/__init__.py     | 16 ++++
 .../google/event_scheduling/events/test_pubsub.py  | 51 ++++++++++++
 15 files changed, 382 insertions(+), 6 deletions(-)

diff --git a/dev/breeze/tests/test_selective_checks.py 
b/dev/breeze/tests/test_selective_checks.py
index 1271e73384f..0d30f975b0a 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -1948,7 +1948,7 @@ def test_expected_output_push(
             ),
             {
                 "selected-providers-list-as-string": "amazon apache.beam 
apache.cassandra apache.kafka "
-                "cncf.kubernetes common.compat common.sql databricks "
+                "cncf.kubernetes common.compat common.messaging common.sql 
databricks "
                 "facebook google hashicorp http microsoft.azure 
microsoft.mssql mysql "
                 "openlineage oracle postgres presto salesforce samba sftp ssh 
standard trino",
                 "all-python-versions": 
f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']",
@@ -1960,7 +1960,7 @@ def test_expected_output_push(
                 "skip-providers-tests": "false",
                 "docs-build": "true",
                 "docs-list-as-string": "apache-airflow helm-chart amazon 
apache.beam apache.cassandra "
-                "apache.kafka cncf.kubernetes common.compat common.sql 
databricks facebook google hashicorp http microsoft.azure "
+                "apache.kafka cncf.kubernetes common.compat common.messaging 
common.sql databricks facebook google hashicorp http microsoft.azure "
                 "microsoft.mssql mysql openlineage oracle postgres "
                 "presto salesforce samba sftp ssh standard trino",
                 "skip-prek-hooks": ALL_SKIPPED_COMMITS_IF_NO_UI,
@@ -1974,7 +1974,7 @@ def test_expected_output_push(
                         {
                             "description": "amazon...standard",
                             "test_types": "Providers[amazon] 
Providers[apache.beam,apache.cassandra,"
-                            
"apache.kafka,cncf.kubernetes,common.compat,common.sql,databricks,facebook,"
+                            
"apache.kafka,cncf.kubernetes,common.compat,common.messaging,common.sql,databricks,facebook,"
                             
"hashicorp,http,microsoft.azure,microsoft.mssql,mysql,"
                             
"openlineage,oracle,postgres,presto,salesforce,samba,sftp,ssh,trino] "
                             "Providers[google] "
@@ -2245,7 +2245,7 @@ def test_upgrade_to_newer_dependencies(
             ("providers/google/docs/some_file.rst",),
             {
                 "docs-list-as-string": "amazon apache.beam apache.cassandra 
apache.kafka "
-                "cncf.kubernetes common.compat common.sql databricks facebook 
google hashicorp http "
+                "cncf.kubernetes common.compat common.messaging common.sql 
databricks facebook google hashicorp http "
                 "microsoft.azure microsoft.mssql mysql openlineage oracle "
                 "postgres presto salesforce samba sftp ssh standard trino",
             },
diff --git a/providers/google/docs/index.rst b/providers/google/docs/index.rst
index 9140ddee136..f2069252b55 100644
--- a/providers/google/docs/index.rst
+++ b/providers/google/docs/index.rst
@@ -36,6 +36,7 @@
 
     Connection types <connections/index>
     Logging handlers <logging/index>
+    Message queues <message-queues/index>
     Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
     API Authentication backend <api-auth-backend/google-openid>
     Operators <operators/index>
@@ -206,6 +207,7 @@ Dependent package
 `apache-airflow-providers-apache-cassandra 
<https://airflow.apache.org/docs/apache-airflow-providers-apache-cassandra>`_  
``apache.cassandra``
 `apache-airflow-providers-cncf-kubernetes 
<https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes>`_    
``cncf.kubernetes``
 `apache-airflow-providers-common-compat 
<https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_      
  ``common.compat``
+`apache-airflow-providers-common-messaging 
<https://airflow.apache.org/docs/apache-airflow-providers-common-messaging>`_  
``common.messaging``
 `apache-airflow-providers-common-sql 
<https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_         
     ``common.sql``
 `apache-airflow-providers-facebook 
<https://airflow.apache.org/docs/apache-airflow-providers-facebook>`_           
       ``facebook``
 `apache-airflow-providers-http 
<https://airflow.apache.org/docs/apache-airflow-providers-http>`_               
           ``http``
diff --git a/providers/google/docs/message-queues/index.rst 
b/providers/google/docs/message-queues/index.rst
new file mode 100644
index 00000000000..93f4c1a5e65
--- /dev/null
+++ b/providers/google/docs/message-queues/index.rst
@@ -0,0 +1,70 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Google Cloud Messaging Queues
+==============================
+
+.. contents::
+   :local:
+   :depth: 2
+
+Google Cloud Pub/Sub Queue Provider
+------------------------------------
+
+Implemented by 
:class:`~airflow.providers.google.event_scheduling.events.pubsub.PubSubMessageQueueEventTriggerContainer`
+
+The Google Cloud Pub/Sub Queue Provider is a message queue provider that uses 
Google Cloud Pub/Sub as the underlying message queue system.
+
+It allows you to send and receive messages using Cloud Pub/Sub in your Airflow 
workflows
+with the 
:class:`~airflow.providers.common.messaging.triggers.msg_queue.MessageQueueTrigger`
 common message queue interface.
+
+.. include:: /../src/airflow/providers/google/event_scheduling/events/pubsub.py
+    :start-after: [START pubsub_message_queue_provider_description]
+    :end-before: [END pubsub_message_queue_provider_description]
+
+Pub/Sub Message Queue Trigger
+-----------------------------
+
+Implemented by 
:class:`~airflow.providers.google.cloud.triggers.pubsub.PubsubPullTrigger`
+
+Inherited from 
:class:`~airflow.providers.common.messaging.triggers.msg_queue.MessageQueueTrigger`
+
+
+Wait for a message in a queue
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Below is an example of how you can configure an Airflow DAG to be triggered by 
a message in Pub/Sub.
+
+.. exampleinclude:: 
/../tests/system/google/event_scheduling/example_event_schedule_pubsub.py
+    :language: python
+    :start-after: [START howto_trigger_pubsub_message_queue]
+    :end-before: [END howto_trigger_pubsub_message_queue]
+
+How it works
+------------
+
+1. **Pub/Sub Message Queue Trigger**: The ``PubsubPullTrigger`` listens for 
messages from a Google Cloud Pub/Sub subscription.
+
+2. **Asset and Watcher**: The ``Asset`` abstracts the external entity, the 
Pub/Sub subscription in this example.
+   The ``AssetWatcher`` associates a trigger with a name. This name helps you 
identify which trigger is associated with which
+   asset.
+
+3. **Event-Driven DAG**: Instead of running on a fixed schedule, the DAG 
executes when the asset receives an update
+   (e.g., a new message in the queue).
+
+For how to use the trigger, refer to the documentation of the
+:ref:`Messaging Trigger <howto/trigger:MessageQueueTrigger>`
diff --git a/providers/google/provider.yaml b/providers/google/provider.yaml
index 357ea767591..389c679975a 100644
--- a/providers/google/provider.yaml
+++ b/providers/google/provider.yaml
@@ -1260,3 +1260,6 @@ auth-backends:
 logging:
   - airflow.providers.google.cloud.log.gcs_task_handler.GCSTaskHandler
   - 
airflow.providers.google.cloud.log.stackdriver_task_handler.StackdriverTaskHandler
+
+queues:
+  - 
airflow.providers.google.event_scheduling.events.pubsub.PubSubMessageQueueEventTriggerContainer
diff --git a/providers/google/pyproject.toml b/providers/google/pyproject.toml
index e80d279c3cd..7bd2fd60552 100644
--- a/providers/google/pyproject.toml
+++ b/providers/google/pyproject.toml
@@ -207,6 +207,9 @@ dependencies = [
 "standard" = [
     "apache-airflow-providers-standard"
 ]
+"common.messaging" = [
+    "apache-airflow-providers-common-messaging>=2.0.0"
+]
 
 [dependency-groups]
 dev = [
@@ -218,6 +221,7 @@ dev = [
     "apache-airflow-providers-apache-cassandra",
     "apache-airflow-providers-cncf-kubernetes",
     "apache-airflow-providers-common-compat",
+    "apache-airflow-providers-common-messaging",
     "apache-airflow-providers-common-sql",
     "apache-airflow-providers-facebook",
     "apache-airflow-providers-http",
diff --git 
a/providers/google/src/airflow/providers/google/cloud/triggers/pubsub.py 
b/providers/google/src/airflow/providers/google/cloud/triggers/pubsub.py
index c09571221b2..22314838c46 100644
--- a/providers/google/src/airflow/providers/google/cloud/triggers/pubsub.py
+++ b/providers/google/src/airflow/providers/google/cloud/triggers/pubsub.py
@@ -26,10 +26,16 @@ from typing import Any
 from google.cloud.pubsub_v1.types import ReceivedMessage
 
 from airflow.providers.google.cloud.hooks.pubsub import PubSubAsyncHook
-from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.triggers.base import TriggerEvent
 
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.triggers.base import BaseEventTrigger
+else:
+    from airflow.triggers.base import BaseTrigger as BaseEventTrigger  # type: 
ignore
 
-class PubsubPullTrigger(BaseTrigger):
+
+class PubsubPullTrigger(BaseEventTrigger):
     """
     Initialize the Pubsub Pull Trigger with needed parameters.
 
diff --git 
a/providers/google/src/airflow/providers/google/event_scheduling/__init__.py 
b/providers/google/src/airflow/providers/google/event_scheduling/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/providers/google/src/airflow/providers/google/event_scheduling/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git 
a/providers/google/src/airflow/providers/google/event_scheduling/events/__init__.py
 
b/providers/google/src/airflow/providers/google/event_scheduling/events/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ 
b/providers/google/src/airflow/providers/google/event_scheduling/events/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git 
a/providers/google/src/airflow/providers/google/event_scheduling/events/pubsub.py
 
b/providers/google/src/airflow/providers/google/event_scheduling/events/pubsub.py
new file mode 100644
index 00000000000..1d1e3612181
--- /dev/null
+++ 
b/providers/google/src/airflow/providers/google/event_scheduling/events/pubsub.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.providers.common.messaging.providers.base_provider import 
BaseMessageQueueProvider
+from airflow.providers.google.cloud.triggers.pubsub import PubsubPullTrigger
+
+if TYPE_CHECKING:
+    from airflow.triggers.base import BaseEventTrigger
+
+
+class PubSubMessageQueueEventTriggerContainer(BaseMessageQueueProvider):
+    """
+    Configuration for PubSub integration with common-messaging.
+
+    [START pubsub_message_queue_provider_description]
+    * It uses ``google+pubsub`` as the scheme for identifying the provider.
+    * For parameter definitions, take a look at 
:class:`~airflow.providers.google.cloud.triggers.pubsub.PubsubPullTrigger`.
+
+    .. code-block:: python
+
+        from airflow.providers.common.messaging.triggers.msg_queue import 
MessageQueueTrigger
+        from airflow.sdk import Asset, AssetWatcher
+
+        trigger = MessageQueueTrigger(
+            scheme="google+pubsub",
+            # Additional PubsubPullTrigger parameters as needed
+            project_id="my_project",
+            subscription="my_subscription",
+            ack_messages=True,
+            max_messages=1,
+            gcp_conn_id="google_cloud_default",
+            poke_interval=60.0,
+        )
+
+        asset = Asset("pubsub_queue_asset", 
watchers=[AssetWatcher(name="pubsub_watcher", trigger=trigger)])
+
+    [END pubsub_message_queue_provider_description]
+
+    """
+
+    scheme = "google+pubsub"
+
+    def trigger_class(self) -> type[BaseEventTrigger]:
+        return PubsubPullTrigger
diff --git a/providers/google/src/airflow/providers/google/get_provider_info.py 
b/providers/google/src/airflow/providers/google/get_provider_info.py
index 12d473d44a2..97b4c162fe0 100644
--- a/providers/google/src/airflow/providers/google/get_provider_info.py
+++ b/providers/google/src/airflow/providers/google/get_provider_info.py
@@ -1516,4 +1516,7 @@ def get_provider_info():
             
"airflow.providers.google.cloud.log.gcs_task_handler.GCSTaskHandler",
             
"airflow.providers.google.cloud.log.stackdriver_task_handler.StackdriverTaskHandler",
         ],
+        "queues": [
+            
"airflow.providers.google.event_scheduling.events.pubsub.PubSubMessageQueueEventTriggerContainer"
+        ],
     }
diff --git a/providers/google/tests/system/google/event_scheduling/__init__.py 
b/providers/google/tests/system/google/event_scheduling/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/providers/google/tests/system/google/event_scheduling/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git 
a/providers/google/tests/system/google/event_scheduling/example_event_schedule_pubsub.py
 
b/providers/google/tests/system/google/event_scheduling/example_event_schedule_pubsub.py
new file mode 100644
index 00000000000..9ec99997f6a
--- /dev/null
+++ 
b/providers/google/tests/system/google/event_scheduling/example_event_schedule_pubsub.py
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG that demonstrates using Google Cloud Pub/Sub with 
MessageQueueTrigger
+and Asset Watchers for event-driven workflows.
+
+This example shows how to create a DAG that triggers when messages arrive in a
+Google Cloud Pub/Sub subscription using Asset Watchers.
+
+Prerequisites
+-------------
+
+Before running this example, ensure you have:
+
+1. A GCP project with Pub/Sub API enabled
+2. The following Pub/Sub resources created in your project:
+
+   - Topic: ``test-topic``
+   - Subscription: ``test-subscription``
+
+You can create these resources using:
+
+.. code-block:: bash
+
+    # Create topic
+    gcloud pubsub topics create test-topic --project={PROJECT_ID}
+
+    # Create subscription
+    gcloud pubsub subscriptions create test-subscription \\
+        --topic=test-topic --project={PROJECT_ID}
+
+How to test
+-----------
+
+1. Ensure the Pub/Sub resources exist (see Prerequisites above)
+2. Publish a message to trigger the DAG:
+
+   .. code-block:: bash
+
+       gcloud pubsub topics publish test-topic \\
+           --message="Test message" --project={PROJECT_ID}
+
+3. The DAG will be triggered automatically when the message arrives
+"""
+
+from __future__ import annotations
+
+# [START howto_trigger_pubsub_message_queue]
+from airflow.providers.common.messaging.triggers.msg_queue import 
MessageQueueTrigger
+from airflow.providers.standard.operators.empty import EmptyOperator
+from airflow.sdk import DAG, Asset, AssetWatcher
+
+# Define a trigger that listens to a Google Cloud Pub/Sub subscription
+trigger = MessageQueueTrigger(
+    scheme="google+pubsub",
+    project_id="my-project",
+    subscription="test-subscription",
+    ack_messages=True,
+    max_messages=1,
+    gcp_conn_id="google_cloud_default",
+    poke_interval=60.0,
+)
+
+# Define an asset that watches for messages on the Pub/Sub subscription
+asset = Asset(
+    "event_schedule_pubsub_asset_1",
+    watchers=[AssetWatcher(name="event_schedule_pubsub_watcher_1", 
trigger=trigger)],
+)
+
+with DAG(
+    dag_id="example_event_schedule_pubsub",
+    schedule=[asset],
+) as dag:
+    process_message_task = EmptyOperator(task_id="process_pubsub_message")
+# [END howto_trigger_pubsub_message_queue]
+
+
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/providers/google/tests/unit/google/event_scheduling/__init__.py 
b/providers/google/tests/unit/google/event_scheduling/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/providers/google/tests/unit/google/event_scheduling/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git 
a/providers/google/tests/unit/google/event_scheduling/events/__init__.py 
b/providers/google/tests/unit/google/event_scheduling/events/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/providers/google/tests/unit/google/event_scheduling/events/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git 
a/providers/google/tests/unit/google/event_scheduling/events/test_pubsub.py 
b/providers/google/tests/unit/google/event_scheduling/events/test_pubsub.py
new file mode 100644
index 00000000000..ebd59b609ca
--- /dev/null
+++ b/providers/google/tests/unit/google/event_scheduling/events/test_pubsub.py
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pytest
+
+from airflow.providers.google.cloud.triggers.pubsub import PubsubPullTrigger
+
+pytest.importorskip("airflow.providers.common.messaging.providers.base_provider")
+
+
+def test_message_pubsub_queue_create():
+    from airflow.providers.common.messaging.providers.base_provider import 
BaseMessageQueueProvider
+    from airflow.providers.google.event_scheduling.events.pubsub import (
+        PubSubMessageQueueEventTriggerContainer,
+    )
+
+    provider = PubSubMessageQueueEventTriggerContainer()
+    assert isinstance(provider, BaseMessageQueueProvider)
+
+
+def test_message_pubsub_queue_trigger_class():
+    from airflow.providers.google.event_scheduling.events.pubsub import (
+        PubSubMessageQueueEventTriggerContainer,
+    )
+
+    provider = PubSubMessageQueueEventTriggerContainer()
+    assert provider.trigger_class() == PubsubPullTrigger
+
+
+def test_scheme_matches():
+    from airflow.providers.google.event_scheduling.events.pubsub import (
+        PubSubMessageQueueEventTriggerContainer,
+    )
+
+    provider = PubSubMessageQueueEventTriggerContainer()
+    assert provider.scheme_matches("google+pubsub")

Reply via email to