harishkesavarao commented on code in PR #30477:
URL: https://github.com/apache/airflow/pull/30477#discussion_r1158941469


##########
airflow/providers/databricks/sensors/databricks_sql.py:
##########
@@ -0,0 +1,134 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+"""This module contains Databricks sensors."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence
+
+from airflow.compat.functools import cached_property
+from airflow.exceptions import AirflowException
+from airflow.providers.common.sql.hooks.sql import fetch_all_handler
+from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class DatabricksSqlSensor(BaseSensorOperator):
+    """
+    Sensor that runs a SQL query on Databricks.
+
+    :param databricks_conn_id: Reference to :ref:`Databricks
+        connection id<howto/connection:databricks>` (templated), defaults to
+        DatabricksSqlHook.default_conn_name.
+    :param sql_warehouse_name: Optional name of Databricks SQL warehouse. If not specified, ``http_path``
+        must be provided as described below, defaults to None
+    :param http_path: Optional string specifying HTTP path of Databricks SQL warehouse or All Purpose cluster.
+        If not specified, it should be either specified in the Databricks connection's
+        extra parameters, or ``sql_warehouse_name`` must be specified.
+    :param session_configuration: An optional dictionary of Spark session parameters. If not specified,
+        it could be specified in the Databricks connection's extra parameters, defaults to None
+    :param http_headers: An optional list of (k, v) pairs
+        that will be set as HTTP headers on every request. (templated).
+    :param catalog: An optional initial catalog to use.
+        Requires Databricks Runtime version 9.0+ (templated), defaults to ""
+    :param schema: An optional initial schema to use.
+        Requires Databricks Runtime version 9.0+ (templated), defaults to "default"
+    :param sql: SQL statement to be executed.
+    :param handler: Handler for DbApiHook.run() to return results, defaults to fetch_all_handler
+    :param client_parameters: Additional parameters internal to Databricks SQL connector parameters.
+    """
+
+    template_fields: Sequence[str] = (
+        "databricks_conn_id",
+        "sql",
+        "catalog",
+        "schema",
+        "http_headers",
+    )
+
+    template_ext: Sequence[str] = (".sql",)
+    template_fields_renderers = {"sql": "sql"}
+
+    def __init__(
+        self,
+        *,
+        databricks_conn_id: str = DatabricksSqlHook.default_conn_name,
+        http_path: str | None = None,
+        sql_warehouse_name: str | None = None,
+        session_configuration=None,
+        http_headers: list[tuple[str, str]] | None = None,
+        catalog: str = "",
+        schema: str = "default",
+        sql: str | Iterable[str],
+        handler: Callable[[Any], Any] = fetch_all_handler,
+        client_parameters: dict[str, Any] | None = None,
+        **kwargs,
+    ) -> None:
+        """Creates DatabricksSqlSensor object using the specified input 
arguments."""
+        self.databricks_conn_id = databricks_conn_id
+        self._http_path = http_path
+        self._sql_warehouse_name = sql_warehouse_name
+        self.session_config = session_configuration
+        self.http_headers = http_headers
+        self.catalog = catalog
+        self.schema = schema
+        self.sql = sql
+        self.caller = "DatabricksSqlSensor"
+        self.client_parameters = client_parameters or {}
+        self.hook_params = kwargs.pop("hook_params", {})
+        self.handler = handler
+        super().__init__(**kwargs)
+
+    @cached_property
+    def hook(self) -> DatabricksSqlHook:
+        """Creates and returns a DatabricksSqlHook object."""
+        return DatabricksSqlHook(
+            self.databricks_conn_id,
+            self._http_path,
+            self._sql_warehouse_name,
+            self.session_config,
+            self.http_headers,
+            self.catalog,
+            self.schema,
+            self.caller,
+            **self.client_parameters,
+            **self.hook_params,
+        )
+
+    def _get_results(self) -> bool:
+        """Uses the Databricks SQL hook and runs the specified SQL query."""
+        if not (self._http_path or self._sql_warehouse_name):
+            raise AirflowException(
+                "Databricks SQL warehouse/cluster configuration missing. 
Please specify either http_path or "
+                "sql_warehouse_name."
+            )
+        hook = self.hook
+        sql_result = hook.run(

Review Comment:
   > By design, a sensor should only wait for something. My concern here is we are running a query which will take the worker slot for the entire task execution time so there can be a problem when we run it in `reschedule` mode.
   
   @pankajastro You are right that a sensor should wait for something, but this sensor doesn't wait until it receives results: it immediately returns either `True` or `False` based on whether the query returns any results at all. This is useful if you only want to know whether there are results, without needing to know what they are, and take action based on that.
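   
   For illustration, here is a minimal sketch of that check, assuming `hook.run` returns the rows fetched by `fetch_all_handler` (the hunk above is truncated right at this call, so the exact arguments are an assumption):
   
   ```python
   def _get_results(self) -> bool:
       # Run the query once and report only whether it returned any rows;
       # there is no blocking loop here waiting for data to appear.
       sql_result = self.hook.run(self.sql, handler=self.handler)
       return bool(sql_result)
   ```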
   



##########
airflow/providers/databricks/sensors/databricks_sql.py:
##########

Review Comment:
   You are right that a sensor should wait for something, but this sensor doesn't wait until it receives results: it immediately returns either `True` or `False` based on whether the query returns any results at all. This is useful if you only want to know whether there are results, without needing to know what they are, and take action based on that.
   
   In the context of Databricks, the sensor is not all that different from the SQL operator, but it can be used as an upstream dependency that checks whether a query result contains data and then triggers something else in a downstream task. The sensor performs this check and returns `True` or `False`, while the operator just executes the query and does not return anything; the results are only logged in Airflow.
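   
   For example, a hypothetical DAG snippet (the connection id, warehouse name, and query are placeholders) showing the sensor gating a downstream task:
   
   ```python
   import pendulum
   
   from airflow import DAG
   from airflow.operators.empty import EmptyOperator
   from airflow.providers.databricks.sensors.databricks_sql import DatabricksSqlSensor
   
   with DAG(
       dag_id="databricks_sql_sensor_example",
       start_date=pendulum.datetime(2023, 1, 1, tz="UTC"),
       schedule=None,
   ) as dag:
       # Succeeds (returns True) as soon as the query yields any rows;
       # otherwise it pokes again on the next interval until it times out.
       check_new_data = DatabricksSqlSensor(
           task_id="check_new_data",
           databricks_conn_id="databricks_default",
           sql_warehouse_name="my_warehouse",  # placeholder warehouse name
           sql="SELECT 1 FROM demo.events WHERE event_date = '{{ ds }}' LIMIT 1",
           timeout=600,
       )
   
       # Downstream work runs only if the sensor found data.
       process_new_data = EmptyOperator(task_id="process_new_data")
   
       check_new_data >> process_new_data
   ```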
   



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
