o-nikolas commented on code in PR #30204:
URL: https://github.com/apache/airflow/pull/30204#discussion_r1142902837

##########
airflow/providers/databricks/sensors/sql.py:
##########
@@ -0,0 +1,126 @@
+"""This module contains Databricks sensors."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Callable, Sequence
+
+from airflow.providers.common.sql.hooks.sql import fetch_all_handler
+from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class DatabricksSqlSensor(BaseSensorOperator):
+    """
+    Sensor to execute SQL statements on a Delta table via Databricks.
+
+    :param databricks_conn_id: Reference to :ref:`Databricks
+        connection id<howto/connection:databricks>` (templated), defaults to
+        DatabricksSqlHook.default_conn_name
+    :param http_path: Optional string specifying HTTP path of Databricks SQL Endpoint or cluster.
+        If not specified, it should be either specified in the Databricks connection's
+        extra parameters, or ``sql_endpoint_name`` must be specified.
+    :param sql_endpoint_name: Optional name of Databricks SQL Endpoint. If not specified, ``http_path``
+        must be provided as described above, defaults to None
+    :param session_configuration: An optional dictionary of Spark session parameters. If not specified,
+        it could be specified in the Databricks connection's extra parameters., defaults to None
+    :param http_headers: An optional list of (k, v) pairs
+        that will be set as HTTP headers on every request. (templated).
+    :param catalog: An optional initial catalog to use.
+        Requires DBR version 9.0+ (templated), defaults to ""

Review Comment:
   It seems to default to `"hive_metastore"` not `""`
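For reference, the constructor in this same hunk already defaults `catalog` to `"hive_metastore"`, so the fix is presumably just bringing the docstring into line with the signature. A minimal sketch of that alignment (the wording is illustrative, not the PR author's final text):

```python
# Signature default as it already appears later in this hunk:
DEFAULT_CATALOG = "hive_metastore"

# Docstring line adjusted to match (illustrative wording):
#     :param catalog: An optional initial catalog to use.
#         Requires DBR version 9.0+ (templated), defaults to "hive_metastore"
```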
+# +"""This module contains Databricks sensors.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Callable, Sequence + +from airflow.providers.common.sql.hooks.sql import fetch_all_handler +from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook +from airflow.sensors.base import BaseSensorOperator + +if TYPE_CHECKING: + from airflow.utils.context import Context + + +class DatabricksSqlSensor(BaseSensorOperator): + """ + Sensor to execute SQL statements on a Delta table via Databricks. + + :param databricks_conn_id: Reference to :ref:`Databricks + connection id<howto/connection:databricks>` (templated), defaults to + DatabricksSqlHook.default_conn_name + :param http_path: Optional string specifying HTTP path of Databricks SQL Endpoint or cluster. + If not specified, it should be either specified in the Databricks connection's + extra parameters, or ``sql_endpoint_name`` must be specified. + :param sql_endpoint_name: Optional name of Databricks SQL Endpoint. If not specified, ``http_path`` + must be provided as described above, defaults to None + :param session_configuration: An optional dictionary of Spark session parameters. If not specified, + it could be specified in the Databricks connection's extra parameters., defaults to None + :param http_headers: An optional list of (k, v) pairs + that will be set as HTTP headers on every request. (templated). + :param catalog: An optional initial catalog to use. + Requires DBR version 9.0+ (templated), defaults to "" + :param schema: An optional initial schema to use. + Requires DBR version 9.0+ (templated), defaults to "default" + :param sql: SQL statement to be executed. + :param handler: Handler for DbApiHook.run() to return results, defaults to fetch_all_handler + :param client_parameters: Additional parameters internal to Databricks SQL Connector parameters. + """ + + template_fields: Sequence[str] = ( + "databricks_conn_id", + "sql", + "catalog", + "schema", + "http_headers", + ) + + template_ext: Sequence[str] = (".sql",) + template_fields_renderers = {"sql": "sql"} + + def __init__( + self, + *, + databricks_conn_id: str = DatabricksSqlHook.default_conn_name, + http_path: str | None = None, + sql_endpoint_name: str | None = None, + session_configuration=None, + http_headers: list[tuple[str, str]] | None = None, + catalog: str = "hive_metastore", + schema: str = "default", + sql: str | None = None, + handler: Callable[[Any], Any] = fetch_all_handler, + client_parameters: dict[str, Any] | None = None, + **kwargs, + ) -> None: + self.databricks_conn_id = databricks_conn_id + self._http_path = http_path + self._sql_endpoint_name = sql_endpoint_name + self.session_config = session_configuration + self.http_headers = http_headers + self.catalog = catalog + self.schema = schema + self.sql = sql + self.caller = "DatabricksSqlSensor" + self.client_parameters = client_parameters or {} + self.hook_params = kwargs.pop("hook_params", {}) + self.handler = handler + super().__init__(**kwargs) + + def _get_hook(self) -> DatabricksSqlHook: Review Comment: You can make this a cached property ########## tests/system/providers/databricks/example_databricks_sensor.py: ########## @@ -0,0 +1,71 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
##########
tests/system/providers/databricks/example_databricks_sensor.py:
##########
@@ -0,0 +1,71 @@
+"""
+This is an example DAG which uses the DatabricksSqlSensor.
+The task checks for a generic SQL statement against a Delta table,
+and if a result is returned, the task succeeds, else it times out.
+"""
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.databricks.sensors.sql import DatabricksSqlSensor
+
+# [docs]
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+# [docs]
+DAG_ID = "example_databricks_sensor"
+
+with DAG(
+    dag_id=DAG_ID,
+    schedule="@daily",
+    start_date=datetime(2021, 1, 1),
+    tags=["example"],
+    catchup=False,
+) as dag:
+    # [docs]
+    connection_id = "databricks_default"
+    sql_endpoint_name = "Starter Warehouse"
+
+    # [START howto_sensor_databricks_sql]

Review Comment:
   Can you add a doc file (which would use this tag)?
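For context on what the reviewer is asking for: provider how-to pages under `docs/` typically embed the snippet between a system test's `[START ...]` and `[END ...]` markers (usually via the `exampleinclude` Sphinx directive), so adding a doc page is what would make this tag useful. A rough sketch of how the tagged block inside the example DAG might look; the task arguments and the `[END ...]` marker below are assumptions for illustration, not the PR's actual code:

```python
# Hypothetical shape of the documented snippet; only the lines between the
# START/END markers would be pulled into the how-to page.
# [START howto_sensor_databricks_sql]
sql_sensor = DatabricksSqlSensor(
    task_id="sql_sensor_task",            # illustrative task id
    databricks_conn_id=connection_id,     # "databricks_default" above
    sql_endpoint_name=sql_endpoint_name,  # "Starter Warehouse" above
    sql="SELECT 1",                       # placeholder query
    timeout=60 * 2,                       # placeholder sensor timeout
)
# [END howto_sensor_databricks_sql]
```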
##########
airflow/providers/databricks/sensors/sql.py:
##########
@@ -0,0 +1,126 @@
+    def _get_hook(self) -> DatabricksSqlHook:
+        return DatabricksSqlHook(
+            self.databricks_conn_id,
+            self._http_path,
+            self._sql_endpoint_name,
+            self.session_config,
+            self.http_headers,
+            self.catalog,
+            self.schema,
+            self.caller,
+            **self.client_parameters,
+            **self.hook_params,
+        )
+
+    def _sql_sensor(self, sql):
+        hook = self._get_hook()
+        sql_result = hook.run(
+            sql,
+            handler=self.handler if self.do_xcom_push else None,
+        )
+        return sql_result
+
+    def _get_results(self) -> bool:
+        result = self._sql_sensor(self.sql)
+        self.log.debug("SQL result: %s", result)
+        return bool(result)

Review Comment:
   Nit: I don't think these really need to be separated into different methods. Or is this for future abstraction? E.g.:
   ```suggestion
       def _get_results(self) -> bool:
           hook = self._get_hook()
           sql_result = hook.run(
               self.sql,
               handler=self.handler if self.do_xcom_push else None,
           )
           self.log.debug("SQL result: %s", sql_result)
           return bool(sql_result)
   ```
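If the helpers are collapsed as suggested, the sensor's `poke` method would presumably just delegate to the single helper. `poke` is not part of the quoted hunk, so the following is only a sketch of the usual `BaseSensorOperator` pattern, not the PR's actual implementation:

```python
def poke(self, context: Context) -> bool:
    # Standard sensor entry point: return True to finish, False to keep poking.
    # Assumes _get_results() has been collapsed as in the suggestion above.
    return self._get_results()
```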
##########
tests/system/providers/databricks/example_databricks_sensor.py:
##########
@@ -0,0 +1,71 @@
+from airflow import DAG
+from airflow.providers.databricks.sensors.sql import DatabricksSqlSensor
+
+# [docs]

Review Comment:
   What are these tags for?

--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at: [email protected]
