This is an automated email from the ASF dual-hosted git repository.
eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 454afb5a4a0 Improve example docs around SQLExecuteQueryOperator in
Postgres/Oracle/Presto/Vertica/ODBC (#46352)
454afb5a4a0 is described below
commit 454afb5a4a0c2b092f9921e639b7ee56e37b2f00
Author: Aaron Chen <[email protected]>
AuthorDate: Sun Feb 2 20:31:18 2025 -0800
Improve example docs around SQLExecuteQueryOperator in
Postgres/Oracle/Presto/Vertica/ODBC (#46352)
* add oracle docs & dag example
---
providers/odbc/docs/index.rst | 1 +
providers/odbc/docs/operators.rst | 95 +++++++++++++++++++++
providers/odbc/tests/system/odbc/example_odbc.py | 96 ++++++++++++++++++++++
providers/oracle/docs/index.rst | 2 +-
providers/oracle/docs/operators.rst | 74 +++++++++++++++++
providers/oracle/docs/operators/index.rst | 64 ---------------
providers/oracle/tests/system/oracle/__init__.py | 16 ++++
.../oracle/tests/system/oracle/example_oracle.py | 93 +++++++++++++++++++++
providers/postgres/docs/index.rst | 3 +-
...gres_operator_howto_guide.rst => operators.rst} | 0
.../{operators/transfer => }/gcs_to_presto.rst | 0
providers/presto/docs/index.rst | 3 +-
providers/presto/docs/operators.rst | 72 ++++++++++++++++
.../presto/tests/system/presto/example_presto.py | 66 +++++++++++++++
providers/vertica/docs/index.rst | 1 +
providers/vertica/docs/operators.rst | 76 +++++++++++++++++
.../tests/system/vertica/example_vertica.py | 91 ++++++++++++++++++++
17 files changed, 685 insertions(+), 68 deletions(-)
diff --git a/providers/odbc/docs/index.rst b/providers/odbc/docs/index.rst
index 114d9561880..0020637e236 100644
--- a/providers/odbc/docs/index.rst
+++ b/providers/odbc/docs/index.rst
@@ -35,6 +35,7 @@
:caption: Guides
Connection types <connections/odbc>
+ Operators <operators>
.. toctree::
:hidden:
diff --git a/providers/odbc/docs/operators.rst
b/providers/odbc/docs/operators.rst
new file mode 100644
index 00000000000..877c69ad016
--- /dev/null
+++ b/providers/odbc/docs/operators.rst
@@ -0,0 +1,95 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+.. _howto/operator:OdbcOperator:
+
+OdbcOperator
+============
+
+Open Database Connectivity (ODBC) is a standard API for accessing database
+management systems (DBMS).
+
+
+Prerequisite Tasks
+^^^^^^^^^^^^^^^^^^
+
+To use this operator you need:
+
+ * Install the python module ``pyodbc``:
+ .. code-block:: bash
+
+ pip install apache-airflow[odbc]
+
+ * Have the ODBC driver for your database installed.
+ * Configure an ODBC Data Source Name (DSN) if required by your database.
+
+Once these prerequisites are satisfied you should be able to run
+this Python snippet (replacing the variables values with the ones
+related to your driver).
+
+Other error messages will inform you in case the ``pyodbc`` module
+is missing or the driver is not available. A ``Connection Refused``
+error means that the connection string is pointing to a host where no
+database is listening for new connections.
+
+ .. code-block:: python
+
+ import pyodbc
+
+ driver = "{ODBC Driver 17 for SQL Server}"
+ server = "localhost"
+ database = "testdb"
+ username = "user"
+ password = "password"
+
+    conn_str = (
+        f"DRIVER={driver};" f"SERVER={server};" f"DATABASE={database};" f"UID={username};" f"PWD={password};"
+    )
+
+ conn = pyodbc.connect(conn_str)
+
+Usage
+^^^^^
+Use the :class:`~airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` to execute
+commands against a database (or data storage) accessible via an ODBC driver.
+
+The :doc:`ODBC Connection <connections/odbc>` must be passed as
+``conn_id``.
+
+.. exampleinclude:: /../../providers/odbc/tests/system/odbc/example_odbc.py
+ :language: python
+ :start-after: [START howto_operator_odbc]
+ :end-before: [END howto_operator_odbc]
+
+
+The parameter ``sql`` can receive a string or a list of strings.
+Each string can be an SQL statement or a reference to a template file.
+Template references are recognized by ending in '.sql'.
+
+The parameter ``autocommit`` if set to ``True`` will execute a commit after
+each command (default is ``False``).
+
+Templating
+----------
+
+You can use :ref:`Jinja templates <concepts:jinja-templating>` to parameterize
+``sql``.
+
+.. exampleinclude:: /../../providers/odbc/tests/system/odbc/example_odbc.py
+ :language: python
+ :start-after: [START howto_operator_odbc_template]
+ :end-before: [END howto_operator_odbc_template]
diff --git a/providers/odbc/tests/system/odbc/example_odbc.py
b/providers/odbc/tests/system/odbc/example_odbc.py
new file mode 100644
index 00000000000..cb86a2e7211
--- /dev/null
+++ b/providers/odbc/tests/system/odbc/example_odbc.py
@@ -0,0 +1,96 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example DAG demonstrating the usage of the SQLExecuteQueryOperator with ODBC.
+"""
+
+from __future__ import annotations
+
+import os
+from datetime import datetime, timedelta
+
+from airflow import DAG
+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_odbc_operator"
+
+with DAG(
+ dag_id=DAG_ID,
+ schedule="0 0 * * *",
+ start_date=datetime(2025, 1, 1),
+ dagrun_timeout=timedelta(minutes=60),
+ tags=["example", "odbc"],
+ catchup=False,
+) as dag:
+ # [START howto_operator_odbc]
+
+ create_table = SQLExecuteQueryOperator(
+ task_id="create_table",
+ sql="""
+ CREATE TABLE IF NOT EXISTS my_table (
+ dt VARCHAR(50),
+ value VARCHAR(255)
+ );
+ """,
+ conn_id="my_odbc_conn",
+ autocommit=True,
+ )
+
+ # [END howto_operator_odbc]
+
+ # [START howto_operator_odbc_template]
+
+ insert_data = SQLExecuteQueryOperator(
+ task_id="insert_data",
+ sql="""
+ INSERT INTO my_table (dt, value)
+ VALUES ('{{ ds }}', 'test_value');
+ """,
+ conn_id="my_odbc_conn",
+ autocommit=True,
+ )
+
+ # [END howto_operator_odbc_template]
+
+ delete_data = SQLExecuteQueryOperator(
+ task_id="delete_data",
+ sql="""
+ DELETE FROM my_table
+ WHERE dt = '{{ ds }}';
+ """,
+ conn_id="my_odbc_conn",
+ autocommit=True,
+ )
+
+ drop_table = SQLExecuteQueryOperator(
+ task_id="drop_table",
+ sql="DROP TABLE IF EXISTS my_table;",
+ conn_id="my_odbc_conn",
+ autocommit=True,
+ )
+
+ create_table >> insert_data >> delete_data >> drop_table
+
+ from tests_common.test_utils.watcher import watcher
+
+ list(dag.tasks) >> watcher()
+
+from tests_common.test_utils.system_tests import get_test_run # noqa: E402
+
+test_run = get_test_run(dag)
diff --git a/providers/oracle/docs/index.rst b/providers/oracle/docs/index.rst
index fa4e2380ce1..5ef251f5fef 100644
--- a/providers/oracle/docs/index.rst
+++ b/providers/oracle/docs/index.rst
@@ -35,7 +35,7 @@
:caption: Guides
Connection types <connections/oracle>
- Operators <operators/index>
+ Operators <operators>
.. toctree::
:hidden:
diff --git a/providers/oracle/docs/operators.rst
b/providers/oracle/docs/operators.rst
new file mode 100644
index 00000000000..a77f1c6eaeb
--- /dev/null
+++ b/providers/oracle/docs/operators.rst
@@ -0,0 +1,74 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+
+
+.. _howto/operator:OracleOperator:
+
+SQLExecuteQueryOperator to connect to Oracle
+============================================
+
+Use the :class:`SQLExecuteQueryOperator <airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator>` to execute
+Oracle commands in an `Oracle <https://docs.oracle.com/en/>`__ database.
+
+.. note::
+ Previously, ``OracleStoredProcedureOperator`` was used to perform this
kind of operation. After deprecation this has been removed. Please use
``SQLExecuteQueryOperator`` instead.
+
+Using the Operator
+^^^^^^^^^^^^^^^^^^
+
+Use the ``conn_id`` argument to connect to your Oracle instance where
+the connection metadata is structured as follows:
+
+.. list-table:: Oracle Airflow Connection Metadata
+ :widths: 25 25
+ :header-rows: 1
+
+ * - Parameter
+ - Input
+ * - Host: string
+ - Oracle database hostname
+ * - Schema: string
+ - Schema to execute SQL operations on by default
+ * - Login: string
+ - Oracle database user
+ * - Password: string
+ - Oracle database user password
+ * - Port: int
+ - Oracle database port (default: 1521)
+ * - Extra: JSON
+ - Additional connection configuration, such as DSN string:
+ ``{"dsn":
"(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=<hostname>)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=<service_name>)))"}``
+
+An example usage of the SQLExecuteQueryOperator to connect to Oracle is as
follows:
+
+.. exampleinclude::
/../../providers/oracle/tests/system/oracle/example_oracle.py
+ :language: python
+ :start-after: [START howto_operator_oracle]
+ :end-before: [END howto_operator_oracle]
+
+
+Reference
+^^^^^^^^^
+For further information, look at:
+
+* `Oracle Documentation <https://docs.oracle.com/en/>`__
+
+.. note::
+
+ Parameters given via SQLExecuteQueryOperator() are given first-place priority
+ relative to parameters set via Airflow connection metadata (such as
``schema``, ``login``, ``password`` etc).
diff --git a/providers/oracle/docs/operators/index.rst
b/providers/oracle/docs/operators/index.rst
deleted file mode 100644
index 32998380e86..00000000000
--- a/providers/oracle/docs/operators/index.rst
+++ /dev/null
@@ -1,64 +0,0 @@
- .. Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- .. http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
-
-.. _howto/operators:oracle:
-
-Oracle Operators
-================
-The Oracle connection type provides connection to a Oracle database.
-
-Execute a Stored Procedure in an Oracle database
-------------------------------------------------
-
-To execute a Stored Procedure in an Oracle database, use the
-:class:`~airflow.providers.oracle.operators.oracle.OracleStoredProcedureOperator`.
-
-Assume a stored procedure exists in the database that looks like this:
-
- .. code-block:: sql
-
- CREATE OR REPLACE PROCEDURE
- TEST_PROCEDURE (val_in IN INT, val_out OUT INT) AS
- BEGIN
- val_out := val_in * 2;
- END;
- /
-
-This stored procedure accepts a single integer argument, val_in, and outputs
-a single integer argument, val_out. This can be represented with the following
-call using
:class:`~airflow.providers.oracle.operators.oracle.OracleStoredProcedureOperator`
-with parameters passed positionally as a list:
-
-.. exampleinclude::
/../../providers/oracle/src/airflow/providers/oracle/example_dags/example_oracle.py
- :language: python
- :start-after: [START
howto_oracle_stored_procedure_operator_with_list_inout]
- :end-before: [END howto_oracle_stored_procedure_operator_with_list_inout]
-
-
-Alternatively, parameters can be passed as keyword arguments using a dictionary
-as well.
-
-.. exampleinclude::
/../../providers/oracle/src/airflow/providers/oracle/example_dags/example_oracle.py
- :language: python
- :start-after: [START
howto_oracle_stored_procedure_operator_with_dict_inout]
- :end-before: [END howto_oracle_stored_procedure_operator_with_dict_inout]
-
-Both input and output will be passed to xcom provided that xcom push is
requested.
-
-More on stored procedure execution can be found in `oracledb documentation
-<https://python-oracledb.readthedocs.io/en/latest/user_guide/plsql_execution.html#pl-sql-stored-procedures>`_.
diff --git a/providers/oracle/tests/system/oracle/__init__.py
b/providers/oracle/tests/system/oracle/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/providers/oracle/tests/system/oracle/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/providers/oracle/tests/system/oracle/example_oracle.py
b/providers/oracle/tests/system/oracle/example_oracle.py
new file mode 100644
index 00000000000..3bba7189039
--- /dev/null
+++ b/providers/oracle/tests/system/oracle/example_oracle.py
@@ -0,0 +1,93 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example DAG for the use of the SQLExecuteQueryOperator with Oracle.
+"""
+
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_oracle"
+
+with DAG(
+ dag_id=DAG_ID,
+ schedule=None,
+ start_date=datetime(2025, 1, 1),
+ default_args={"conn_id": "oracle_conn_id"},
+ tags=["example"],
+ catchup=False,
+) as dag:
+ # [START howto_operator_oracle]
+
+ # Example of creating a task that calls a common CREATE TABLE sql command.
+ create_table_oracle_task = SQLExecuteQueryOperator(
+ task_id="create_table_oracle",
+ sql=r"""
+ BEGIN
+ EXECUTE IMMEDIATE '
+ CREATE TABLE employees (
+ id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
+ name VARCHAR2(50),
+ salary NUMBER(10, 2),
+ hire_date DATE DEFAULT SYSDATE
+ )';
+ END;
+ """,
+ )
+
+ # [END howto_operator_oracle]
+
+ insert_data_oracle_task = SQLExecuteQueryOperator(
+ task_id="insert_data_oracle",
+ sql=r"""
+ BEGIN
+ INSERT INTO employees (name, salary) VALUES ('Alice', 50000);
+ INSERT INTO employees (name, salary) VALUES ('Bob', 60000);
+ END;
+ """,
+ )
+
+ select_data_oracle_task = SQLExecuteQueryOperator(
+ task_id="select_data_oracle",
+ sql=r"""
+ SELECT * FROM employees
+ """,
+ )
+
+ drop_table_oracle_task = SQLExecuteQueryOperator(
+ task_id="drop_table_oracle",
+ sql="DROP TABLE employees",
+ )
+
+    (create_table_oracle_task >> insert_data_oracle_task >> select_data_oracle_task >> drop_table_oracle_task)
+
+ from tests_common.test_utils.watcher import watcher
+
+ # This test needs watcher in order to properly mark success/failure
+ # when "tearDown" task with trigger rule is part of the DAG
+ list(dag.tasks) >> watcher()
+
+from tests_common.test_utils.system_tests import get_test_run # noqa: E402
+
+# Needed to run the example DAG with pytest (see:
tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/providers/postgres/docs/index.rst
b/providers/postgres/docs/index.rst
index b4d698ceb72..9a7e785bde4 100644
--- a/providers/postgres/docs/index.rst
+++ b/providers/postgres/docs/index.rst
@@ -34,8 +34,7 @@
:caption: Guides
Connection types <connections/postgres>
- PostgresOperator types <operators/postgres_operator_howto_guide>
-
+ Operators <operators>
.. toctree::
:hidden:
diff --git
a/providers/postgres/docs/operators/postgres_operator_howto_guide.rst
b/providers/postgres/docs/operators.rst
similarity index 100%
rename from providers/postgres/docs/operators/postgres_operator_howto_guide.rst
rename to providers/postgres/docs/operators.rst
diff --git a/providers/presto/docs/operators/transfer/gcs_to_presto.rst
b/providers/presto/docs/gcs_to_presto.rst
similarity index 100%
rename from providers/presto/docs/operators/transfer/gcs_to_presto.rst
rename to providers/presto/docs/gcs_to_presto.rst
diff --git a/providers/presto/docs/index.rst b/providers/presto/docs/index.rst
index 3b7c4eec32e..a6465462112 100644
--- a/providers/presto/docs/index.rst
+++ b/providers/presto/docs/index.rst
@@ -34,8 +34,9 @@
:maxdepth: 1
:caption: Guides
- PrestoTransferOperator types <operators/transfer/gcs_to_presto>
+ PrestoTransferOperator types <gcs_to_presto>
Connection types <connections>
+ Operators <operators>
.. toctree::
:hidden:
diff --git a/providers/presto/docs/operators.rst
b/providers/presto/docs/operators.rst
new file mode 100644
index 00000000000..b4ab895b8e8
--- /dev/null
+++ b/providers/presto/docs/operators.rst
@@ -0,0 +1,72 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+
+
+.. _howto/operator:PrestoOperator:
+
+SQLExecuteQueryOperator to connect to Presto
+============================================
+
+Use the
:class:`SQLExecuteQueryOperator<airflow.providers.common.sql.operators.sql>` to
execute
+Presto commands in a `Presto <https://prestodb.io/docs/current/>`__ database.
+
+
+Using the Operator
+^^^^^^^^^^^^^^^^^^
+
+Use the ``conn_id`` argument to connect to your Presto instance where
+the connection metadata is structured as follows:
+
+.. list-table:: Presto Airflow Connection Metadata
+ :widths: 25 25
+ :header-rows: 1
+
+ * - Parameter
+ - Input
+ * - Host: string
+ - Presto server hostname or container name
+ * - Schema: string
+ - Schema to execute SQL operations on by default
+ * - Login: string
+ - Presto user (required)
+ * - Password: string
+ - Presto user password (if authentication is enabled)
+ * - Port: int
+ - Presto server port (default: 8080)
+ * - Extra: JSON
+      - Additional parameters such as ``{"user": "airflow_user"}``
+
+
+An example usage of the SQLExecuteQueryOperator to connect to Presto is as
follows:
+
+.. exampleinclude::
/../../providers/presto/tests/system/presto/example_presto.py
+ :language: python
+ :start-after: [START howto_operator_presto]
+ :end-before: [END howto_operator_presto]
+
+
+Reference
+^^^^^^^^^
+For further information, look at:
+
+* `Presto Documentation <https://prestodb.io/docs/current/>`__
+
+.. note::
+
+ Parameters given via SQLExecuteQueryOperator() are given first-place priority
+ relative to parameters set via Airflow connection metadata (such as
``schema``, ``login``, ``password`` etc).
diff --git a/providers/presto/tests/system/presto/example_presto.py
b/providers/presto/tests/system/presto/example_presto.py
new file mode 100644
index 00000000000..c2432139ed2
--- /dev/null
+++ b/providers/presto/tests/system/presto/example_presto.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example DAG for the use of the SQLExecuteQueryOperator with Presto.
+"""
+
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_presto"
+
+with DAG(
+ dag_id=DAG_ID,
+ schedule=None,
+ start_date=datetime(2025, 1, 1),
+ default_args={"conn_id": "presto_conn_id"},
+ tags=["example"],
+ catchup=False,
+) as dag:
+ # [START howto_operator_presto]
+
+ # Example of creating a task that calls a common CREATE TABLE sql command.
+ select_presto_task = SQLExecuteQueryOperator(
+ task_id="select_presto",
+ sql="SELECT 1",
+ )
+
+ # [END howto_operator_presto]
+
+ drop_table_presto_task = SQLExecuteQueryOperator(
+ task_id="drop_table_presto",
+ sql="DROP TABLE IF EXISTS hive.default.example_table",
+ )
+
+ (select_presto_task >> drop_table_presto_task)
+
+ from tests_common.test_utils.watcher import watcher
+
+ # This test needs watcher in order to properly mark success/failure
+ # when "tearDown" task with trigger rule is part of the DAG
+ list(dag.tasks) >> watcher()
+
+from tests_common.test_utils.system_tests import get_test_run # noqa: E402
+
+# Needed to run the example DAG with pytest (see:
tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git a/providers/vertica/docs/index.rst b/providers/vertica/docs/index.rst
index aa431382bbf..86ffa42c14a 100644
--- a/providers/vertica/docs/index.rst
+++ b/providers/vertica/docs/index.rst
@@ -35,6 +35,7 @@
:caption: Guides
Connection types <connections/vertica>
+ Operators <operators>
.. toctree::
:hidden:
diff --git a/providers/vertica/docs/operators.rst
b/providers/vertica/docs/operators.rst
new file mode 100644
index 00000000000..e5af686bb0c
--- /dev/null
+++ b/providers/vertica/docs/operators.rst
@@ -0,0 +1,76 @@
+.. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+
+.. _howto/operator:VerticaOperator:
+
+SQLExecuteQueryOperator to connect to Vertica
+=============================================
+
+Use the :class:`SQLExecuteQueryOperator
<airflow.providers.common.sql.operators.sql>` to execute
+Vertica commands in a `Vertica <https://www.vertica.com/documentation/>`__
database.
+
+.. note::
+ If you previously used other legacy operators to handle Vertica
interactions, you can now use
+ ``SQLExecuteQueryOperator`` for both stored procedures and raw SQL
execution.
+
+Using the Operator
+^^^^^^^^^^^^^^^^^^
+
+Use the ``conn_id`` argument to connect to your Vertica instance where
+the connection metadata is structured as follows:
+
+.. list-table:: Vertica Airflow Connection Metadata
+ :widths: 25 25
+ :header-rows: 1
+
+ * - Parameter
+ - Input
+ * - Host: string
+ - Vertica database hostname or container name (if running in Docker
network)
+ * - Schema: string
+ - Schema to execute SQL operations on by default
+ * - Login: string
+ - Vertica database user (often ``dbadmin`` if using community Docker
image)
+ * - Password: string
+ - Vertica database user password
+ * - Port: int
+ - Vertica database port (default: 5433)
+ * - Extra: JSON
+ - Additional connection configuration (e.g. TLS settings):
+ ``{"tlsmode": "disable"}``
+
+An example usage of the ``SQLExecuteQueryOperator`` to connect to Vertica is
as follows:
+
+.. exampleinclude::
/../../providers/vertica/tests/system/vertica/example_vertica.py
+ :language: python
+ :start-after: [START howto_operator_vertica]
+ :end-before: [END howto_operator_vertica]
+
+
+Reference
+^^^^^^^^^
+
+For further information, look at:
+
+* `Vertica Documentation <https://www.vertica.com/documentation/>`__
+
+.. note::
+
+ Parameters given via ``SQLExecuteQueryOperator()`` take first-place priority
+ relative to parameters set via the Airflow connection metadata (such as
+ ``schema``, ``login``, ``password``, etc).
diff --git a/providers/vertica/tests/system/vertica/example_vertica.py
b/providers/vertica/tests/system/vertica/example_vertica.py
new file mode 100644
index 00000000000..57847cdabe1
--- /dev/null
+++ b/providers/vertica/tests/system/vertica/example_vertica.py
@@ -0,0 +1,91 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example DAG for the use of the SQLExecuteQueryOperator with Vertica.
+"""
+
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_vertica"
+
+with DAG(
+ dag_id=DAG_ID,
+ schedule=None,
+ start_date=datetime(2025, 1, 1),
+ default_args={"conn_id": "vertica_conn_id"},
+ tags=["example"],
+ catchup=False,
+) as dag:
+ # [START howto_operator_vertica]
+
+ create_table_vertica_task = SQLExecuteQueryOperator(
+ task_id="create_table_vertica",
+ sql=[
+ "DROP TABLE IF EXISTS employees;",
+ """
+ CREATE TABLE employees (
+ id IDENTITY,
+ name VARCHAR(50),
+ salary NUMERIC(10,2),
+ hire_date TIMESTAMP DEFAULT NOW()
+ )
+ """,
+ ],
+ )
+
+ # [END howto_operator_vertica]
+
+ insert_data_vertica_task = SQLExecuteQueryOperator(
+ task_id="insert_data_vertica",
+ sql="""
+ INSERT INTO employees (name, salary) VALUES ('Alice', 50000);
+ INSERT INTO employees (name, salary) VALUES ('Bob', 60000);
+ """,
+ )
+
+ select_data_vertica_task = SQLExecuteQueryOperator(
+ task_id="select_data_vertica",
+ sql="SELECT * FROM employees",
+ )
+
+ drop_table_vertica_task = SQLExecuteQueryOperator(
+ task_id="drop_table_vertica",
+ sql="DROP TABLE IF EXISTS employees",
+ )
+
+ (
+ create_table_vertica_task
+ >> insert_data_vertica_task
+ >> select_data_vertica_task
+ >> drop_table_vertica_task
+ )
+
+ from tests_common.test_utils.watcher import watcher
+
+ # Ensure test watchers are triggered for success/failure
+ list(dag.tasks) >> watcher()
+
+from tests_common.test_utils.system_tests import get_test_run # noqa: E402
+
+test_run = get_test_run(dag)