This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new d22813d0e3f Standardize connection docs labels across providers
(#63455)
d22813d0e3f is described below
commit d22813d0e3fdc12e4d6b08cca6871b8dc3ed3b6c
Author: HARSHITH GANDHE <[email protected]>
AuthorDate: Sun Mar 15 02:57:12 2026 +0530
Standardize connection docs labels across providers (#63455)
* Standardize connection docs labels across providers
* Standardize connection docs labels across providers[2]
* remove duplicates
* Add pre-commit hook
* executable permission
* CI fixes with correct refs in python files
---
.../production-deployment.rst | 4 +-
providers/.pre-commit-config.yaml | 7 +
providers/airbyte/docs/connections.rst | 2 +-
providers/alibaba/docs/connections/alibaba.rst | 2 +
.../cassandra/docs/connections/cassandra.rst | 2 +-
providers/apache/hdfs/docs/connections.rst | 2 +
providers/apache/kafka/docs/connections/kafka.rst | 2 +-
providers/apache/kylin/docs/connections.rst | 2 +
providers/apache/livy/docs/connections.rst | 2 +
.../spark/docs/connections/spark-connect.rst | 2 +-
.../apache/spark/docs/connections/spark-sql.rst | 2 +-
.../apache/spark/docs/connections/spark-submit.rst | 2 +-
.../providers/apache/spark/hooks/spark_jdbc.py | 2 +-
.../providers/apache/spark/hooks/spark_submit.py | 2 +-
.../providers/apache/spark/operators/spark_jdbc.py | 2 +-
.../apache/spark/operators/spark_pipelines.py | 2 +-
.../apache/spark/operators/spark_submit.py | 2 +-
.../tinkerpop/docs/connections/tinkerpop.rst | 2 +-
providers/asana/docs/connections/asana.rst | 2 +-
providers/atlassian/jira/docs/connections.rst | 2 +
.../common/ai/docs/connections/pydantic_ai.rst | 2 +-
providers/common/ai/docs/hooks/pydantic_ai.rst | 2 +-
providers/common/ai/docs/operators/agent.rst | 2 +-
providers/common/ai/docs/operators/llm.rst | 2 +-
providers/common/ai/docs/operators/llm_branch.rst | 2 +-
.../ai/docs/operators/llm_schema_compare.rst | 2 +-
providers/common/ai/docs/operators/llm_sql.rst | 2 +-
providers/datadog/docs/connections/datadog.rst | 2 +-
providers/dbt/cloud/docs/connections.rst | 2 +-
.../src/airflow/providers/dbt/cloud/hooks/dbt.py | 2 +-
.../discord/docs/connections/discord-webhook.rst | 2 +-
providers/google/docs/connections/bigquery.rst | 2 +-
providers/google/docs/connections/gcp.rst | 6 +-
providers/google/docs/connections/gcp_looker.rst | 2 +
providers/google/docs/connections/gcp_sql.rst | 2 +-
providers/google/docs/connections/gcp_ssh.rst | 2 +
.../google-cloud-secret-manager-backend.rst | 2 +-
providers/grpc/docs/connections/grpc.rst | 2 +-
providers/jenkins/docs/connections.rst | 2 +-
providers/microsoft/azure/docs/connections/acr.rst | 2 +-
providers/microsoft/azure/docs/connections/adf.rst | 2 +-
providers/microsoft/azure/docs/connections/adl.rst | 2 +-
providers/microsoft/azure/docs/connections/adx.rst | 2 +-
.../azure/docs/connections/azure_synapse.rst | 2 +-
.../airflow/providers/microsoft/azure/hooks/adx.py | 2 +-
.../microsoft/azure/hooks/container_registry.py | 2 +-
.../microsoft/azure/hooks/data_factory.py | 4 +-
.../providers/microsoft/azure/hooks/data_lake.py | 2 +-
.../providers/microsoft/azure/hooks/synapse.py | 8 +-
.../providers/microsoft/azure/operators/adls.py | 6 +-
.../providers/microsoft/azure/operators/adx.py | 2 +-
providers/odbc/docs/connections/odbc.rst | 2 +-
.../opensearch/docs/connections/opensearch.rst | 2 +-
providers/opsgenie/docs/connections.rst | 2 +
.../pagerduty/docs/connections/pagerdurty.rst | 2 +-
.../docs/connections/pagerduty-events.rst | 2 +-
providers/presto/docs/connections.rst | 2 +
providers/redis/docs/connections.rst | 2 +
.../salesforce/docs/connections/salesforce.rst | 2 +-
.../airflow/providers/salesforce/operators/bulk.py | 2 +-
.../salesforce/operators/salesforce_apex_rest.py | 2 +-
providers/samba/docs/connections.rst | 2 +
.../docs/connections/slack-incoming-webhook.rst | 2 +-
.../airflow/providers/slack/hooks/slack_webhook.py | 2 +-
providers/sqlite/docs/connections/sqlite.rst | 2 +-
.../teradata/transfers/azure_blob_to_teradata.py | 2 +-
.../providers/teradata/transfers/s3_to_teradata.py | 2 +-
.../teradata/transfers/teradata_to_teradata.py | 2 +-
providers/trino/docs/connections.rst | 2 +
providers/yandex/docs/connections/yandexcloud.rst | 2 +-
.../yandex-cloud-lockbox-secret-backend.rst | 2 +-
providers/zendesk/docs/connections.rst | 2 +
scripts/ci/prek/check_connection_doc_labels.py | 149 +++++++++++++++++++++
73 files changed, 249 insertions(+), 67 deletions(-)
diff --git
a/airflow-core/docs/administration-and-deployment/production-deployment.rst
b/airflow-core/docs/administration-and-deployment/production-deployment.rst
index 7d7b0d520e3..e69d4364887 100644
--- a/airflow-core/docs/administration-and-deployment/production-deployment.rst
+++ b/airflow-core/docs/administration-and-deployment/production-deployment.rst
@@ -240,13 +240,13 @@ If you are using Kubernetes Engine, you can use
`Workload Identity
<https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity>`__
to assign
an identity to individual pods.
-For more information about service accounts in the Airflow, see
:ref:`howto/connection:gcp`
+For more information about service accounts in Airflow, see
:ref:`howto/connection:google_cloud_platform`
Impersonate Service Accounts
----------------------------
If you need access to other service accounts, you can
-:ref:`impersonate other service accounts <howto/connection:gcp:impersonation>`
to exchange the token with
+:ref:`impersonate other service accounts
<howto/connection:google_cloud_platform:impersonation>` to exchange the token
with
the default identity to another service account. Thus, the account keys are
still managed by Google
and cannot be read by your workload.
diff --git a/providers/.pre-commit-config.yaml
b/providers/.pre-commit-config.yaml
index 7376116a13c..4926188916f 100644
--- a/providers/.pre-commit-config.yaml
+++ b/providers/.pre-commit-config.yaml
@@ -199,6 +199,13 @@ repos:
files: ^.*/provider\.yaml$|^.*/docs/.*
require_serial: true
pass_filenames: false
+ - id: check-connection-doc-labels
+ name: Validate connection doc labels match provider.yaml
+ entry: ../scripts/ci/prek/check_connection_doc_labels.py
+ language: python
+ files:
^.*/provider\.yaml$|^.*/docs/.*/connections.*\.rst$|^.*/docs/connections\.rst$
+ require_serial: true
+ pass_filenames: false
- id: mypy-providers
stages: ['pre-push']
name: Run mypy for providers
diff --git a/providers/airbyte/docs/connections.rst
b/providers/airbyte/docs/connections.rst
index e238da4f8b7..1f2a00643ed 100644
--- a/providers/airbyte/docs/connections.rst
+++ b/providers/airbyte/docs/connections.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:airbyte:
Airbyte Connection
==================
diff --git a/providers/alibaba/docs/connections/alibaba.rst
b/providers/alibaba/docs/connections/alibaba.rst
index 4cf4747d7e2..eb0d8e06cb5 100644
--- a/providers/alibaba/docs/connections/alibaba.rst
+++ b/providers/alibaba/docs/connections/alibaba.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:alibaba_cloud:
+
Alibaba Cloud Connection
========================
diff --git a/providers/apache/cassandra/docs/connections/cassandra.rst
b/providers/apache/cassandra/docs/connections/cassandra.rst
index 8adfebc381c..54184bf8941 100644
--- a/providers/apache/cassandra/docs/connections/cassandra.rst
+++ b/providers/apache/cassandra/docs/connections/cassandra.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:cassandra:
Apache Cassandra Connection
===========================
diff --git a/providers/apache/hdfs/docs/connections.rst
b/providers/apache/hdfs/docs/connections.rst
index c54cd829978..5a82ebe7bf8 100644
--- a/providers/apache/hdfs/docs/connections.rst
+++ b/providers/apache/hdfs/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:webhdfs:
+
Apache HDFS Connection
======================
diff --git a/providers/apache/kafka/docs/connections/kafka.rst
b/providers/apache/kafka/docs/connections/kafka.rst
index c1803f0b21c..300499a1e80 100644
--- a/providers/apache/kafka/docs/connections/kafka.rst
+++ b/providers/apache/kafka/docs/connections/kafka.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-.. _howto/connection: kafka
+.. _howto/connection:kafka:
Apache Kafka Connection
========================
diff --git a/providers/apache/kylin/docs/connections.rst
b/providers/apache/kylin/docs/connections.rst
index 9d660039960..0b072621fe0 100644
--- a/providers/apache/kylin/docs/connections.rst
+++ b/providers/apache/kylin/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:kylin:
+
Apache Kylin Connection
=======================
diff --git a/providers/apache/livy/docs/connections.rst
b/providers/apache/livy/docs/connections.rst
index 86752ebbf8a..35a810402f9 100644
--- a/providers/apache/livy/docs/connections.rst
+++ b/providers/apache/livy/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:livy:
+
Apache Livy Connection
======================
diff --git a/providers/apache/spark/docs/connections/spark-connect.rst
b/providers/apache/spark/docs/connections/spark-connect.rst
index aa5ef071578..9c26ad26b94 100644
--- a/providers/apache/spark/docs/connections/spark-connect.rst
+++ b/providers/apache/spark/docs/connections/spark-connect.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:spark-connect:
+.. _howto/connection:spark_connect:
Apache Spark Connect Connection
===============================
diff --git a/providers/apache/spark/docs/connections/spark-sql.rst
b/providers/apache/spark/docs/connections/spark-sql.rst
index c4e4c606de1..94da92d9a90 100644
--- a/providers/apache/spark/docs/connections/spark-sql.rst
+++ b/providers/apache/spark/docs/connections/spark-sql.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:spark-sql:
+.. _howto/connection:spark_sql:
Apache Spark SQL Connection
===========================
diff --git a/providers/apache/spark/docs/connections/spark-submit.rst
b/providers/apache/spark/docs/connections/spark-submit.rst
index 3b808bfc32b..498796d016b 100644
--- a/providers/apache/spark/docs/connections/spark-submit.rst
+++ b/providers/apache/spark/docs/connections/spark-submit.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:spark-submit:
+.. _howto/connection:spark:
Apache Spark Submit Connection
==============================
diff --git
a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc.py
b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc.py
index abe98c34bcf..380b2e28b8f 100644
---
a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc.py
+++
b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc.py
@@ -29,7 +29,7 @@ class SparkJDBCHook(SparkSubmitHook):
Extends the SparkSubmitHook for performing data transfers to/from
JDBC-based databases with Apache Spark.
:param spark_app_name: Name of the job (default airflow-spark-jdbc)
- :param spark_conn_id: The :ref:`spark connection id
<howto/connection:spark-submit>`
+ :param spark_conn_id: The :ref:`spark connection id
<howto/connection:spark>`
as configured in Airflow administration
:param spark_conf: Any additional Spark configuration properties
:param spark_py_files: Additional python files used (.zip, .egg, or .py)
diff --git
a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
index 7870b790535..d275c5fc7b4 100644
---
a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
+++
b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -46,7 +46,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
Wrap the spark-submit binary to kick off a spark-submit job; requires
"spark-submit" binary in the PATH.
:param conf: Arbitrary Spark configuration properties
- :param spark_conn_id: The :ref:`spark connection id
<howto/connection:spark-submit>` as configured
+ :param spark_conn_id: The :ref:`spark connection id
<howto/connection:spark>` as configured
in Airflow administration. When an invalid connection_id is supplied,
it will default
to yarn.
:param files: Upload additional files to the executor running the job,
separated by a
diff --git
a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_jdbc.py
b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_jdbc.py
index a4a334fb452..57526553ca4 100644
---
a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_jdbc.py
+++
b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_jdbc.py
@@ -37,7 +37,7 @@ class SparkJDBCOperator(SparkSubmitOperator):
:ref:`howto/operator:SparkJDBCOperator`
:param spark_app_name: Name of the job (default airflow-spark-jdbc)
- :param spark_conn_id: The :ref:`spark connection id
<howto/connection:spark-submit>`
+ :param spark_conn_id: The :ref:`spark connection id
<howto/connection:spark>`
as configured in Airflow administration
:param spark_conf: Any additional Spark configuration properties
:param spark_py_files: Additional python files used (.zip, .egg, or .py)
diff --git
a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_pipelines.py
b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_pipelines.py
index 3e717271229..ff7ece0b5b1 100644
---
a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_pipelines.py
+++
b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_pipelines.py
@@ -46,7 +46,7 @@ class SparkPipelinesOperator(BaseOperator):
:param pipeline_spec: Path to the pipeline specification file (YAML).
(templated)
:param pipeline_command: The spark-pipelines command to execute ('run',
'dry-run'). Default is 'run'.
:param conf: Arbitrary Spark configuration properties (templated)
- :param conn_id: The :ref:`spark connection id
<howto/connection:spark-submit>` as configured
+ :param conn_id: The :ref:`spark connection id <howto/connection:spark>` as
configured
in Airflow administration. When an invalid connection_id is supplied,
it will default to yarn.
:param num_executors: Number of executors to launch
:param executor_cores: Number of cores per executor (Default: 2)
diff --git
a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
index 7e26d5b2a6c..3c7b0b5db56 100644
---
a/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
+++
b/providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_submit.py
@@ -41,7 +41,7 @@ class SparkSubmitOperator(BaseOperator):
:param application: The application that submitted as a job, either jar or
py file. (templated)
:param conf: Arbitrary Spark configuration properties (templated)
- :param conn_id: The :ref:`spark connection id
<howto/connection:spark-submit>` as configured
+ :param conn_id: The :ref:`spark connection id <howto/connection:spark>` as
configured
in Airflow administration. When an invalid connection_id is supplied,
it will default to yarn.
:param files: Upload additional files to the executor running the job,
separated by a
comma. Files will be placed in the working directory of each
executor.
diff --git a/providers/apache/tinkerpop/docs/connections/tinkerpop.rst
b/providers/apache/tinkerpop/docs/connections/tinkerpop.rst
index ce64ad135cd..50b82c9f4bc 100644
--- a/providers/apache/tinkerpop/docs/connections/tinkerpop.rst
+++ b/providers/apache/tinkerpop/docs/connections/tinkerpop.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:tinkerpop:
+.. _howto/connection:gremlin:
Gremlin Connection
====================
diff --git a/providers/asana/docs/connections/asana.rst
b/providers/asana/docs/connections/asana.rst
index 5d6a58d9c1e..f2d953504a8 100644
--- a/providers/asana/docs/connections/asana.rst
+++ b/providers/asana/docs/connections/asana.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:asana:
Asana Connection
================
diff --git a/providers/atlassian/jira/docs/connections.rst
b/providers/atlassian/jira/docs/connections.rst
index e33392f54d8..7504e58a043 100644
--- a/providers/atlassian/jira/docs/connections.rst
+++ b/providers/atlassian/jira/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:jira:
+
Jira Connection
===============
diff --git a/providers/common/ai/docs/connections/pydantic_ai.rst
b/providers/common/ai/docs/connections/pydantic_ai.rst
index 6d5363397a0..a7aa0c85a50 100644
--- a/providers/common/ai/docs/connections/pydantic_ai.rst
+++ b/providers/common/ai/docs/connections/pydantic_ai.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-.. _howto/connection:pydantic_ai:
+.. _howto/connection:pydanticai:
Pydantic AI Connection
======================
diff --git a/providers/common/ai/docs/hooks/pydantic_ai.rst
b/providers/common/ai/docs/hooks/pydantic_ai.rst
index 2cea9740925..65bb53b4a38 100644
--- a/providers/common/ai/docs/hooks/pydantic_ai.rst
+++ b/providers/common/ai/docs/hooks/pydantic_ai.rst
@@ -27,7 +27,7 @@ The hook manages API credentials from an Airflow connection
and creates pydantic
``Model`` and ``Agent`` objects. It supports any provider that pydantic-ai
supports.
.. seealso::
- :ref:`Connection configuration <howto/connection:pydantic_ai>`
+ :ref:`Connection configuration <howto/connection:pydanticai>`
Basic Usage
-----------
diff --git a/providers/common/ai/docs/operators/agent.rst
b/providers/common/ai/docs/operators/agent.rst
index 01d1efc0cc4..8052facd59f 100644
--- a/providers/common/ai/docs/operators/agent.rst
+++ b/providers/common/ai/docs/operators/agent.rst
@@ -31,7 +31,7 @@ a single prompt and returns the output. ``AgentOperator``
manages a stateful
tool-call loop where the LLM decides which tools to call and when to stop.
.. seealso::
- :ref:`Connection configuration <howto/connection:pydantic_ai>`
+ :ref:`Connection configuration <howto/connection:pydanticai>`
SQL Agent
diff --git a/providers/common/ai/docs/operators/llm.rst
b/providers/common/ai/docs/operators/llm.rst
index ce7618245b6..219965578bf 100644
--- a/providers/common/ai/docs/operators/llm.rst
+++ b/providers/common/ai/docs/operators/llm.rst
@@ -29,7 +29,7 @@ The operator sends a prompt to an LLM via
returns the output as XCom.
.. seealso::
- :ref:`Connection configuration <howto/connection:pydantic_ai>`
+ :ref:`Connection configuration <howto/connection:pydanticai>`
Basic Usage
-----------
diff --git a/providers/common/ai/docs/operators/llm_branch.rst
b/providers/common/ai/docs/operators/llm_branch.rst
index 4e7630bbbb5..94e1ce16fc8 100644
--- a/providers/common/ai/docs/operators/llm_branch.rst
+++ b/providers/common/ai/docs/operators/llm_branch.rst
@@ -29,7 +29,7 @@ and presents them to the LLM as a constrained enum via
pydantic-ai structured
output. No text parsing or manual validation is needed.
.. seealso::
- :ref:`Connection configuration <howto/connection:pydantic_ai>`
+ :ref:`Connection configuration <howto/connection:pydanticai>`
Basic Usage
-----------
diff --git a/providers/common/ai/docs/operators/llm_schema_compare.rst
b/providers/common/ai/docs/operators/llm_schema_compare.rst
index ebdf3f48994..ad548c7fc6e 100644
--- a/providers/common/ai/docs/operators/llm_schema_compare.rst
+++ b/providers/common/ai/docs/operators/llm_schema_compare.rst
@@ -32,7 +32,7 @@ The result is a structured
:class:`~airflow.providers.common.ai.operators.llm_sc
containing a list of mismatches with severity levels, descriptions, and
suggested actions.
.. seealso::
- :ref:`Connection configuration <howto/connection:pydantic_ai>`
+ :ref:`Connection configuration <howto/connection:pydanticai>`
Basic Usage
-----------
diff --git a/providers/common/ai/docs/operators/llm_sql.rst
b/providers/common/ai/docs/operators/llm_sql.rst
index acfb944aad8..cdcc6c4a276 100644
--- a/providers/common/ai/docs/operators/llm_sql.rst
+++ b/providers/common/ai/docs/operators/llm_sql.rst
@@ -27,7 +27,7 @@ The operator generates SQL but does not execute it. The
generated query is retur
as XCom and can be passed to ``SQLExecuteQueryOperator`` or used in downstream
tasks.
.. seealso::
- :ref:`Connection configuration <howto/connection:pydantic_ai>`
+ :ref:`Connection configuration <howto/connection:pydanticai>`
Basic Usage
-----------
diff --git a/providers/datadog/docs/connections/datadog.rst
b/providers/datadog/docs/connections/datadog.rst
index b56ad1f82b7..49657a3adf9 100644
--- a/providers/datadog/docs/connections/datadog.rst
+++ b/providers/datadog/docs/connections/datadog.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-.. _howto/connection:kubernetes:
+.. _howto/connection:datadog:
Datadog Connection
=============================
diff --git a/providers/dbt/cloud/docs/connections.rst
b/providers/dbt/cloud/docs/connections.rst
index b105e1089ef..d50ca2ae1dc 100644
--- a/providers/dbt/cloud/docs/connections.rst
+++ b/providers/dbt/cloud/docs/connections.rst
@@ -20,7 +20,7 @@
getdbt
-.. _howto/connection:dbt-cloud:
+.. _howto/connection:dbt_cloud:
Connecting to dbt Cloud
=======================
diff --git a/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py
b/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py
index 0a7cd33d7c7..89e512c51fb 100644
--- a/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py
+++ b/providers/dbt/cloud/src/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -168,7 +168,7 @@ class DbtCloudHook(HttpHook):
"""
Interact with dbt Cloud using the V2 (V3 if supported) API.
- :param dbt_cloud_conn_id: The ID of the :ref:`dbt Cloud connection
<howto/connection:dbt-cloud>`.
+ :param dbt_cloud_conn_id: The ID of the :ref:`dbt Cloud connection
<howto/connection:dbt_cloud>`.
:param timeout_seconds: Optional. The timeout in seconds for HTTP
requests. If not provided, no timeout is applied.
:param retry_limit: The number of times to retry a request in case of
failure.
:param retry_delay: The delay in seconds between retries.
diff --git a/providers/discord/docs/connections/discord-webhook.rst
b/providers/discord/docs/connections/discord-webhook.rst
index 2430f69897c..20b4ade17f7 100644
--- a/providers/discord/docs/connections/discord-webhook.rst
+++ b/providers/discord/docs/connections/discord-webhook.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:http:
+.. _howto/connection:discord:
Discord Webhook Connection
==========================
diff --git a/providers/google/docs/connections/bigquery.rst
b/providers/google/docs/connections/bigquery.rst
index 27a66582f0d..c596d3e86b1 100644
--- a/providers/google/docs/connections/bigquery.rst
+++ b/providers/google/docs/connections/bigquery.rst
@@ -31,7 +31,7 @@ Extra parameters that are specific to BigQuery will be
covered in this document.
Configuring the Connection
--------------------------
.. note::
- Please refer to :ref:`Google Cloud Connection
docs<howto/connection:gcp:configuring_the_connection>`
+ Please refer to :ref:`Google Cloud Connection
docs<howto/connection:google_cloud_platform:configuring_the_connection>`
for information regarding the basic authentication parameters.
Impersonation Scopes
diff --git a/providers/google/docs/connections/gcp.rst
b/providers/google/docs/connections/gcp.rst
index 62ef99e7d39..776f53a50a0 100644
--- a/providers/google/docs/connections/gcp.rst
+++ b/providers/google/docs/connections/gcp.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:gcp:
+.. _howto/connection:google_cloud_platform:
Google Cloud Connection
================================
@@ -82,7 +82,7 @@ For example:
export AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT='google-cloud-platform://'
-.. _howto/connection:gcp:configuring_the_connection:
+.. _howto/connection:google_cloud_platform:configuring_the_connection:
Configuring the Connection
--------------------------
@@ -169,7 +169,7 @@ Impersonation Chain
export AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT='{"conn_type":
"google_cloud_platform", "extra": {"key_path": "/keys/key.json", "scope":
"https://www.googleapis.com/auth/cloud-platform", "project": "airflow",
"num_retries": 5}}'
-.. _howto/connection:gcp:impersonation:
+.. _howto/connection:google_cloud_platform:impersonation:
Direct impersonation of a service account
-----------------------------------------
diff --git a/providers/google/docs/connections/gcp_looker.rst
b/providers/google/docs/connections/gcp_looker.rst
index 13a0e5a2ca3..8f3099c8a23 100644
--- a/providers/google/docs/connections/gcp_looker.rst
+++ b/providers/google/docs/connections/gcp_looker.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:gcp_looker:
+
Google Cloud Platform Looker Connection
=======================================
diff --git a/providers/google/docs/connections/gcp_sql.rst
b/providers/google/docs/connections/gcp_sql.rst
index b8071a0dc99..22efe50d8e1 100644
--- a/providers/google/docs/connections/gcp_sql.rst
+++ b/providers/google/docs/connections/gcp_sql.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:gcpcloudsql:
Google Cloud SQL Connection
===========================
diff --git a/providers/google/docs/connections/gcp_ssh.rst
b/providers/google/docs/connections/gcp_ssh.rst
index c23358d5b79..e598b5dd84d 100644
--- a/providers/google/docs/connections/gcp_ssh.rst
+++ b/providers/google/docs/connections/gcp_ssh.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:gcpssh:
+
Google Cloud Platform SSH Connection
====================================
diff --git
a/providers/google/docs/secrets-backends/google-cloud-secret-manager-backend.rst
b/providers/google/docs/secrets-backends/google-cloud-secret-manager-backend.rst
index 7ff0451d277..cce93a1ff8c 100644
---
a/providers/google/docs/secrets-backends/google-cloud-secret-manager-backend.rst
+++
b/providers/google/docs/secrets-backends/google-cloud-secret-manager-backend.rst
@@ -166,7 +166,7 @@ command as in the example below.
.. note:: If only key of the connection should be hidden there is an option to
store
only that key in Cloud Secret Manager and not entire connection. For more
details take
- a look at :ref:`Google Cloud Connection <howto/connection:gcp>`.
+ a look at :ref:`Google Cloud Connection
<howto/connection:google_cloud_platform>`.
Checking configuration
======================
diff --git a/providers/grpc/docs/connections/grpc.rst
b/providers/grpc/docs/connections/grpc.rst
index e56b9c9de4b..5eaaca7ecfb 100644
--- a/providers/grpc/docs/connections/grpc.rst
+++ b/providers/grpc/docs/connections/grpc.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:grpc:
gRPC
~~~~~~~~~~~~~~~~~~~~~
diff --git a/providers/jenkins/docs/connections.rst
b/providers/jenkins/docs/connections.rst
index 253ed7eff85..32f1cd1f26b 100644
--- a/providers/jenkins/docs/connections.rst
+++ b/providers/jenkins/docs/connections.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:dbt-cloud:
+.. _howto/connection:jenkins:
Jenkins Connection
=======================
diff --git a/providers/microsoft/azure/docs/connections/acr.rst
b/providers/microsoft/azure/docs/connections/acr.rst
index c539d8bfefc..1e85edf251f 100644
--- a/providers/microsoft/azure/docs/connections/acr.rst
+++ b/providers/microsoft/azure/docs/connections/acr.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:acr:
+.. _howto/connection:azure_container_registry:
Microsoft Azure Container Registry Connection
==============================================
diff --git a/providers/microsoft/azure/docs/connections/adf.rst
b/providers/microsoft/azure/docs/connections/adf.rst
index 0059d19f5b5..fba5bae4ad4 100644
--- a/providers/microsoft/azure/docs/connections/adf.rst
+++ b/providers/microsoft/azure/docs/connections/adf.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:adf:
+.. _howto/connection:azure_data_factory:
Microsoft Azure Data Factory
=======================================
diff --git a/providers/microsoft/azure/docs/connections/adl.rst
b/providers/microsoft/azure/docs/connections/adl.rst
index 2f877d6b07a..afebda81a45 100644
--- a/providers/microsoft/azure/docs/connections/adl.rst
+++ b/providers/microsoft/azure/docs/connections/adl.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:adl:
+.. _howto/connection:azure_data_lake:
Microsoft Azure Data Lake Connection
====================================
diff --git a/providers/microsoft/azure/docs/connections/adx.rst
b/providers/microsoft/azure/docs/connections/adx.rst
index 4b759005eb3..03ff179e484 100644
--- a/providers/microsoft/azure/docs/connections/adx.rst
+++ b/providers/microsoft/azure/docs/connections/adx.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:adx:
+.. _howto/connection:azure_data_explorer:
Microsoft Azure Data Explorer
=============================
diff --git a/providers/microsoft/azure/docs/connections/azure_synapse.rst
b/providers/microsoft/azure/docs/connections/azure_synapse.rst
index ee0da07521a..5c5bcf73ba6 100644
--- a/providers/microsoft/azure/docs/connections/azure_synapse.rst
+++ b/providers/microsoft/azure/docs/connections/azure_synapse.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:synapse:
+.. _howto/connection:azure_synapse:
Microsoft Azure Synapse
=======================
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/adx.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/adx.py
index 2dfd51fa55d..691f902c455 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/adx.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/adx.py
@@ -74,7 +74,7 @@ class AzureDataExplorerHook(BaseHook):
instance and use it for all queries.
:param azure_data_explorer_conn_id: Reference to the
- :ref:`Azure Data Explorer connection<howto/connection:adx>`.
+ :ref:`Azure Data Explorer
connection<howto/connection:azure_data_explorer>`.
"""
conn_name_attr = "azure_data_explorer_conn_id"
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/container_registry.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/container_registry.py
index 836eb43a534..79d412e136c 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/container_registry.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/container_registry.py
@@ -37,7 +37,7 @@ class AzureContainerRegistryHook(BaseHook):
"""
A hook to communicate with a Azure Container Registry.
- :param conn_id: :ref:`Azure Container Registry connection
id<howto/connection:acr>`
+ :param conn_id: :ref:`Azure Container Registry connection
id<howto/connection:azure_container_registry>`
of a service principal which will be used to start the container
instance
"""
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py
index ca8048eff7d..dd408bed515 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_factory.py
@@ -149,7 +149,7 @@ class AzureDataFactoryHook(BaseHook):
"""
A hook to interact with Azure Data Factory.
- :param azure_data_factory_conn_id: The :ref:`Azure Data Factory connection
id<howto/connection:adf>`.
+ :param azure_data_factory_conn_id: The :ref:`Azure Data Factory connection
id<howto/connection:azure_data_factory>`.
"""
conn_type: str = "azure_data_factory"
@@ -1111,7 +1111,7 @@ class AzureDataFactoryAsyncHook(AzureDataFactoryHook):
"""
An Async Hook that connects to Azure DataFactory to perform pipeline
operations.
- :param azure_data_factory_conn_id: The :ref:`Azure Data Factory connection
id<howto/connection:adf>`.
+ :param azure_data_factory_conn_id: The :ref:`Azure Data Factory connection
id<howto/connection:azure_data_factory>`.
"""
default_conn_name: str = "azure_data_factory_default"
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py
index bbdbc3ad969..7535eaaaa3a 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/data_lake.py
@@ -58,7 +58,7 @@ class AzureDataLakeHook(BaseHook):
``{"tenant": "<TENANT>", "account_name": "ACCOUNT_NAME"}``.
:param azure_data_lake_conn_id: Reference to
- :ref:`Azure Data Lake connection<howto/connection:adl>`.
+ :ref:`Azure Data Lake connection<howto/connection:azure_data_lake>`.
"""
conn_name_attr = "azure_data_lake_conn_id"
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py
index d3c9129f461..e37c6bca9f2 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/hooks/synapse.py
@@ -65,7 +65,7 @@ class AzureSynapseHook(BaseHook):
"""
A hook to interact with Azure Synapse.
- :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:synapse>`.
+ :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:azure_synapse>`.
:param spark_pool: The Apache Spark pool used to submit the job
"""
@@ -249,7 +249,7 @@ class BaseAzureSynapseHook(BaseHook):
"""
A base hook class to create session and connection to Azure Synapse using
connection id.
- :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:synapse>`.
+ :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:azure_synapse>`.
"""
conn_type: str = "azure_synapse"
@@ -299,7 +299,7 @@ class AzureSynapsePipelineHook(BaseAzureSynapseHook):
"""
A hook to interact with Azure Synapse Pipeline.
- :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:synapse>`.
+ :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:azure_synapse>`.
:param azure_synapse_workspace_dev_endpoint: The Azure Synapse Workspace
development endpoint.
"""
@@ -453,7 +453,7 @@ class
AzureSynapsePipelineAsyncHook(AzureSynapsePipelineHook):
"""
An asynchronous hook to interact with Azure Synapse Pipeline.
- :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:synapse>`.
+ :param azure_synapse_conn_id: The :ref:`Azure Synapse connection
id<howto/connection:azure_synapse>`.
:param azure_synapse_workspace_dev_endpoint: The Azure Synapse Workspace
development endpoint.
"""
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adls.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adls.py
index 1beaa290549..2569bd01067 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adls.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adls.py
@@ -44,7 +44,7 @@ class ADLSCreateObjectOperator(BaseOperator):
If False and remote path is a directory, will quit regardless if
any files
would be overwritten or not. If True, only matching filenames are
actually
overwritten.
- :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake
connection<howto/connection:adl>`.
+ :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake
connection<howto/connection:azure_data_lake>`.
"""
template_fields: Sequence[str] = ("file_system_name", "file_name", "data")
@@ -89,7 +89,7 @@ class ADLSDeleteOperator(BaseOperator):
:param path: A directory or file to remove
:param recursive: Whether to loop into directories in the location and
remove the files
:param ignore_not_found: Whether to raise error if file to delete is not
found
- :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake
connection<howto/connection:adl>`.
+ :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake
connection<howto/connection:azure_data_lake>`.
"""
template_fields: Sequence[str] = ("path",)
@@ -128,7 +128,7 @@ class ADLSListOperator(BaseOperator):
:param file_system_name: Name of the file system (container) in ADLS Gen2.
:param path: The directory path within the file system to list files from
(templated).
- :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake
connection<howto/connection:adl>`.
+ :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake
connection<howto/connection:azure_data_lake>`.
"""
template_fields: Sequence[str] = ("path",)
diff --git
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adx.py
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adx.py
index 88c1b9688ec..684b6c4b014 100644
---
a/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adx.py
+++
b/providers/microsoft/azure/src/airflow/providers/microsoft/azure/operators/adx.py
@@ -41,7 +41,7 @@ class AzureDataExplorerQueryOperator(BaseOperator):
:param options: Optional query options. See:
https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties#list-of-clientrequestproperties
:param azure_data_explorer_conn_id: Reference to the
- :ref:`Azure Data Explorer connection<howto/connection:adx>`.
+ :ref:`Azure Data Explorer
connection<howto/connection:azure_data_explorer>`.
"""
ui_color = "#00a1f2"
diff --git a/providers/odbc/docs/connections/odbc.rst
b/providers/odbc/docs/connections/odbc.rst
index b7d913083b4..d156f553b1d 100644
--- a/providers/odbc/docs/connections/odbc.rst
+++ b/providers/odbc/docs/connections/odbc.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-.. _howto/connection/odbc:
+.. _howto/connection:odbc:
ODBC Connection
===============
diff --git a/providers/opensearch/docs/connections/opensearch.rst
b/providers/opensearch/docs/connections/opensearch.rst
index a9fb6f24b1e..e1f7e5a08fb 100644
--- a/providers/opensearch/docs/connections/opensearch.rst
+++ b/providers/opensearch/docs/connections/opensearch.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:opensearch:
OpenSearch Connection
=====================
diff --git a/providers/opsgenie/docs/connections.rst
b/providers/opsgenie/docs/connections.rst
index c021eafcd75..25fa10b4a0e 100644
--- a/providers/opsgenie/docs/connections.rst
+++ b/providers/opsgenie/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:opsgenie:
+
Opsgenie Connection
===================
diff --git a/providers/pagerduty/docs/connections/pagerdurty.rst
b/providers/pagerduty/docs/connections/pagerdurty.rst
index 63fb13efa7f..83fb3e7173b 100644
--- a/providers/pagerduty/docs/connections/pagerdurty.rst
+++ b/providers/pagerduty/docs/connections/pagerdurty.rst
@@ -42,4 +42,4 @@ Pagerduty Routing key (Integration key)
.. note::
The Pagerduty Routing key is deprecated.
- Please use the :ref:`PagerDutyEvents connection
<howto/connection:pagerduty-events>` instead.
+ Please use the :ref:`PagerDutyEvents connection
<howto/connection:pagerduty_events>` instead.
diff --git a/providers/pagerduty/docs/connections/pagerduty-events.rst
b/providers/pagerduty/docs/connections/pagerduty-events.rst
index f00e79cb75d..fa0dccfa9a0 100644
--- a/providers/pagerduty/docs/connections/pagerduty-events.rst
+++ b/providers/pagerduty/docs/connections/pagerduty-events.rst
@@ -17,7 +17,7 @@
-.. _howto/connection:pagerduty-events:
+.. _howto/connection:pagerduty_events:
PagerDuty Events Connection
===========================
diff --git a/providers/presto/docs/connections.rst
b/providers/presto/docs/connections.rst
index f0e3d878734..03aa142dfea 100644
--- a/providers/presto/docs/connections.rst
+++ b/providers/presto/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:presto:
+
Presto Connection
=================
diff --git a/providers/redis/docs/connections.rst
b/providers/redis/docs/connections.rst
index 330447023f6..35fc33e3c31 100644
--- a/providers/redis/docs/connections.rst
+++ b/providers/redis/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:redis:
+
Redis Connection
================
diff --git a/providers/salesforce/docs/connections/salesforce.rst
b/providers/salesforce/docs/connections/salesforce.rst
index 5c5bb5e47c5..396eeac1f58 100644
--- a/providers/salesforce/docs/connections/salesforce.rst
+++ b/providers/salesforce/docs/connections/salesforce.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-.. _howto/connection:SalesforceHook:
+.. _howto/connection:salesforce:
Salesforce Connection
=====================
diff --git
a/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py
b/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py
index 041492f391a..f2bb5007fc7 100644
--- a/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py
+++ b/providers/salesforce/src/airflow/providers/salesforce/operators/bulk.py
@@ -45,7 +45,7 @@ class SalesforceBulkOperator(BaseOperator):
:param external_id_field: unique identifier field for upsert operations
:param batch_size: number of records to assign for each batch in the job
:param use_serial: Process batches in serial mode
- :param salesforce_conn_id: The :ref:`Salesforce Connection id
<howto/connection:SalesforceHook>`.
+ :param salesforce_conn_id: The :ref:`Salesforce Connection id
<howto/connection:salesforce>`.
"""
available_operations = ("insert", "update", "upsert", "delete",
"hard_delete")
diff --git
a/providers/salesforce/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py
b/providers/salesforce/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py
index 64e8c2d2e56..dc9e9490301 100644
---
a/providers/salesforce/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py
+++
b/providers/salesforce/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py
@@ -36,7 +36,7 @@ class SalesforceApexRestOperator(BaseOperator):
:param endpoint: The REST endpoint for the request.
:param method: HTTP method for the request (default GET)
:param payload: A dict of parameters to send in a POST / PUT request
- :param salesforce_conn_id: The :ref:`Salesforce Connection id
<howto/connection:SalesforceHook>`.
+ :param salesforce_conn_id: The :ref:`Salesforce Connection id
<howto/connection:salesforce>`.
"""
def __init__(
diff --git a/providers/samba/docs/connections.rst
b/providers/samba/docs/connections.rst
index 20661a525f0..403c432d8f7 100644
--- a/providers/samba/docs/connections.rst
+++ b/providers/samba/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:samba:
+
Samba Connection
=================
diff --git a/providers/slack/docs/connections/slack-incoming-webhook.rst
b/providers/slack/docs/connections/slack-incoming-webhook.rst
index 6017862721d..d325741c91b 100644
--- a/providers/slack/docs/connections/slack-incoming-webhook.rst
+++ b/providers/slack/docs/connections/slack-incoming-webhook.rst
@@ -16,7 +16,7 @@
under the License.
-.. _howto/connection:slack-incoming-webhook:
+.. _howto/connection:slackwebhook:
Slack Incoming Webhook Connection
=================================
diff --git a/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py
b/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py
index 7b86be93b0e..fa4814875c6 100644
--- a/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py
+++ b/providers/slack/src/airflow/providers/slack/hooks/slack_webhook.py
@@ -76,7 +76,7 @@ class SlackWebhookHook(BaseHook):
This hook allows you to post messages to Slack by using Incoming Webhooks.
.. seealso::
- - :ref:`Slack Incoming Webhook connection
<howto/connection:slack-incoming-webhook>`
+ - :ref:`Slack Incoming Webhook connection
<howto/connection:slackwebhook>`
- https://api.slack.com/messaging/webhooks
- https://slack.dev/python-slack-sdk/webhook/index.html
diff --git a/providers/sqlite/docs/connections/sqlite.rst
b/providers/sqlite/docs/connections/sqlite.rst
index b7c655a88eb..5424bf0c183 100644
--- a/providers/sqlite/docs/connections/sqlite.rst
+++ b/providers/sqlite/docs/connections/sqlite.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-
+.. _howto/connection:sqlite:
SQLite Connection
=================
diff --git
a/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
b/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
index 2393a883195..f6d474513c3 100644
---
a/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
+++
b/providers/teradata/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
@@ -54,7 +54,7 @@ class AzureBlobStorageToTeradataOperator(BaseOperator):
:param azure_conn_id: The Airflow WASB connection used for azure blob
credentials.
:param teradata_table: The name of the teradata table to which the data is
transferred.(templated)
:param teradata_conn_id: The connection ID used to connect to Teradata
- :ref:`Teradata connection <howto/connection:Teradata>`
+ :ref:`Teradata connection <howto/connection:teradata>`
:param teradata_authorization_name: The name of Teradata Authorization
Database Object,
is used to control who can access an Azure Blob object store.
Refer to
diff --git
a/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
b/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
index 51f3643c528..36bac281fdf 100644
---
a/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
+++
b/providers/teradata/src/airflow/providers/teradata/transfers/s3_to_teradata.py
@@ -52,7 +52,7 @@ class S3ToTeradataOperator(BaseOperator):
:param teradata_table: The name of the teradata table to which the data is
transferred.(templated)
:param aws_conn_id: The Airflow AWS connection used for AWS credentials.
:param teradata_conn_id: The connection ID used to connect to Teradata
- :ref:`Teradata connection <howto/connection:Teradata>`.
+ :ref:`Teradata connection <howto/connection:teradata>`.
:param teradata_authorization_name: The name of Teradata Authorization
Database Object,
is used to control who can access an S3 object store.
Refer to
diff --git
a/providers/teradata/src/airflow/providers/teradata/transfers/teradata_to_teradata.py
b/providers/teradata/src/airflow/providers/teradata/transfers/teradata_to_teradata.py
index f36d24432ab..126146cae36 100644
---
a/providers/teradata/src/airflow/providers/teradata/transfers/teradata_to_teradata.py
+++
b/providers/teradata/src/airflow/providers/teradata/transfers/teradata_to_teradata.py
@@ -38,7 +38,7 @@ class TeradataToTeradataOperator(BaseOperator):
:param dest_teradata_conn_id: destination Teradata connection.
:param destination_table: destination table to insert rows.
- :param source_teradata_conn_id: :ref:`Source Teradata connection
<howto/connection:Teradata>`.
+ :param source_teradata_conn_id: :ref:`Source Teradata connection
<howto/connection:teradata>`.
:param sql: SQL query to execute against the source Teradata database
:param sql_params: Parameters to use in sql query.
:param rows_chunk: number of rows per chunk to commit.
diff --git a/providers/trino/docs/connections.rst
b/providers/trino/docs/connections.rst
index 32e0abe360d..677b504d2d6 100644
--- a/providers/trino/docs/connections.rst
+++ b/providers/trino/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:trino:
+
Apache Trino Connection
=======================
diff --git a/providers/yandex/docs/connections/yandexcloud.rst
b/providers/yandex/docs/connections/yandexcloud.rst
index b1d8b4074c2..1f7292815c5 100644
--- a/providers/yandex/docs/connections/yandexcloud.rst
+++ b/providers/yandex/docs/connections/yandexcloud.rst
@@ -15,7 +15,7 @@
specific language governing permissions and limitations
under the License.
-.. _yandex_cloud_connection:
+.. _howto/connection:yandexcloud:
Yandex.Cloud Connection
=======================
diff --git
a/providers/yandex/docs/secrets-backends/yandex-cloud-lockbox-secret-backend.rst
b/providers/yandex/docs/secrets-backends/yandex-cloud-lockbox-secret-backend.rst
index f30346b24da..f0baea493f1 100644
---
a/providers/yandex/docs/secrets-backends/yandex-cloud-lockbox-secret-backend.rst
+++
b/providers/yandex/docs/secrets-backends/yandex-cloud-lockbox-secret-backend.rst
@@ -143,7 +143,7 @@ Alternatively, you can specify the path to the JSON file in
``backend_kwargs``:
Using Yandex Cloud connection for authorization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-First, you need to create :ref:`Yandex Cloud connection
<yandex_cloud_connection>`.
+First, you need to create :ref:`Yandex Cloud connection
<howto/connection:yandexcloud>`.
Then, you need to specify the ``connection_id`` in ``backend_kwargs``:
diff --git a/providers/zendesk/docs/connections.rst
b/providers/zendesk/docs/connections.rst
index 8d2d586b30b..771c2430ff0 100644
--- a/providers/zendesk/docs/connections.rst
+++ b/providers/zendesk/docs/connections.rst
@@ -15,6 +15,8 @@
specific language governing permissions and limitations
under the License.
+.. _howto/connection:zendesk:
+
Zendesk Connection
=========================
diff --git a/scripts/ci/prek/check_connection_doc_labels.py
b/scripts/ci/prek/check_connection_doc_labels.py
new file mode 100755
index 00000000000..8dc16b346ea
--- /dev/null
+++ b/scripts/ci/prek/check_connection_doc_labels.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# /// script
+# requires-python = ">=3.10,<3.11"
+# dependencies = [
+# "pyyaml>=6.0.3",
+# "rich>=13.6.0",
+# ]
+# ///
+"""
+Ensure ``howto/connection:`` labels in RST docs stay consistent with
provider.yaml.
+
+Source of truth: ``connection-type`` values in each provider's
``provider.yaml``.
+
+Checks:
+ ORPHAN LABEL — an RST anchor ``.. _howto/connection:{X}:`` where X is not a
+ registered connection-type.
+ MULTIPLE LABELS — more than one top-level ``howto/connection:`` anchor per
file.
+ BROKEN REF — a ``:ref:`` to ``howto/connection:*`` in RST or Python with
no
+ matching anchor in any RST file.
+"""
+
+from __future__ import annotations
+
+import re
+import sys
+from pathlib import Path
+
+from rich.console import Console
+
+sys.path.insert(0, str(Path(__file__).parent.resolve()))
+
+from common_prek_utils import (
+ AIRFLOW_CORE_SOURCES_PATH,
+ AIRFLOW_PROVIDERS_ROOT_PATH,
+ AIRFLOW_ROOT_PATH,
+ get_all_provider_info_dicts,
+)
+
# Wide, plain-color console so long repo-relative paths don't wrap in CI logs.
console = Console(color_system="standard", width=200)

# Labels that are intentionally allowed even though no provider.yaml registers
# them as a connection-type (docs-only / umbrella connection pages).
KNOWN_EXCEPTIONS: set[str] = {
    "pgvector",
    "sql",
}

# Matches a whole-line RST anchor ``.. _howto/connection:{label}:`` where the
# label is a bare identifier; captures just the label.
TOP_LEVEL_ANCHOR_RE = re.compile(r"^\.\.\s+_howto/connection:([a-zA-Z0-9_-]+):\s*$", re.MULTILINE)
# Matches ANY ``howto/connection:*`` anchor (any non-space suffix); captures
# the full ``howto/connection:...`` target for reference resolution.
ANY_ANCHOR_RE = re.compile(r"^\.\.\s+_(howto/connection:[^\s]+?):\s*$", re.MULTILINE)
# Matches both :ref: forms — ``:ref:`text <target>``` (group 1) and the bare
# ``:ref:`target``` (group 2) — restricted to howto/connection targets.
REF_RE = re.compile(r":ref:`(?:[^`]*<(howto/connection:[^>]+)>|(howto/connection:[^`]+))`")
+
+
def collect_connection_types() -> set[str]:
    """Return every ``connection-type`` value registered in any provider.yaml."""
    return {
        conn["connection-type"]
        for provider_info in get_all_provider_info_dicts().values()
        for conn in provider_info.get("connection-types", [])
    }
+
+
def collect_rst_files() -> list[Path]:
    """Collect RST docs from all providers plus airflow-core docs when present."""
    core_docs_dir = AIRFLOW_ROOT_PATH / "airflow-core" / "docs"
    # airflow-core docs may be absent in partial checkouts — contribute nothing then.
    core_docs = core_docs_dir.rglob("*.rst") if core_docs_dir.is_dir() else []
    return [*AIRFLOW_PROVIDERS_ROOT_PATH.rglob("*.rst"), *core_docs]
+
+
def collect_python_files() -> list[Path]:
    """Collect Python sources from all providers plus airflow-core when present."""
    # Core sources may be absent in partial checkouts — contribute nothing then.
    core_sources = AIRFLOW_CORE_SOURCES_PATH.rglob("*.py") if AIRFLOW_CORE_SOURCES_PATH.is_dir() else []
    return [*AIRFLOW_PROVIDERS_ROOT_PATH.rglob("*.py"), *core_sources]
+
+
def main() -> int:
    """Run all connection-doc label checks.

    Checks (in report order):
      1. ORPHAN LABEL    — top-level anchor label not registered as a
                           connection-type in any provider.yaml.
      2. MULTIPLE LABELS — more than one top-level anchor in one RST file.
      3. BROKEN REF      — a ``:ref:`` to ``howto/connection:*`` (in RST or
                           Python) with no matching anchor in any RST file.

    :return: ``0`` when everything is consistent, ``1`` otherwise.
    """
    errors: list[str] = []
    valid_conn_types = collect_connection_types() | KNOWN_EXCEPTIONS

    # Cache each RST file's content so it is read from disk exactly once —
    # it is needed both for anchor collection and for reference checking.
    # Explicit UTF-8: plain read_text() uses the locale codec and can fail
    # on non-ASCII docs under non-UTF-8 locales (e.g. some CI/Windows hosts).
    rst_contents: dict[Path, str] = {}
    all_anchors: set[str] = set()
    top_level_per_file: dict[Path, list[str]] = {}

    for rst_file in collect_rst_files():
        content = rst_file.read_text(encoding="utf-8")
        rst_contents[rst_file] = content
        all_anchors.update(ANY_ANCHOR_RE.findall(content))
        top_labels = TOP_LEVEL_ANCHOR_RE.findall(content)
        if top_labels:
            top_level_per_file[rst_file] = top_labels

    for rst_file, labels in top_level_per_file.items():
        rel = rst_file.relative_to(AIRFLOW_ROOT_PATH)
        for label in labels:
            if label not in valid_conn_types:
                errors.append(
                    f"ORPHAN LABEL: {rel} — "
                    f"'howto/connection:{label}' does not match any connection-type in provider.yaml"
                )
        if len(labels) > 1:
            errors.append(
                f"MULTIPLE LABELS: {rel} — "
                f"found {len(labels)} top-level labels ({', '.join(labels)}), expected at most 1"
            )

    # Reference check over the cached RST contents (dict preserves insertion
    # order, so the report order matches the file scan order).
    for rst_file, content in rst_contents.items():
        for match in REF_RE.finditer(content):
            target = match.group(1) or match.group(2)
            if target not in all_anchors:
                rel = rst_file.relative_to(AIRFLOW_ROOT_PATH)
                errors.append(f"BROKEN REF: {rel} — :ref:`{target}` has no matching anchor in any RST file")

    for py_file in collect_python_files():
        content = py_file.read_text(encoding="utf-8")
        for match in REF_RE.finditer(content):
            target = match.group(1) or match.group(2)
            if target not in all_anchors:
                rel = py_file.relative_to(AIRFLOW_ROOT_PATH)
                errors.append(f"BROKEN REF: {rel} — :ref:`{target}` has no matching anchor in any RST file")

    if errors:
        console.print()
        for error in errors:
            console.print(f"  [red]✗[/] {error}")
        console.print()
        console.print(f"[red]Connection doc label check failed with {len(errors)} error(s).[/]")
        return 1

    console.print("[green]All connection doc labels and cross-references are consistent.[/]")
    return 0
+
+
if __name__ == "__main__":
    # Propagate the check's status code so pre-commit / CI can fail the hook.
    sys.exit(main())