This is an automated email from the ASF dual-hosted git repository.

ephraimanierobi pushed a commit to branch v2-3-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ba6795e3b57c4a801587533758bbd3eb002a8e8a
Author: Leah E. Cole <[email protected]>
AuthorDate: Thu May 5 11:26:14 2022 -0400

    Replace DummyOperator references in docs (#23502)
    
    (cherry picked from commit 69e361ccc08f9e72be5a6e39bfd5be8e4ee6387d)
---
 docs/apache-airflow/concepts/dags.rst              | 38 +++++++++++-----------
 docs/apache-airflow/howto/timetable.rst            |  4 +--
 docs/apache-airflow/lineage.rst                    |  4 +--
 .../logging-monitoring/callbacks.rst               |  8 ++---
 docs/apache-airflow/timezone.rst                   |  2 +-
 5 files changed, 28 insertions(+), 28 deletions(-)

diff --git a/docs/apache-airflow/concepts/dags.rst b/docs/apache-airflow/concepts/dags.rst
index 7e70cdecd7..222b3847b7 100644
--- a/docs/apache-airflow/concepts/dags.rst
+++ b/docs/apache-airflow/concepts/dags.rst
@@ -41,21 +41,21 @@ which will add the DAG to anything inside it implicitly::
         "my_dag_name", start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
         schedule_interval="@daily", catchup=False
     ) as dag:
-        op = DummyOperator(task_id="task")
+        op = EmptyOperator(task_id="task")
 
 Or, you can use a standard constructor, passing the dag into any
 operators you use::
 
     my_dag = DAG("my_dag_name", start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
                  schedule_interval="@daily", catchup=False)
-    op = DummyOperator(task_id="task", dag=my_dag)
+    op = EmptyOperator(task_id="task", dag=my_dag)
 
 Or, you can use the ``@dag`` decorator to :ref:`turn a function into a DAG generator <concepts:dag-decorator>`::
 
     @dag(start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
          schedule_interval="@daily", catchup=False)
     def generate_dag():
-        op = DummyOperator(task_id="task")
+        op = EmptyOperator(task_id="task")
 
     dag = generate_dag()
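
For reference, a self-contained version of the context-manager style shown in this hunk — a minimal sketch assuming Airflow 2.3+, where ``EmptyOperator`` is importable from ``airflow.operators.empty``:

    import pendulum

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator

    with DAG(
        "my_dag_name",
        start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
        schedule_interval="@daily",
        catchup=False,
    ) as dag:
        # Instantiated inside the context manager, the operator is
        # attached to the enclosing DAG implicitly.
        op = EmptyOperator(task_id="task")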
 
@@ -94,7 +94,7 @@ And if you want to chain together dependencies, you can use ``chain``::
     chain(op1, op2, op3, op4)
 
     # You can also do it dynamically
-    chain(*[DummyOperator(task_id='op' + i) for i in range(1, 6)])
+    chain(*[EmptyOperator(task_id='op' + str(i)) for i in range(1, 6)])
 
 Chain can also do *pairwise* dependencies for lists the same size (this is different to the *cross dependencies* done by ``cross_downstream``!)::
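
The pairwise form pairs same-length lists element-wise rather than building a cross product — a minimal sketch (the DAG name and four task names are placeholders):

    from airflow import DAG
    from airflow.models.baseoperator import chain
    from airflow.operators.empty import EmptyOperator

    with DAG("pairwise_example") as dag:
        op1, op2, op3, op4 = [EmptyOperator(task_id=f"op{i}") for i in range(1, 5)]

        # Same-length lists are paired element-wise:
        # op1 >> op3 and op2 >> op4, not all four combinations.
        chain([op1, op2], [op3, op4])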
 
@@ -309,8 +309,8 @@ The ``BranchPythonOperator`` can also be used with XComs allowing branching cont
         dag=dag,
     )
 
-    continue_op = DummyOperator(task_id="continue_task", dag=dag)
-    stop_op = DummyOperator(task_id="stop_task", dag=dag)
+    continue_op = EmptyOperator(task_id="continue_task", dag=dag)
+    stop_op = EmptyOperator(task_id="stop_task", dag=dag)
 
     start_op >> branch_op >> [continue_op, stop_op]
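
For context, ``branch_op`` in a snippet like this is typically a ``BranchPythonOperator`` whose callable returns the ``task_id`` to follow — a sketch of the XCom-driven variant the hunk header refers to, plugging into the example's ``dag`` (the ``start_task`` id and the numeric threshold are assumptions):

    from airflow.operators.python import BranchPythonOperator

    def branch_func(ti):
        # Decide the branch from a value an upstream task pushed to XCom.
        xcom_value = int(ti.xcom_pull(task_ids="start_task"))
        if xcom_value >= 5:
            return "continue_task"
        return "stop_task"

    branch_op = BranchPythonOperator(
        task_id="branch_task",
        python_callable=branch_func,
        dag=dag,
    )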
 
@@ -403,7 +403,7 @@ You can also combine this with the :ref:`concepts:depends-on-past` functionality
         import pendulum
 
         from airflow.models import DAG
-        from airflow.operators.dummy import DummyOperator
+        from airflow.operators.empty import EmptyOperator
         from airflow.operators.python import BranchPythonOperator
 
         dag = DAG(
@@ -412,17 +412,17 @@ You can also combine this with the :ref:`concepts:depends-on-past` functionality
             start_date=pendulum.datetime(2019, 2, 28, tz="UTC"),
         )
 
-        run_this_first = DummyOperator(task_id="run_this_first", dag=dag)
+        run_this_first = EmptyOperator(task_id="run_this_first", dag=dag)
         branching = BranchPythonOperator(
             task_id="branching", dag=dag, python_callable=lambda: "branch_a"
         )
 
-        branch_a = DummyOperator(task_id="branch_a", dag=dag)
-        follow_branch_a = DummyOperator(task_id="follow_branch_a", dag=dag)
+        branch_a = EmptyOperator(task_id="branch_a", dag=dag)
+        follow_branch_a = EmptyOperator(task_id="follow_branch_a", dag=dag)
 
-        branch_false = DummyOperator(task_id="branch_false", dag=dag)
+        branch_false = EmptyOperator(task_id="branch_false", dag=dag)
 
-        join = DummyOperator(task_id="join", dag=dag)
+        join = EmptyOperator(task_id="join", dag=dag)
 
         run_this_first >> branching
         branching >> branch_a >> follow_branch_a >> join
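
A caveat when reading this example: if ``join`` also sits downstream of the skipped ``branch_false`` path, the default ``all_success`` trigger rule skips it too. A common variant relaxes the trigger rule — a sketch reusing the example's ``dag`` and imports (assumes Airflow 2.2+, where this rule name exists):

        join = EmptyOperator(
            task_id="join",
            dag=dag,
            # Run once no upstream has failed and at least one has
            # succeeded, even though the untaken branch was skipped.
            trigger_rule="none_failed_min_one_success",
        )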
@@ -446,12 +446,12 @@ For example, here is a DAG that uses a ``for`` loop to define some Tasks::
 
     with DAG("loop_example") as dag:
 
-        first = DummyOperator(task_id="first")
-        last = DummyOperator(task_id="last")
+        first = EmptyOperator(task_id="first")
+        last = EmptyOperator(task_id="last")
 
         options = ["branch_a", "branch_b", "branch_c", "branch_d"]
         for option in options:
-            t = DummyOperator(task_id=option)
+            t = EmptyOperator(task_id=option)
             first >> t >> last
 
 In general, we advise you to try and keep the *topology* (the layout) of your DAG tasks relatively stable; dynamic DAGs are usually better used for dynamically loading configuration options or changing operator options.
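
As an illustration of that last recommendation, a sketch of driving operator options from configuration while keeping the topology fixed (the ``task_configs`` mapping and its contents are hypothetical):

    from airflow import DAG
    from airflow.operators.bash import BashOperator

    with DAG("config_driven_example") as dag:
        # Hypothetical config; in practice this could be read from a
        # JSON or YAML file at parse time.
        task_configs = {"extract": "echo extract", "transform": "echo transform"}

        previous = None
        for name, command in task_configs.items():
            task = BashOperator(task_id=name, bash_command=command)
            if previous is not None:
                previous >> task
            previous = task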
@@ -484,10 +484,10 @@ Unlike :ref:`concepts:subdags`, TaskGroups are purely a UI grouping concept. Tas
 Dependency relationships can be applied across all tasks in a TaskGroup with the ``>>`` and ``<<`` operators. For example, the following code puts ``task1`` and ``task2`` in TaskGroup ``group1`` and then puts both tasks upstream of ``task3``::
 
     with TaskGroup("group1") as group1:
-        task1 = DummyOperator(task_id="task1")
-        task2 = DummyOperator(task_id="task2")
+        task1 = EmptyOperator(task_id="task1")
+        task2 = EmptyOperator(task_id="task2")
 
-    task3 = DummyOperator(task_id="task3")
+    task3 = EmptyOperator(task_id="task3")
 
     group1 >> task3
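
A detail worth knowing when reading the snippet above: task ids inside a TaskGroup are prefixed with the group id by default — a small runnable sketch (``prefix_group_id`` is the TaskGroup parameter that controls this):

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator
    from airflow.utils.task_group import TaskGroup

    with DAG("group_prefix_example") as dag:
        with TaskGroup("group1") as group1:
            task1 = EmptyOperator(task_id="task1")

        # The effective task_id carries the group prefix by default;
        # pass prefix_group_id=False to TaskGroup to keep bare ids.
        print(task1.task_id)  # group1.task1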
 
@@ -503,7 +503,7 @@ TaskGroup also supports ``default_args`` like DAG, it will overwrite the ``defau
         default_args={'retries': 1},
     ):
         with TaskGroup('group1', default_args={'retries': 3}):
-            task1 = DummyOperator(task_id='task1')
+            task1 = EmptyOperator(task_id='task1')
            task2 = BashOperator(task_id='task2', bash_command='echo Hello World!', retries=2)
             print(task1.retries) # 3
             print(task2.retries) # 2
diff --git a/docs/apache-airflow/howto/timetable.rst b/docs/apache-airflow/howto/timetable.rst
index 61dba35214..0748e137da 100644
--- a/docs/apache-airflow/howto/timetable.rst
+++ b/docs/apache-airflow/howto/timetable.rst
@@ -194,7 +194,7 @@ For reference, here's our plugin and DAG files in their entirety:
 
     from airflow import DAG
     from airflow.example_dags.plugins.workday import AfterWorkdayTimetable
-    from airflow.operators.dummy import DummyOperator
+    from airflow.operators.empty import EmptyOperator
 
 
     with DAG(
@@ -203,7 +203,7 @@ For reference, here's our plugin and DAG files in their entirety:
         timetable=AfterWorkdayTimetable(),
         tags=["example", "timetable"],
     ) as dag:
-        DummyOperator(task_id="run_this")
+        EmptyOperator(task_id="run_this")
 
 
 Parameterized Timetables
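
For reference, the custom timetable above resolves because it is registered through a plugin — a minimal sketch of that registration, mirroring the workday example shipped with Airflow (class and plugin names follow that example):

    from airflow.example_dags.plugins.workday import AfterWorkdayTimetable
    from airflow.plugins_manager import AirflowPlugin


    class WorkdayTimetablePlugin(AirflowPlugin):
        name = "workday_timetable_plugin"
        # Listing the class here is what lets DAG files pass
        # timetable=AfterWorkdayTimetable().
        timetables = [AfterWorkdayTimetable]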
diff --git a/docs/apache-airflow/lineage.rst b/docs/apache-airflow/lineage.rst
index 3c8899c6f9..20adfb96fa 100644
--- a/docs/apache-airflow/lineage.rst
+++ b/docs/apache-airflow/lineage.rst
@@ -37,7 +37,7 @@ works.
     from airflow.lineage.entities import File
     from airflow.models import DAG
     from airflow.operators.bash import BashOperator
-    from airflow.operators.dummy import DummyOperator
+    from airflow.operators.empty import EmptyOperator
 
     FILE_CATEGORIES = ["CAT1", "CAT2", "CAT3"]
 
@@ -50,7 +50,7 @@ works.
     )
 
     f_final = File(url="/tmp/final")
-    run_this_last = DummyOperator(
+    run_this_last = EmptyOperator(
         task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final
     )
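
``inlets=AUTO`` here picks up the outlets declared by the direct upstream tasks of ``run_this_last`` — a sketch of one such upstream task, reusing the example's imports and ``dag`` (the URLs and task id are illustrative):

    f_in = File(url="/tmp/whole_directory/")
    f_out = File(url="/tmp/{{ run_id }}/processed")

    run_this = BashOperator(
        task_id="run_me_first",
        bash_command="echo 1",
        dag=dag,
        # These outlets become run_this_last's inlets via AUTO.
        inlets=f_in,
        outlets=f_out,
    )
    run_this >> run_this_last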
 
diff --git a/docs/apache-airflow/logging-monitoring/callbacks.rst b/docs/apache-airflow/logging-monitoring/callbacks.rst
index 4b1b754581..da8c4a0b8c 100644
--- a/docs/apache-airflow/logging-monitoring/callbacks.rst
+++ b/docs/apache-airflow/logging-monitoring/callbacks.rst
@@ -56,7 +56,7 @@ In the following example, failures in any task call the ``task_failure_alert`` f
     import pendulum
 
     from airflow import DAG
-    from airflow.operators.dummy import DummyOperator
+    from airflow.operators.empty import EmptyOperator
 
 
     def task_failure_alert(context):
@@ -78,7 +78,7 @@ In the following example, failures in any task call the ``task_failure_alert`` f
         tags=["example"],
     ) as dag:
 
-        task1 = DummyOperator(task_id="task1")
-        task2 = DummyOperator(task_id="task2")
-        task3 = DummyOperator(task_id="task3", on_success_callback=dag_success_alert)
+        task1 = EmptyOperator(task_id="task1")
+        task2 = EmptyOperator(task_id="task2")
+        task3 = EmptyOperator(task_id="task3", on_success_callback=dag_success_alert)
         task1 >> task2 >> task3
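
The hunks reference ``dag_success_alert`` without showing its body — for completeness, a sketch of both callbacks consistent with the surrounding example (the printed fields come from the task context; the message bodies are illustrative):

    def task_failure_alert(context):
        print(f"Task has failed, task_instance_key_str: {context['task_instance_key_str']}")


    def dag_success_alert(context):
        print(f"DAG has succeeded, run_id: {context['run_id']}")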
diff --git a/docs/apache-airflow/timezone.rst b/docs/apache-airflow/timezone.rst
index 5df5a37fa2..d49d3bab5a 100644
--- a/docs/apache-airflow/timezone.rst
+++ b/docs/apache-airflow/timezone.rst
@@ -140,7 +140,7 @@ have limitations and we deliberately disallow using them in DAGs.
     import pendulum
 
     dag = DAG("my_tz_dag", start_date=pendulum.datetime(2016, 1, 1, 
tz="Europe/Amsterdam"))
-    op = DummyOperator(task_id="dummy", dag=dag)
+    op = EmptyOperator(task_id="empty", dag=dag)
     print(dag.timezone)  # <Timezone [Europe/Amsterdam]>
 
 Please note that while it is possible to set a ``start_date`` and ``end_date``
