josh-fell commented on code in PR #36041:
URL: https://github.com/apache/airflow/pull/36041#discussion_r1416318396


##########
airflow/example_dags/example_python_operator.py:
##########
@@ -28,55 +28,59 @@
 
 import pendulum
 
-from airflow.decorators import task
 from airflow.models.dag import DAG
-from airflow.operators.python import ExternalPythonOperator, PythonVirtualenvOperator, is_venv_installed
+from airflow.operators.python import (
+    ExternalPythonOperator,
+    PythonOperator,
+    PythonVirtualenvOperator,
+    is_venv_installed,
+)
 
 log = logging.getLogger(__name__)
 
 PATH_TO_PYTHON_BINARY = sys.executable
 
 
-def x():
-    pass
-
-
 with DAG(
     dag_id="example_python_operator",
     schedule=None,
     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
     catchup=False,
     tags=["example"],
-) as dag:
+):
     # [START howto_operator_python]
-    @task(task_id="print_the_context")
     def print_context(ds=None, **kwargs):
         """Print the Airflow context and ds variable from the context."""
         pprint(kwargs)
         print(ds)
         return "Whatever you return gets printed in the logs"
 
-    run_this = print_context()
+    run_this = PythonOperator(task_id="print_the_context", python_callable=print_context)
     # [END howto_operator_python]
 
     # [START howto_operator_python_render_sql]
-    @task(task_id="log_sql_query", templates_dict={"query": "sql/sample.sql"}, templates_exts=[".sql"])
     def log_sql(**kwargs):
         logging.info("Python task decorator query: %s", 
str(kwargs["templates_dict"]["query"]))
 
-    log_the_sql = log_sql()
+    log_the_sql = PythonOperator(
+        task_id="log_sql_query",
+        python_callable=log_sql,
+        templates_dict={"query": "sql/sample.sql"},
+        templates_exts=[".sql"],
+    )
     # [END howto_operator_python_render_sql]
 
     # [START howto_operator_python_kwargs]
     # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
     for i in range(5):
 
-        @task(task_id=f"sleep_for_{i}")
         def my_sleeping_function(random_base):
             """This is a function that will run within the DAG execution"""
             time.sleep(random_base)

Review Comment:
   Related note, @uranusjr do you think it's worth enabling [flake8-bugbear in 
Ruff](https://docs.astral.sh/ruff/rules/#flake8-bugbear-b) to catch this and 
other related issues?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to