ferruzzi commented on a change in pull request #22311:
URL: https://github.com/apache/airflow/pull/22311#discussion_r829287507



##########
File path: scripts/ci/pre_commit/pre_commit_check_watcher_in_examples.py
##########
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import sys
+from pathlib import Path
+from typing import List
+
+from rich.console import Console
+
+if __name__ not in ("__main__", "__mp_main__"):
+    raise SystemExit(
+        "This file is intended to be executed as an executable program. You 
cannot use it as a module."
+        f"To run this script, run the ./{__file__} command [FILE] ..."
+    )
+
+
+console = Console(color_system="standard", width=200)
+
+errors: List[str] = []
+
+WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"

Review comment:
       Apologies if I missed this in the email discussions.  If I am reading 
this and the code below correctly, then the `chain()` method of declaring the 
taskflow is not really supported anymore?
   
   Related, there is nothing more frustrating to me than a precommit failing 
because of something it could easily fix.  Could this append the ">> watcher()" 
if it is not found?
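
    A minimal sketch of what such an auto-fix could look like (hypothetical helper, not what the script in this PR actually does; it assumes the watcher line belongs at the end of the `with models.DAG(...) as dag:` block, indented one level):

```python
from pathlib import Path

WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"


def _append_watcher_if_missing(file: Path) -> bool:
    """Append the watcher instruction if absent; return True when the file was fixed."""
    content = file.read_text()
    if WATCHER_APPEND_INSTRUCTION in content:
        return False
    # Indent to match the body of the DAG context manager in the examples.
    file.write_text(content.rstrip("\n") + f"\n\n    {WATCHER_APPEND_INSTRUCTION}\n")
    return True
```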

##########
File path: scripts/ci/pre_commit/pre_commit_check_watcher_in_examples.py
##########
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import sys
+from pathlib import Path
+from typing import List
+
+from rich.console import Console
+
+if __name__ not in ("__main__", "__mp_main__"):
+    raise SystemExit(
+        "This file is intended to be executed as an executable program. You 
cannot use it as a module."
+        f"To run this script, run the ./{__file__} command [FILE] ..."
+    )
+
+
+console = Console(color_system="standard", width=200)
+
+errors: List[str] = []
+
+WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"

Review comment:
       Apologies if I missed this in the email discussions.  If I am reading 
this and the code below correctly, then the `chain()` method of declaring the 
taskflow is not really supported anymore?
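
    For context, a sketch of how `chain()` can coexist with the watcher pattern: the pre-commit check only requires `list(dag.tasks) >> watcher()` to be the last instruction, not that the dependencies themselves use the `>>` style (hypothetical, self-contained example; `DummyOperator` stands in for real tasks):

```python
from datetime import datetime

from airflow import models
from airflow.models.baseoperator import chain
from airflow.operators.dummy import DummyOperator
from tests.system.utils.watcher import watcher

with models.DAG(
    "chain_style_example",
    schedule_interval="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
) as dag:
    setup = DummyOperator(task_id="setup")
    body = DummyOperator(task_id="body")
    teardown = DummyOperator(task_id="teardown")

    # chain() declares the same dependencies the >> style would.
    chain(setup, body, teardown)

    # The pre-commit check only looks for this exact line at the end.
    list(dag.tasks) >> watcher()
```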

##########
File path: scripts/ci/pre_commit/pre_commit_check_watcher_in_examples.py
##########
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import sys
+from pathlib import Path
+from typing import List
+
+from rich.console import Console
+
+if __name__ not in ("__main__", "__mp_main__"):
+    raise SystemExit(
+        "This file is intended to be executed as an executable program. You 
cannot use it as a module."
+        f"To run this script, run the ./{__file__} command [FILE] ..."
+    )
+
+
+console = Console(color_system="standard", width=200)
+
+errors: List[str] = []
+
+WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"
+
+PYTEST_FUNCTION = """
+def test_run():
+    from airflow.utils.state import State
+
+    dag.clear(dag_run_state=State.NONE)
+    dag.run()
+"""
+
+
+def _check_file(file: Path):
+    content = file.read_text()
+    if "from tests.system.utils.watcher import watcher" in content:
+        index = content.find(WATCHER_APPEND_INSTRUCTION)
+        if index == -1:
+            errors.append(
+                f"[red]The example {file} imports tests.system.utils.watcher "
+                f"but does not use it properly![/]\n\n"
+                "[yellow]Make sure you have:[/]\n\n"
+                f"        {WATCHER_APPEND_INSTRUCTION}\n\n"
+                "[yellow]as last instruction in your example DAG.[/]\n"
+            )
+        else:
+            operator_leftshift_index = content.find("<<", index + len(WATCHER_APPEND_INSTRUCTION))
+            operator_rightshift_index = content.find(">>", index + len(WATCHER_APPEND_INSTRUCTION))
+            if operator_leftshift_index != -1 or operator_rightshift_index != -1:
+                errors.append(
+                    f"[red]In the  example {file} "
+                    f"watcher is not last instruction in your DAG (there are 
<< "
+                    f"or >> operators after it)![/]\n\n"
+                    "[yellow]Make sure you have:[/]\n"
+                    f"        {WATCHER_APPEND_INSTRUCTION}\n\n"
+                    "[yellow]as last instruction in your example DAG.[/]\n"
+                )
+        if PYTEST_FUNCTION not in content:
+            errors.append(
+                f"[yellow]The  example {file} missed the pytest function at 
the end.[/]\n\n"
+                "All example tests should have this function added:\n\n" + 
PYTEST_FUNCTION + "\n\n"
+                "[yellow]Automatically adding it now!\n"

Review comment:
       Thank you for this.  There is nothing more frustrating to me than a 
precommit failing because of something it could easily fix magically.
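
    The hunk is truncated here, so as a guess at the shape of that auto-fix branch (hypothetical sketch, assuming the `file`, `content`, and `console` names from `_check_file` above):

```python
    if PYTEST_FUNCTION not in content:
        console.print(f"[yellow]The example {file} missed the pytest function; adding it now![/]")
        # Rewrite the file with the pytest helper appended at the end.
        file.write_text(content.rstrip("\n") + "\n\n" + PYTEST_FUNCTION)
```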

##########
File path: tests/system/providers/google/bigquery/example_bigquery_operations.py
##########
@@ -0,0 +1,106 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG for Google BigQuery service local file upload and external table creation.
+"""
+import os
+from datetime import datetime
+from pathlib import Path
+
+from airflow import models
+from airflow.providers.google.cloud.operators.bigquery import (
+    BigQueryCreateEmptyDatasetOperator,
+    BigQueryCreateExternalTableOperator,
+    BigQueryDeleteDatasetOperator,
+)
+from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.utils.trigger_rule import TriggerRule
+from tests.system.utils.watcher import watcher
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "bigquery_operations"
+
+DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
+DATA_SAMPLE_GCS_BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
+DATA_SAMPLE_GCS_OBJECT_NAME = "bigquery/us-states/us-states.csv"
+CSV_FILE_LOCAL_PATH = str(Path(__file__).parent / "resources" / "us-states.csv")
+
+
+with models.DAG(
+    DAG_ID,
+    schedule_interval="@once",
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
+    tags=["example", "bigquery"],
+) as dag:
+    create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME)
+
+    create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
+
+    upload_file = LocalFilesystemToGCSOperator(
+        task_id="upload_file_to_bucket",
+        src=CSV_FILE_LOCAL_PATH,
+        dst=DATA_SAMPLE_GCS_OBJECT_NAME,
+        bucket=DATA_SAMPLE_GCS_BUCKET_NAME,
+    )
+
+    # [START howto_operator_bigquery_create_external_table]
+    create_external_table = BigQueryCreateExternalTableOperator(
+        task_id="create_external_table",
+        destination_project_dataset_table=f"{DATASET_NAME}.external_table",
+        bucket=DATA_SAMPLE_GCS_BUCKET_NAME,
+        source_objects=[DATA_SAMPLE_GCS_OBJECT_NAME],
+        schema_fields=[
+            {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
+            {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
+        ],
+    )
+    # [END howto_operator_bigquery_create_external_table]
+
+    delete_dataset = BigQueryDeleteDatasetOperator(
+        task_id="delete_dataset",
+        dataset_id=DATASET_NAME,
+        delete_contents=True,
+        trigger_rule=TriggerRule.ALL_DONE,
+    )
+
+    delete_bucket = GCSDeleteBucketOperator(
+        task_id="delete_bucket", bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME, 
trigger_rule=TriggerRule.ALL_DONE
+    )
+
+    (
+        # TEST SETUP
+        [create_bucket, create_dataset]
+        # TEST BODY
+        >> upload_file
+        >> create_external_table
+        # TEST TEARDOWN
+        >> delete_dataset
+        >> delete_bucket
+    )
+
+    list(dag.tasks) >> watcher()

Review comment:
    I know this was discussed in the email thread, but I'll raise the concern one last time because this example really drives it home, then I promise I'll drop it.  We are adding the exact same line of code to every Example DAG (now System Test); shouldn't that really be handled behind the scenes when the test DAG is being parsed, instead of adding this copy/pasted line to literally hundreds of files?
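
    One intermediate option would be a shared helper, so each example carries a single self-describing call rather than the raw expression (a hedged sketch, not in the PR; it still leaves one repeated line per file, and it assumes `watcher()` binds to the active DAG context):

```python
# hypothetical helper, e.g. in tests/system/utils/__init__.py
from tests.system.utils.watcher import watcher


def watch(dag):
    """Attach the watcher task downstream of every task in the DAG."""
    with dag:  # re-enter the DAG context so watcher() binds to it
        list(dag.tasks) >> watcher()
```

    Each example would then end with `watch(dag)` instead.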

##########
File path: scripts/ci/pre_commit/pre_commit_check_watcher_in_examples.py
##########
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import sys
+from pathlib import Path
+from typing import List
+
+from rich.console import Console
+
+if __name__ not in ("__main__", "__mp_main__"):
+    raise SystemExit(
+        "This file is intended to be executed as an executable program. You 
cannot use it as a module."
+        f"To run this script, run the ./{__file__} command [FILE] ..."
+    )
+
+
+console = Console(color_system="standard", width=200)
+
+errors: List[str] = []
+
+WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"

Review comment:
       ~Apologies if I missed this in the email discussions.  If I am reading 
this and the code below correctly, then the `chain()` method of declaring the 
taskflow is not really supported anymore?~
   
   Totally did miss it, very sorry about that.

##########
File path: tests/system/providers/google/bigquery/example_bigquery_operations.py
##########
@@ -0,0 +1,106 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG for Google BigQuery service local file upload and external table creation.
+"""
+import os
+from datetime import datetime
+from pathlib import Path
+
+from airflow import models
+from airflow.providers.google.cloud.operators.bigquery import (
+    BigQueryCreateEmptyDatasetOperator,
+    BigQueryCreateExternalTableOperator,
+    BigQueryDeleteDatasetOperator,
+)
+from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.utils.trigger_rule import TriggerRule
+from tests.system.utils.watcher import watcher
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "bigquery_operations"
+
+DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
+DATA_SAMPLE_GCS_BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
+DATA_SAMPLE_GCS_OBJECT_NAME = "bigquery/us-states/us-states.csv"
+CSV_FILE_LOCAL_PATH = str(Path(__file__).parent / "resources" / "us-states.csv")
+
+
+with models.DAG(
+    DAG_ID,
+    schedule_interval="@once",
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
+    tags=["example", "bigquery"],
+) as dag:
+    create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME)
+
+    create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
+
+    upload_file = LocalFilesystemToGCSOperator(
+        task_id="upload_file_to_bucket",
+        src=CSV_FILE_LOCAL_PATH,
+        dst=DATA_SAMPLE_GCS_OBJECT_NAME,
+        bucket=DATA_SAMPLE_GCS_BUCKET_NAME,
+    )
+
+    # [START howto_operator_bigquery_create_external_table]
+    create_external_table = BigQueryCreateExternalTableOperator(
+        task_id="create_external_table",
+        destination_project_dataset_table=f"{DATASET_NAME}.external_table",
+        bucket=DATA_SAMPLE_GCS_BUCKET_NAME,
+        source_objects=[DATA_SAMPLE_GCS_OBJECT_NAME],
+        schema_fields=[
+            {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
+            {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
+        ],
+    )
+    # [END howto_operator_bigquery_create_external_table]
+
+    delete_dataset = BigQueryDeleteDatasetOperator(
+        task_id="delete_dataset",
+        dataset_id=DATASET_NAME,
+        delete_contents=True,
+        trigger_rule=TriggerRule.ALL_DONE,
+    )
+
+    delete_bucket = GCSDeleteBucketOperator(
+        task_id="delete_bucket", bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME, 
trigger_rule=TriggerRule.ALL_DONE
+    )
+
+    (
+        # TEST SETUP
+        [create_bucket, create_dataset]
+        # TEST BODY
+        >> upload_file
+        >> create_external_table
+        # TEST TEARDOWN
+        >> delete_dataset
+        >> delete_bucket
+    )
+
+    list(dag.tasks) >> watcher()

Review comment:
    I know this was discussed in the email thread, but I'll raise the concern one last time because this example really drives it home, then I promise I'll drop it.  We are adding the exact same line of code to every Example DAG (now System Test); shouldn't that really be handled behind the scenes when the test DAG is being parsed, instead of adding this copy/pasted line to literally hundreds of files?

##########
File path: tests/system/providers/google/bigquery/example_bigquery_operations.py
##########
@@ -0,0 +1,106 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG for Google BigQuery service local file upload and external table creation.
+"""
+import os
+from datetime import datetime
+from pathlib import Path
+
+from airflow import models
+from airflow.providers.google.cloud.operators.bigquery import (
+    BigQueryCreateEmptyDatasetOperator,
+    BigQueryCreateExternalTableOperator,
+    BigQueryDeleteDatasetOperator,
+)
+from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.utils.trigger_rule import TriggerRule
+from tests.system.utils.watcher import watcher
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "bigquery_operations"
+
+DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
+DATA_SAMPLE_GCS_BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
+DATA_SAMPLE_GCS_OBJECT_NAME = "bigquery/us-states/us-states.csv"
+CSV_FILE_LOCAL_PATH = str(Path(__file__).parent / "resources" / "us-states.csv")
+
+
+with models.DAG(
+    DAG_ID,
+    schedule_interval="@once",
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
+    tags=["example", "bigquery"],
+) as dag:
+    create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME)
+
+    create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create_dataset", dataset_id=DATASET_NAME)
+
+    upload_file = LocalFilesystemToGCSOperator(
+        task_id="upload_file_to_bucket",
+        src=CSV_FILE_LOCAL_PATH,
+        dst=DATA_SAMPLE_GCS_OBJECT_NAME,
+        bucket=DATA_SAMPLE_GCS_BUCKET_NAME,
+    )
+
+    # [START howto_operator_bigquery_create_external_table]
+    create_external_table = BigQueryCreateExternalTableOperator(
+        task_id="create_external_table",
+        destination_project_dataset_table=f"{DATASET_NAME}.external_table",
+        bucket=DATA_SAMPLE_GCS_BUCKET_NAME,
+        source_objects=[DATA_SAMPLE_GCS_OBJECT_NAME],
+        schema_fields=[
+            {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
+            {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
+        ],
+    )
+    # [END howto_operator_bigquery_create_external_table]
+
+    delete_dataset = BigQueryDeleteDatasetOperator(
+        task_id="delete_dataset",
+        dataset_id=DATASET_NAME,
+        delete_contents=True,
+        trigger_rule=TriggerRule.ALL_DONE,
+    )
+
+    delete_bucket = GCSDeleteBucketOperator(
+        task_id="delete_bucket", bucket_name=DATA_SAMPLE_GCS_BUCKET_NAME, 
trigger_rule=TriggerRule.ALL_DONE
+    )
+
+    (
+        # TEST SETUP
+        [create_bucket, create_dataset]
+        # TEST BODY
+        >> upload_file
+        >> create_external_table
+        # TEST TEARDOWN
+        >> delete_dataset
+        >> delete_bucket
+    )
+
+    list(dag.tasks) >> watcher()

Review comment:
       > I am open to any specific suggestions
   
    I get that.  Unfortunately I only have half-baked suggestions.  The only thing I can really think of would be if we had something like `tests.system.DAG` which inherits `airflow.DAG` and adds that watcher to the task list, but that's only a half-baked idea and I haven't really thought it through all the way.  Writing the same line in hundreds of files just feels wrong, but without a more solid alternative, I'll stop beating that dead horse.  Thanks for the effort you've put into this, and I hope I'm not coming off as argumentative or anything.
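
    For what it's worth, that half-baked idea might look something like this (hypothetical and untested; it assumes `watcher()` is a TaskFlow task that binds to whichever DAG context is active):

```python
# hypothetical tests/system/dag.py
from airflow.models.dag import DAG as AirflowDAG

from tests.system.utils.watcher import watcher


class DAG(AirflowDAG):
    """DAG variant that wires in the watcher automatically on context exit."""

    def __exit__(self, exc_type, exc_val, exc_tb):
        # self is still the active DAG here, so this mirrors the copy/pasted
        # `list(dag.tasks) >> watcher()` line in each example file.
        if exc_type is None:
            list(self.tasks) >> watcher()
        super().__exit__(exc_type, exc_val, exc_tb)
```

    Examples would then import this class in place of `models.DAG` and drop the trailing line.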




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
