This is an automated email from the ASF dual-hosted git repository.

shahar pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 0b6634e74b6 refactor vertex_ai image training system test and docs 
(#56036)
0b6634e74b6 is described below

commit 0b6634e74b6e62e77628843228dbe260ae4c0015
Author: olegkachur-e <[email protected]>
AuthorDate: Thu Sep 25 22:10:10 2025 +0000

    refactor vertex_ai image training system test and docs (#56036)
    
    - Move image training example to avoid duplication.
    
    Co-authored-by: Oleg Kachur <[email protected]>
---
 .../google/docs/operators/cloud/vertex_ai.rst      |   2 +-
 .../example_vertex_ai_auto_ml_image_training.py    | 152 ---------------------
 .../cloud/vertex_ai/example_vertex_ai_endpoint.py  |   7 +-
 3 files changed, 6 insertions(+), 155 deletions(-)

diff --git a/providers/google/docs/operators/cloud/vertex_ai.rst 
b/providers/google/docs/operators/cloud/vertex_ai.rst
index 055a1bb2804..831d44454e8 100644
--- a/providers/google/docs/operators/cloud/vertex_ai.rst
+++ b/providers/google/docs/operators/cloud/vertex_ai.rst
@@ -239,7 +239,7 @@ How to run AutoML Image Training Job
 Before start running this Job you must prepare and create ``Image`` dataset. 
After that you should
 put dataset id to ``dataset_id`` parameter in operator.
 
-.. exampleinclude:: 
/../../google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
+.. exampleinclude:: 
/../../google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
     :language: python
     :dedent: 4
     :start-after: [START 
how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator]
diff --git 
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
 
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
deleted file mode 100644
index 6f7b772559a..00000000000
--- 
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-
-"""
-Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
-"""
-
-from __future__ import annotations
-
-import os
-from datetime import datetime
-
-from google.cloud.aiplatform import schema
-from google.protobuf.struct_pb2 import Value
-
-from airflow.models.dag import DAG
-from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import (
-    CreateAutoMLImageTrainingJobOperator,
-    DeleteAutoMLTrainingJobOperator,
-)
-from airflow.providers.google.cloud.operators.vertex_ai.dataset import (
-    CreateDatasetOperator,
-    DeleteDatasetOperator,
-    ImportDataOperator,
-)
-
-try:
-    from airflow.sdk import TriggerRule
-except ImportError:
-    # Compatibility for Airflow < 3.1
-    from airflow.utils.trigger_rule import TriggerRule  # type: 
ignore[no-redef,attr-defined]
-
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
-DAG_ID = "vertex_ai_auto_ml_operations"
-REGION = "us-central1"
-IMAGE_DISPLAY_NAME = f"auto-ml-image-{ENV_ID}"
-MODEL_DISPLAY_NAME = f"auto-ml-image-model-{ENV_ID}"
-
-RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"
-
-IMAGE_DATASET = {
-    "display_name": f"image-dataset-{ENV_ID}",
-    "metadata_schema_uri": schema.dataset.metadata.image,
-    "metadata": Value(string_value="image-dataset"),
-}
-IMAGE_DATA_CONFIG = [
-    {
-        "import_schema_uri": 
schema.dataset.ioformat.image.single_label_classification,
-        "gcs_source": {"uris": 
[f"gs://{RESOURCE_DATA_BUCKET}/vertex-ai/datasets/flowers-dataset.csv"]},
-    },
-]
-
-
-with DAG(
-    f"{DAG_ID}_image_training_job",
-    schedule="@once",
-    start_date=datetime(2021, 1, 1),
-    catchup=False,
-    tags=["example", "vertex_ai", "auto_ml"],
-) as dag:
-    create_image_dataset = CreateDatasetOperator(
-        task_id="image_dataset",
-        dataset=IMAGE_DATASET,
-        region=REGION,
-        project_id=PROJECT_ID,
-    )
-    image_dataset_id = create_image_dataset.output["dataset_id"]
-
-    import_image_dataset = ImportDataOperator(
-        task_id="import_image_data",
-        dataset_id=image_dataset_id,
-        region=REGION,
-        project_id=PROJECT_ID,
-        import_configs=IMAGE_DATA_CONFIG,
-    )
-
-    # [START how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator]
-    create_auto_ml_image_training_job = CreateAutoMLImageTrainingJobOperator(
-        task_id="auto_ml_image_task",
-        display_name=IMAGE_DISPLAY_NAME,
-        dataset_id=image_dataset_id,
-        prediction_type="classification",
-        multi_label=False,
-        model_type="CLOUD",
-        training_fraction_split=0.6,
-        validation_fraction_split=0.2,
-        test_fraction_split=0.2,
-        budget_milli_node_hours=8000,
-        model_display_name=MODEL_DISPLAY_NAME,
-        disable_early_stopping=False,
-        region=REGION,
-        project_id=PROJECT_ID,
-    )
-    # [END how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator]
-
-    delete_auto_ml_image_training_job = DeleteAutoMLTrainingJobOperator(
-        task_id="delete_auto_ml_training_job",
-        training_pipeline_id="{{ 
task_instance.xcom_pull(task_ids='auto_ml_image_task', "
-        "key='training_id') }}",
-        region=REGION,
-        project_id=PROJECT_ID,
-        trigger_rule=TriggerRule.ALL_DONE,
-    )
-
-    delete_image_dataset = DeleteDatasetOperator(
-        task_id="delete_image_dataset",
-        dataset_id=image_dataset_id,
-        region=REGION,
-        project_id=PROJECT_ID,
-        trigger_rule=TriggerRule.ALL_DONE,
-    )
-
-    (
-        # TEST SETUP
-        create_image_dataset
-        >> import_image_dataset
-        # TEST BODY
-        >> create_auto_ml_image_training_job
-        # TEST TEARDOWN
-        >> delete_auto_ml_image_training_job
-        >> delete_image_dataset
-    )
-
-    # ### Everything below this line is not part of example ###
-    # ### Just for system tests purpose ###
-    from tests_common.test_utils.watcher import watcher
-
-    # This test needs watcher in order to properly mark success/failure
-    # when "tearDown" task with trigger rule is part of the DAG
-    list(dag.tasks) >> watcher()
-
-from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
-
-# Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest)
-test_run = get_test_run(dag)
diff --git 
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
 
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
index ff3219efe4f..2d694dd3217 100644
--- 
a/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+++ 
b/providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
@@ -82,10 +82,10 @@ ENDPOINT_CONF = {
 with DAG(
     DAG_ID,
     schedule="@once",
-    start_date=datetime(2021, 1, 1),
+    start_date=datetime(2025, 1, 1),
     catchup=False,
     render_template_as_native_obj=True,
-    tags=["example", "vertex_ai", "endpoint_service"],
+    tags=["example", "vertex_ai", "auto_ml", "image_classification", 
"endpoint_service"],
 ) as dag:
     create_image_dataset = CreateDatasetOperator(
         task_id="image_dataset",
@@ -103,6 +103,7 @@ with DAG(
         import_configs=IMAGE_DATA_CONFIG,
     )
 
+    # [START how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator]
     create_auto_ml_image_training_job = CreateAutoMLImageTrainingJobOperator(
         task_id="auto_ml_image_task",
         display_name=IMAGE_DISPLAY_NAME,
@@ -119,6 +120,8 @@ with DAG(
         region=REGION,
         project_id=PROJECT_ID,
     )
+    # [END how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator]
+
     DEPLOYED_MODEL = {
         # format: 'projects/{project}/locations/{location}/models/{model}'
         "model": "{{ti.xcom_pull('auto_ml_image_task')['name']}}",

Reply via email to