xianhualiu commented on code in PR #32678:
URL: https://github.com/apache/airflow/pull/32678#discussion_r1267245584
##########
airflow/providers/google/cloud/operators/datapipeline.py:
##########
@@ -0,0 +1,139 @@
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains Google DataPipeline operators."""
+from __future__ import annotations
+
+import copy
+import re
+import uuid
+import warnings
+from contextlib import ExitStack
+from enum import Enum
+from functools import cached_property
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow import AirflowException
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.google.cloud.hooks.datapipeline import (
+    DEFAULT_DATAPIPELINE_LOCATION,
+    DataPipelineHook
+)
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+from airflow.version import version
+
+
+class CreateDataPipelineOperator(GoogleCloudBaseOperator):
+    """
+    Creates a new Data Pipeline instance from the Data Pipeline API.
+
+    :param body: The request body (contains instance of Pipeline). See:
+        https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines/create#request-body
+    :param project_id: The ID of the GCP project that owns the job.
+    :param location: The location to direct the Data Pipeline instance to (example_dags uses uscentral-1).
+    :param gcp_conn_id: The connection ID to connect to the Google Cloud
+        Platform.
+
+    Returns the created Pipeline instance in JSON representation.
+    """
+    def __init__(
+        self,
+        *,
+        body: dict,
+        project_id: str | None = None,
+        location: str = DEFAULT_DATAPIPELINE_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.body = body
+        self.project_id = project_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.datapipeline_hook : DataPipelineHook | None = None
+        self.body["pipelineSources"] = {"airflow":"airflow"}
+
+    def execute(self, context: Context):
+        self.datapipeline_hook = DataPipelineHook(
+            gcp_conn_id=self.gcp_conn_id
+        )
+
+        self.data_pipeline = self.datapipeline_hook.create_data_pipeline(
+            project_id = self.project_id,
+            body = self.body,
+            location = self.location,
+        )
+        self.log.info("Response Body: ", self.data_pipeline)

Review Comment:
   please add failure handling logic based on the response code. If the response contains "error", an AirflowException needs to be raised with the "message" of the "error".
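   A minimal sketch of what that handling could look like, assuming `create_data_pipeline` returns the raw REST response as a dict and that a failed call surfaces as `{"error": {"message": ...}}` (both assumptions about the hook, not confirmed by the hunk above):

   ```python
       def execute(self, context: Context):
           self.datapipeline_hook = DataPipelineHook(
               gcp_conn_id=self.gcp_conn_id
           )

           self.data_pipeline = self.datapipeline_hook.create_data_pipeline(
               project_id=self.project_id,
               body=self.body,
               location=self.location,
           )
           # Assumed error shape: {"error": {"message": "...", ...}} in the response dict.
           if self.data_pipeline and "error" in self.data_pipeline:
               raise AirflowException(self.data_pipeline["error"].get("message", "Unknown error"))
           self.log.info("Response Body: %s", self.data_pipeline)
   ```

   Raising here makes the task fail instead of silently logging an error payload, and the API's error message ends up in the task log and the exception.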
