pankajastro commented on code in PR #27833:
URL: https://github.com/apache/airflow/pull/27833#discussion_r1032833197
##########
airflow/providers/google/cloud/operators/bigquery_dts.py:
##########
@@ -257,7 +266,7 @@ def __init__(
self,
*,
transfer_config_id: str,
- project_id: str | None = None,
+ project_id,
Review Comment:
this will be a breaking change, no?
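For illustration, a sketch of a backward-compatible signature that keeps the old default (the class name and `**kwargs` pass-through are placeholders, not the PR's exact code):
```python
from __future__ import annotations


class ExampleDTSOperator:  # hypothetical stand-in for the real operator
    def __init__(
        self,
        *,
        transfer_config_id: str,
        project_id: str | None = None,  # default retained so existing DAGs keep working
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.transfer_config_id = transfer_config_id
        # None means "fall back to the project configured on the connection"
        self.project_id = project_id
```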
##########
airflow/providers/google/cloud/triggers/bigquery_dts.py:
##########
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import asyncio
+from typing import Any, AsyncIterator, Sequence
+
+from google.cloud.bigquery_datatransfer_v1 import TransferRun, TransferState
+
+from airflow.providers.google.cloud.hooks.bigquery_dts import AsyncBiqQueryDataTransferServiceHook
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+
+class BigQueryDataTransferRunTrigger(BaseTrigger):
+ """Triggers class to watch the Transfer Run state to define when the job
is done.
+ :param project_id: The BigQuery project id where the transfer
configuration should be
+ :param config_id: ID of the config of the Transfer Run which should be
watched.
+ :param run_id: ID of the Transfer Run which should be watched.
+ :param poll_interval: Optional. Interval which defines how often triggers
check status of the job.
+ :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+ :param delegate_to: The account to impersonate using domain-wide
delegation of authority,
+ if any. For this to work, the service account making the request must have
+ domain-wide delegation enabled.
+ :param location: BigQuery Transfer Service location for regional transfers.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity,
with first
+ account from the list granting this role to the originating account
(templated).
+ """
+
+ def __init__(
+ self,
+ project_id: str,
+ config_id: str,
+ run_id: str,
+ poll_interval: int = 10,
+ gcp_conn_id: str = "google_cloud_default",
+ delegate_to: str | None = None,
+ location: str | None = None,
+ impersonation_chain: str | Sequence[str] | None = None,
+ ):
+ super().__init__()
+ self.project_id = project_id
+ self.config_id = config_id
+ self.run_id = run_id
+ self.poll_interval = poll_interval
+ self.gcp_conn_id = gcp_conn_id
+ self.delegate_to = delegate_to
+ self.location = location
+ self.impersonation_chain = impersonation_chain
+
+ def serialize(self) -> tuple[str, dict[str, Any]]:
+ """Serializes class arguments and classpath."""
+ return (
+ "airflow.providers.google.cloud.triggers.bigquery_dts.BigQueryDataTransferRunTrigger",
+ {
+ "project_id": self.project_id,
+ "config_id": self.config_id,
+ "run_id": self.run_id,
+ "poll_interval": self.poll_interval,
+ "gcp_conn_id": self.gcp_conn_id,
+ "delegate_to": self.delegate_to,
+ "location": self.location,
+ "impersonation_chain": self.impersonation_chain,
+ },
+ )
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Get Transfer Run status and if it one of the statuses which mean end of the job
+ then yield TriggerEvent object.
+ """
+ hook = self._get_async_hook()
+ while True:
+ try:
+ transfer_run: TransferRun = await hook.get_transfer_run(
+ project_id=self.project_id,
+ config_id=self.config_id,
+ run_id=self.run_id,
+ )
+ state = transfer_run.state
+ self.log.info("Current state is %s", state)
+
+ if state == TransferState.SUCCEEDED:
+ self.log.info("Job has completed it's work.")
+ yield TriggerEvent(
+ {
+ "status": "success",
+ "run_id": self.run_id,
+ "message": "Job completed",
+ }
+ )
+ return
+
+ elif state == TransferState.FAILED:
+ self.log.info("Job has failed")
+ yield TriggerEvent(
+ {
+ "status": "failed",
+ "run_id": self.run_id,
+ "message": "Job has failed",
+ }
+ )
+ return
+
+ if state == TransferState.CANCELLED:
+ self.log.info("Job has been cancelled.")
+ yield TriggerEvent(
+ {
+ "status": "cancelled",
+ "run_id": self.run_id,
+ "message": "Job was cancelled",
+ }
+ )
+ return
+
+ else:
+ self.log.info("Job is still working...")
+ self.log.info("Waiting for %s seconds", self.poll_interval)
+ await asyncio.sleep(self.poll_interval)
+
+ except Exception as e:
+ yield TriggerEvent(
+ {
+ "status": "failed",
+ "message": f"Trigger failed with exception: {str(e)}",
+ }
+ )
+ return
Review Comment:
```suggestion
```
##########
airflow/providers/google/cloud/triggers/bigquery_dts.py:
##########
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import asyncio
+from typing import Any, AsyncIterator, Sequence
+
+from google.cloud.bigquery_datatransfer_v1 import TransferRun, TransferState
+
+from airflow.providers.google.cloud.hooks.bigquery_dts import AsyncBiqQueryDataTransferServiceHook
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+
+class BigQueryDataTransferRunTrigger(BaseTrigger):
+ """Triggers class to watch the Transfer Run state to define when the job
is done.
+ :param project_id: The BigQuery project id where the transfer
configuration should be
+ :param config_id: ID of the config of the Transfer Run which should be
watched.
+ :param run_id: ID of the Transfer Run which should be watched.
+ :param poll_interval: Optional. Interval which defines how often triggers
check status of the job.
+ :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+ :param delegate_to: The account to impersonate using domain-wide
delegation of authority,
+ if any. For this to work, the service account making the request must have
+ domain-wide delegation enabled.
+ :param location: BigQuery Transfer Service location for regional transfers.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity,
with first
+ account from the list granting this role to the originating account
(templated).
+ """
+
+ def __init__(
+ self,
+ project_id: str,
+ config_id: str,
+ run_id: str,
+ poll_interval: int = 10,
+ gcp_conn_id: str = "google_cloud_default",
+ delegate_to: str | None = None,
+ location: str | None = None,
+ impersonation_chain: str | Sequence[str] | None = None,
+ ):
+ super().__init__()
+ self.project_id = project_id
+ self.config_id = config_id
+ self.run_id = run_id
+ self.poll_interval = poll_interval
+ self.gcp_conn_id = gcp_conn_id
+ self.delegate_to = delegate_to
+ self.location = location
+ self.impersonation_chain = impersonation_chain
+
+ def serialize(self) -> tuple[str, dict[str, Any]]:
+ """Serializes class arguments and classpath."""
+ return (
+ "airflow.providers.google.cloud.triggers.bigquery_dts.BigQueryDataTransferRunTrigger",
+ {
+ "project_id": self.project_id,
+ "config_id": self.config_id,
+ "run_id": self.run_id,
+ "poll_interval": self.poll_interval,
+ "gcp_conn_id": self.gcp_conn_id,
+ "delegate_to": self.delegate_to,
+ "location": self.location,
+ "impersonation_chain": self.impersonation_chain,
+ },
+ )
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Get Transfer Run status and if it one of the statuses which mean end of the job
+ then yield TriggerEvent object.
+ """
+ hook = self._get_async_hook()
+ while True:
+ try:
+ transfer_run: TransferRun = await hook.get_transfer_run(
+ project_id=self.project_id,
+ config_id=self.config_id,
+ run_id=self.run_id,
+ )
+ state = transfer_run.state
+ self.log.info("Current state is %s", state)
+
+ if state == TransferState.SUCCEEDED:
+ self.log.info("Job has completed it's work.")
+ yield TriggerEvent(
+ {
+ "status": "success",
+ "run_id": self.run_id,
+ "message": "Job completed",
+ }
+ )
+ return
+
+ elif state == TransferState.FAILED:
+ self.log.info("Job has failed")
+ yield TriggerEvent(
+ {
+ "status": "failed",
+ "run_id": self.run_id,
+ "message": "Job has failed",
+ }
+ )
+ return
+
+ if state == TransferState.CANCELLED:
+ self.log.info("Job has been cancelled.")
+ yield TriggerEvent(
+ {
+ "status": "cancelled",
+ "run_id": self.run_id,
+ "message": "Job was cancelled",
+ }
+ )
+ return
Review Comment:
```suggestion
```
##########
airflow/providers/google/cloud/triggers/bigquery_dts.py:
##########
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import asyncio
+from typing import Any, AsyncIterator, Sequence
+
+from google.cloud.bigquery_datatransfer_v1 import TransferRun, TransferState
+
+from airflow.providers.google.cloud.hooks.bigquery_dts import AsyncBiqQueryDataTransferServiceHook
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+
+class BigQueryDataTransferRunTrigger(BaseTrigger):
+ """Triggers class to watch the Transfer Run state to define when the job
is done.
+ :param project_id: The BigQuery project id where the transfer
configuration should be
+ :param config_id: ID of the config of the Transfer Run which should be
watched.
+ :param run_id: ID of the Transfer Run which should be watched.
+ :param poll_interval: Optional. Interval which defines how often triggers
check status of the job.
+ :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+ :param delegate_to: The account to impersonate using domain-wide
delegation of authority,
+ if any. For this to work, the service account making the request must have
+ domain-wide delegation enabled.
+ :param location: BigQuery Transfer Service location for regional transfers.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity,
with first
+ account from the list granting this role to the originating account
(templated).
+ """
+
+ def __init__(
+ self,
+ project_id: str,
+ config_id: str,
+ run_id: str,
+ poll_interval: int = 10,
+ gcp_conn_id: str = "google_cloud_default",
+ delegate_to: str | None = None,
+ location: str | None = None,
+ impersonation_chain: str | Sequence[str] | None = None,
+ ):
+ super().__init__()
+ self.project_id = project_id
+ self.config_id = config_id
+ self.run_id = run_id
+ self.poll_interval = poll_interval
+ self.gcp_conn_id = gcp_conn_id
+ self.delegate_to = delegate_to
+ self.location = location
+ self.impersonation_chain = impersonation_chain
+
+ def serialize(self) -> tuple[str, dict[str, Any]]:
+ """Serializes class arguments and classpath."""
+ return (
+ "airflow.providers.google.cloud.triggers.bigquery_dts.BigQueryDataTransferRunTrigger",
+ {
+ "project_id": self.project_id,
+ "config_id": self.config_id,
+ "run_id": self.run_id,
+ "poll_interval": self.poll_interval,
+ "gcp_conn_id": self.gcp_conn_id,
+ "delegate_to": self.delegate_to,
+ "location": self.location,
+ "impersonation_chain": self.impersonation_chain,
+ },
+ )
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Get Transfer Run status and if it one of the statuses which mean end of the job
+ then yield TriggerEvent object.
+ """
+ hook = self._get_async_hook()
+ while True:
+ try:
+ transfer_run: TransferRun = await hook.get_transfer_run(
+ project_id=self.project_id,
+ config_id=self.config_id,
+ run_id=self.run_id,
+ )
+ state = transfer_run.state
+ self.log.info("Current state is %s", state)
+
+ if state == TransferState.SUCCEEDED:
+ self.log.info("Job has completed it's work.")
+ yield TriggerEvent(
+ {
+ "status": "success",
+ "run_id": self.run_id,
+ "message": "Job completed",
+ }
+ )
+ return
Review Comment:
The return statement should not be required here, since you are yielding.
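For illustration, the loop could collapse to something like this sketch (my own restructuring, assuming the triggerer stops the trigger once a terminal event is yielded; imports as in the module above, and the state-to-payload mapping is shorthand, not the PR's code):
```python
# Map terminal states to event payloads and yield exactly once; the async
# generator ends when the loop breaks, so no explicit return is needed.
TERMINAL_STATES = {
    TransferState.SUCCEEDED: ("success", "Job completed"),
    TransferState.FAILED: ("failed", "Job has failed"),
    TransferState.CANCELLED: ("cancelled", "Job was cancelled"),
}


async def run(self) -> AsyncIterator[TriggerEvent]:
    hook = self._get_async_hook()
    while True:
        transfer_run = await hook.get_transfer_run(
            project_id=self.project_id,
            config_id=self.config_id,
            run_id=self.run_id,
        )
        if transfer_run.state in TERMINAL_STATES:
            status, message = TERMINAL_STATES[transfer_run.state]
            yield TriggerEvent({"status": status, "run_id": self.run_id, "message": message})
            break  # generator is exhausted here
        await asyncio.sleep(self.poll_interval)
```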
##########
airflow/providers/google/cloud/triggers/bigquery_dts.py:
##########
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import asyncio
+from typing import Any, AsyncIterator, Sequence
+
+from google.cloud.bigquery_datatransfer_v1 import TransferRun, TransferState
+
+from airflow.providers.google.cloud.hooks.bigquery_dts import AsyncBiqQueryDataTransferServiceHook
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+
+class BigQueryDataTransferRunTrigger(BaseTrigger):
+ """Triggers class to watch the Transfer Run state to define when the job
is done.
+ :param project_id: The BigQuery project id where the transfer
configuration should be
+ :param config_id: ID of the config of the Transfer Run which should be
watched.
+ :param run_id: ID of the Transfer Run which should be watched.
+ :param poll_interval: Optional. Interval which defines how often triggers
check status of the job.
+ :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+ :param delegate_to: The account to impersonate using domain-wide
delegation of authority,
+ if any. For this to work, the service account making the request must have
+ domain-wide delegation enabled.
+ :param location: BigQuery Transfer Service location for regional transfers.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity,
with first
+ account from the list granting this role to the originating account
(templated).
+ """
+
+ def __init__(
+ self,
+ project_id: str,
+ config_id: str,
+ run_id: str,
+ poll_interval: int = 10,
+ gcp_conn_id: str = "google_cloud_default",
+ delegate_to: str | None = None,
+ location: str | None = None,
+ impersonation_chain: str | Sequence[str] | None = None,
+ ):
+ super().__init__()
+ self.project_id = project_id
+ self.config_id = config_id
+ self.run_id = run_id
+ self.poll_interval = poll_interval
+ self.gcp_conn_id = gcp_conn_id
+ self.delegate_to = delegate_to
+ self.location = location
+ self.impersonation_chain = impersonation_chain
+
+ def serialize(self) -> tuple[str, dict[str, Any]]:
+ """Serializes class arguments and classpath."""
+ return (
+ "airflow.providers.google.cloud.triggers.bigquery_dts.BigQueryDataTransferRunTrigger",
+ {
+ "project_id": self.project_id,
+ "config_id": self.config_id,
+ "run_id": self.run_id,
+ "poll_interval": self.poll_interval,
+ "gcp_conn_id": self.gcp_conn_id,
+ "delegate_to": self.delegate_to,
+ "location": self.location,
+ "impersonation_chain": self.impersonation_chain,
+ },
+ )
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Get Transfer Run status and if it one of the statuses which mean end of the job
+ then yield TriggerEvent object.
+ """
+ hook = self._get_async_hook()
+ while True:
+ try:
+ transfer_run: TransferRun = await hook.get_transfer_run(
+ project_id=self.project_id,
+ config_id=self.config_id,
+ run_id=self.run_id,
+ )
+ state = transfer_run.state
+ self.log.info("Current state is %s", state)
+
+ if state == TransferState.SUCCEEDED:
+ self.log.info("Job has completed it's work.")
+ yield TriggerEvent(
+ {
+ "status": "success",
+ "run_id": self.run_id,
+ "message": "Job completed",
+ }
+ )
+ return
+
+ elif state == TransferState.FAILED:
+ self.log.info("Job has failed")
+ yield TriggerEvent(
+ {
+ "status": "failed",
+ "run_id": self.run_id,
+ "message": "Job has failed",
+ }
+ )
+ return
Review Comment:
```suggestion
```
##########
airflow/providers/google/cloud/triggers/bigquery_dts.py:
##########
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import asyncio
+from typing import Any, AsyncIterator, Sequence
+
+from google.cloud.bigquery_datatransfer_v1 import TransferRun, TransferState
+
+from airflow.providers.google.cloud.hooks.bigquery_dts import AsyncBiqQueryDataTransferServiceHook
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+
+class BigQueryDataTransferRunTrigger(BaseTrigger):
+ """Triggers class to watch the Transfer Run state to define when the job
is done.
+ :param project_id: The BigQuery project id where the transfer
configuration should be
+ :param config_id: ID of the config of the Transfer Run which should be
watched.
+ :param run_id: ID of the Transfer Run which should be watched.
+ :param poll_interval: Optional. Interval which defines how often triggers
check status of the job.
+ :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+ :param delegate_to: The account to impersonate using domain-wide
delegation of authority,
+ if any. For this to work, the service account making the request must have
+ domain-wide delegation enabled.
+ :param location: BigQuery Transfer Service location for regional transfers.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity,
with first
+ account from the list granting this role to the originating account
(templated).
+ """
+
+ def __init__(
+ self,
+ project_id: str,
+ config_id: str,
+ run_id: str,
+ poll_interval: int = 10,
+ gcp_conn_id: str = "google_cloud_default",
+ delegate_to: str | None = None,
+ location: str | None = None,
+ impersonation_chain: str | Sequence[str] | None = None,
+ ):
+ super().__init__()
+ self.project_id = project_id
+ self.config_id = config_id
+ self.run_id = run_id
+ self.poll_interval = poll_interval
+ self.gcp_conn_id = gcp_conn_id
+ self.delegate_to = delegate_to
+ self.location = location
+ self.impersonation_chain = impersonation_chain
+
+ def serialize(self) -> tuple[str, dict[str, Any]]:
+ """Serializes class arguments and classpath."""
+ return (
+ "airflow.providers.google.cloud.triggers.bigquery_dts.BigQueryDataTransferRunTrigger",
+ {
+ "project_id": self.project_id,
+ "config_id": self.config_id,
+ "run_id": self.run_id,
+ "poll_interval": self.poll_interval,
+ "gcp_conn_id": self.gcp_conn_id,
+ "delegate_to": self.delegate_to,
+ "location": self.location,
+ "impersonation_chain": self.impersonation_chain,
+ },
+ )
+
+ async def run(self) -> AsyncIterator[TriggerEvent]:
+ """
+ Get Transfer Run status and if it one of the statuses which mean end of the job
+ then yield TriggerEvent object.
+ """
+ hook = self._get_async_hook()
+ while True:
+ try:
+ transfer_run: TransferRun = await hook.get_transfer_run(
+ project_id=self.project_id,
+ config_id=self.config_id,
+ run_id=self.run_id,
+ )
+ state = transfer_run.state
+ self.log.info("Current state is %s", state)
+
+ if state == TransferState.SUCCEEDED:
+ self.log.info("Job has completed it's work.")
+ yield TriggerEvent(
+ {
+ "status": "success",
+ "run_id": self.run_id,
+ "message": "Job completed",
+ }
+ )
+ return
Review Comment:
```suggestion
```
##########
airflow/providers/google/cloud/operators/bigquery_dts.py:
##########
@@ -307,5 +317,82 @@ def execute(self, context: Context):
result = StartManualTransferRunsResponse.to_dict(response)
run_id = get_object_id(result["runs"][0])
self.xcom_push(context, key="run_id", value=run_id)
- self.log.info("Transfer run %s submitted successfully.", run_id)
- return result
+
+ if not self.deferrable:
+ result = self._wait_for_transfer_to_be_done(
+ run_id=run_id,
+ transfer_config_id=transfer_config["config_id"],
+ )
+ self.log.info("Transfer run %s submitted successfully.", run_id)
+ return result
+
+ self.defer(
+ trigger=BigQueryDataTransferRunTrigger(
+ project_id=self.project_id,
+ config_id=transfer_config["config_id"],
+ run_id=run_id,
+ gcp_conn_id=self.gcp_conn_id,
+ location=self.location,
+ impersonation_chain=self.impersonation_chain,
+ ),
+ method_name="execute_completed",
+ )
+
+ def _get_hook(self) -> BiqQueryDataTransferServiceHook:
+ if self._hook is None:
+ self._hook = BiqQueryDataTransferServiceHook(
+ gcp_conn_id=self.gcp_conn_id,
+ impersonation_chain=self.impersonation_chain,
+ location=self.location,
+ )
+ return self._hook
+
+ def _wait_for_transfer_to_be_done(self, run_id: str, transfer_config_id: str, interval: int = 10):
+ if interval < 0:
+ raise ValueError("Interval must be > 0")
+
+ while True:
+ transfer_run: TransferRun = self._get_hook().get_transfer_run(
+ run_id=run_id,
+ transfer_config_id=transfer_config_id,
+ project_id=self.project_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ state = transfer_run.state
+
+ if self._job_is_done(state):
+ if state == TransferState.FAILED or state == TransferState.CANCELLED:
+ raise AirflowException(f"Transfer run was finished with {state} status.")
+
+ result = TransferRun.to_dict(transfer_run)
+ return result
+
+ self.log.info("Transfer run is still working, waiting for %s
seconds...", interval)
+ self.log.info("Transfer run status: %s", state)
+ time.sleep(interval)
+
+ @staticmethod
+ def _job_is_done(state: TransferState) -> bool:
+ finished_job_statuses = [
+ state.SUCCEEDED,
+ state.CANCELLED,
+ state.FAILED,
+ ]
+
+ return state in finished_job_statuses
+
+ def execute_completed(self, context: Context, event: dict):
+ """Method to be executed after invoked trigger in defer method
finishes its job."""
+ if event["status"] == "failed" or event["status"] == "cancelled":
Review Comment:
What if the trigger returns a None event?
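A sketch of the guard I have in mind (imports as in the module; the exception message is illustrative):
```python
def execute_completed(self, context: Context, event: dict | None):
    """Callback for the deferred trigger; tolerate a missing event."""
    if event is None:
        raise AirflowException("Trigger completed without returning an event.")
    if event["status"] in ("failed", "cancelled"):
        self.log.error("Trigger finished its work with status: %s.", event["status"])
        raise AirflowException(event["message"])
    return event["run_id"]
```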
##########
airflow/providers/google/cloud/operators/bigquery_dts.py:
##########
@@ -307,5 +317,82 @@ def execute(self, context: Context):
result = StartManualTransferRunsResponse.to_dict(response)
run_id = get_object_id(result["runs"][0])
self.xcom_push(context, key="run_id", value=run_id)
- self.log.info("Transfer run %s submitted successfully.", run_id)
- return result
+
+ if not self.deferrable:
+ result = self._wait_for_transfer_to_be_done(
+ run_id=run_id,
+ transfer_config_id=transfer_config["config_id"],
+ )
+ self.log.info("Transfer run %s submitted successfully.", run_id)
+ return result
+
+ self.defer(
+ trigger=BigQueryDataTransferRunTrigger(
+ project_id=self.project_id,
+ config_id=transfer_config["config_id"],
+ run_id=run_id,
+ gcp_conn_id=self.gcp_conn_id,
+ location=self.location,
+ impersonation_chain=self.impersonation_chain,
+ ),
+ method_name="execute_completed",
+ )
+
+ def _get_hook(self) -> BiqQueryDataTransferServiceHook:
+ if self._hook is None:
+ self._hook = BiqQueryDataTransferServiceHook(
+ gcp_conn_id=self.gcp_conn_id,
+ impersonation_chain=self.impersonation_chain,
+ location=self.location,
+ )
+ return self._hook
+
+ def _wait_for_transfer_to_be_done(self, run_id: str, transfer_config_id: str, interval: int = 10):
+ if interval < 0:
+ raise ValueError("Interval must be > 0")
+
+ while True:
+ transfer_run: TransferRun = self._get_hook().get_transfer_run(
+ run_id=run_id,
+ transfer_config_id=transfer_config_id,
+ project_id=self.project_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ state = transfer_run.state
+
+ if self._job_is_done(state):
+ if state == TransferState.FAILED or state == TransferState.CANCELLED:
+ raise AirflowException(f"Transfer run was finished with {state} status.")
+
+ result = TransferRun.to_dict(transfer_run)
+ return result
+
+ self.log.info("Transfer run is still working, waiting for %s
seconds...", interval)
+ self.log.info("Transfer run status: %s", state)
+ time.sleep(interval)
+
+ @staticmethod
+ def _job_is_done(state: TransferState) -> bool:
+ finished_job_statuses = [
+ state.SUCCEEDED,
+ state.CANCELLED,
+ state.FAILED,
+ ]
+
+ return state in finished_job_statuses
+
+ def execute_completed(self, context: Context, event: dict):
+ """Method to be executed after invoked trigger in defer method
finishes its job."""
+ if event["status"] == "failed" or event["status"] == "cancelled":
+ self.log.error("Trigger finished its work with status: %s.",
event["status"])
+ raise AirflowException(event["message"])
+
+ self.log.info(
+ "%s finished with message: %s",
+ event["run_id"],
+ event["message"],
+ )
+
+ return event["run_id"]
Review Comment:
The sync version of the operator pushes the `TransferRun` result to XCom; I feel we should do the same for the async version, WDYT?
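For example, something along these lines (a sketch only; `self._config_id` is a hypothetical attribute the operator would need to store during `execute`):
```python
def execute_completed(self, context: Context, event: dict):
    if event["status"] in ("failed", "cancelled"):
        raise AirflowException(event["message"])
    # Re-fetch the finished run so the deferrable path returns (and thereby
    # pushes to XCom) the same TransferRun dict as the sync path.
    transfer_run: TransferRun = self._get_hook().get_transfer_run(
        run_id=event["run_id"],
        transfer_config_id=self._config_id,  # hypothetical: saved in execute()
        project_id=self.project_id,
    )
    return TransferRun.to_dict(transfer_run)
```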