aaltay commented on a change in pull request #12249:
URL: https://github.com/apache/airflow/pull/12249#discussion_r527165624
##########
File path: airflow/providers/google/cloud/sensors/dataflow.py
##########
@@ -188,3 +188,167 @@ def poke(self, context: dict) -> bool:
)
return self.callback(result["metrics"])
+
+
+class DataflowJobMessagesSensor(BaseSensorOperator):
+ """
+ Checks for job messages in Google Cloud Dataflow.
+
+ :param job_id: ID of the job to be checked.
+ :type job_id: str
+ :param callback: callback which is called with the list of read job messages
+ See:
+ https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs.messages/list#JobMessage
+ :type callback: callable
+ :param project_id: Optional, the Google Cloud project ID in which to start
a job.
+ If set to None or missing, the default project_id from the Google
Cloud connection is used.
+ :type project_id: str
+ :param location: Job location.
+ :type location: str
+ :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+ :type gcp_conn_id: str
+ :param delegate_to: The account to impersonate using domain-wide
delegation of authority,
+ if any. For this to work, the service account making the request must
have
+ domain-wide delegation enabled.
+ :type delegate_to: str
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :type impersonation_chain: Union[str, Sequence[str]]
+ """
+
+ template_fields = ['job_id']
+
+ @apply_defaults
+ def __init__(
+ self,
+ *,
+ job_id: str,
+ callback: Callable,
+ project_id: Optional[str] = None,
+ location: str = DEFAULT_DATAFLOW_LOCATION,
+ gcp_conn_id: str = 'google_cloud_default',
+ delegate_to: Optional[str] = None,
+ impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+ **kwargs,
+ ) -> None:
+ super().__init__(**kwargs)
+ self.job_id = job_id
+ self.project_id = project_id
+ self.callback = callback
+ self.location = location
+ self.gcp_conn_id = gcp_conn_id
+ self.delegate_to = delegate_to
+ self.impersonation_chain = impersonation_chain
+ self.hook: Optional[DataflowHook] = None
+
+ def poke(self, context: dict) -> bool:
+ self.hook = DataflowHook(
+ gcp_conn_id=self.gcp_conn_id,
+ delegate_to=self.delegate_to,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ job = self.hook.get_job(
+ job_id=self.job_id,
+ project_id=self.project_id,
+ location=self.location,
+ )
+ job_status = job["currentState"]
+ if job_status in DataflowJobStatus.TERMINAL_STATES:
+ raise AirflowException(f"Job with id '{self.job_id}' is already in
terminal state: {job_status}")
Review comment:
Wouldn't this prevent sensing messages after a terminal state? Presumably a
user might want to get job messages, and they can ask for them before the job
reaches a terminal state — but if the job reaches a terminal state in the
meantime, this sensor will not work.
I have limited knowledge about how Airflow users use sensors, so this might
be the right way.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]