bkossakowska commented on code in PR #32256:
URL: https://github.com/apache/airflow/pull/32256#discussion_r1277649062


##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +613,851 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the data scan belongs to.
    :param region: Required. The ID of the Google Cloud region that the data scan belongs to.
    :param data_scan: Required. DataScan resource.
    :param data_scan_id: Required. DataScan identifier.
    :param api_version: The version of the api that will be requested, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "data_scan", "impersonation_chain")
    template_fields_renderers = {"data_scan": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        data_scan: dict[str, Any],
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.data_scan = data_scan
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
        operation = hook.create_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            data_scan=self.data_scan,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # Block on the long-running create operation so any failure surfaces in this task.
        operation.result()
        self.log.info("Dataplex data scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityOperator(GoogleCloudBaseOperator):
    """
    Deletes a DataScan resource.

    .. note:: The class name omits "Scan" (cf. ``DataplexCreateDataQualityScanOperator``);
        it is kept unchanged for backward compatibility with existing DAGs.

    :param project_id: Required. The ID of the Google Cloud project that the data scan belongs to.
    :param region: Required. The ID of the Google Cloud region that the data scan belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Deleting Dataplex data scan job: %s", self.data_scan_id)

        operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # Wait for the long-running delete to finish so errors are raised in-task.
        operation.result()
        self.log.info("Dataplex data_scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan and returns the new job's id.

    :param project_id: Required. ID of the Google Cloud project the data scan lives in.
    :param region: Required. ID of the Google Cloud region the data scan lives in.
    :param data_scan_id: Required. Identifier of the data scan to run.
    :param api_version: API version to request, for example 'v1'.
    :param retry: Retry object for the request; if ``None``, the request is not retried.
    :param timeout: Seconds to wait for the request to complete; when ``retry`` is set,
        this applies to each individual attempt.
    :param metadata: Additional metadata forwarded to the method.
    :param gcp_conn_id: Connection ID used to fetch connection info.
    :param impersonation_chain: Optional service account (or chain of accounts) to
        impersonate using short-term credentials; the last account in the chain is
        impersonated in the request. A single string account must grant the
        originating account the Service Account Token Creator IAM role; with a
        sequence, each identity must grant that role to the directly preceding one,
        and the first must be granted it by the originating account (templated).
    :return: Dataplex data scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        run_response = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; its final segment is the job id.
        return run_response.job.name.rsplit("/", 1)[-1]
+
+
class DataplexExecuteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Executes a Dataplex data quality scan: starts an on-demand run and waits for the job.

    :param project_id: Required. The ID of the Google Cloud project that the data scan belongs to.
    :param region: Required. The ID of the Google Cloud region that the data scan belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param failure_mode: If True, fail the task when the finished job's data quality
        result reports that the scan did not pass.
    :param wait_timeout: NOTE(review): stored but not referenced by this operator —
        confirm whether it should bound the ``wait_for_job`` call.
    :param polling_interval_seconds: time in seconds between polling for job completion.
        The value is considered only when running in deferrable mode. Must be greater than 0.
    :param deferrable: Run operator in the deferrable mode.
    :return: Dict representing Data scan job.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        failure_mode: bool = False,
        wait_timeout: int | None = None,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        polling_interval_seconds: int = 10,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.failure_mode = failure_mode
        self.wait_timeout = wait_timeout
        self.deferrable = deferrable
        self.polling_interval_seconds = polling_interval_seconds

    def execute(self, context: Context) -> dict:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        result = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; its final segment is the job id.
        job_id = result.job.name.split("/")[-1]

        if self.deferrable:
            # defer() raises TaskDeferred; the trigger resumes via execute_complete.
            self.defer(
                trigger=DataplexJobTrigger(
                    job_id=job_id,
                    data_scan_id=self.data_scan_id,
                    project_id=self.project_id,
                    region=self.region,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    polling_interval_seconds=self.polling_interval_seconds,
                ),
                method_name="execute_complete",
            )
        else:
            self.log.info("Waiting for job %s to complete", job_id)

            job = hook.wait_for_job(
                job=result.job,
                project_id=self.project_id,
                region=self.region,
            )
            # Check the terminal state before claiming success in the log.
            if job.state == DataScanJob.State.FAILED:
                raise AirflowException(f"Job failed:\n{job.name}")
            self.log.info("Job %s completed successfully.", job_id)
            # Keep the proto's resource name before converting to a plain dict:
            # after MessageToDict, `job` no longer has a `.name` attribute, so the
            # original `f"...{job.name}"` would raise AttributeError here.
            job_name = job.name
            job = MessageToDict(job._pb)
            if self.failure_mode and not job["dataQualityResult"]["passed"]:
                raise AirflowException(f"Job failed due to failure of data scan:\n{job_name}")
        return job

    def execute_complete(self, context, event=None):
        """
        Act on the trigger event and return the finished job.

        Relies on trigger to throw an exception, otherwise it assumes execution was
        successful.
        """
        job_state = event["job_state"]
        job_id = event["job_id"]
        if job_state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job_id}")
        if job_state == DataScanJob.State.CANCELLED:
            raise AirflowException(f"Job was cancelled:\n{job_id}")
        self.log.info("%s completed successfully.", self.task_id)
        # Presumably a serialized job payload from the trigger — confirm against
        # DataplexJobTrigger's event shape.
        return event["job"]
+
+
class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
    """
    Gets a Data Scan Job resource.

    :param project_id: Required. The ID of the Google Cloud project that the data scan belongs to.
    :param region: Required. The ID of the Google Cloud region that the data scan belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param job_id: Optional. Data scan job identifier. If not provided, the most
        recent job of the scan is used.
    :param api_version: The version of the api that will be requested, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param wait_for_result: Wait for job to finish.
    :param wait_timeout: NOTE(review): stored but not referenced in this operator —
        confirm intended use.
    :param polling_interval_seconds: time in seconds between polling for job completion.
        The value is considered only when running in deferrable mode. Must be greater than 0.
    :param deferrable: Run operator in the deferrable mode.
    :return: Dict representing Data scan job.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        job_id: str | None = None,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        wait_for_result: bool = True,
        wait_timeout: int | None = None,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        polling_interval_seconds: int = 10,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.job_id = job_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.wait_for_result = wait_for_result
        self.wait_timeout = wait_timeout
        self.deferrable = deferrable
        self.polling_interval_seconds = polling_interval_seconds

    @staticmethod
    def _handle_job_error(job) -> None:
        # Translate terminal failure states of the job into Airflow task failures.
        if job.state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job.name}")
        if job.state == DataScanJob.State.CANCELLED:
            raise AirflowException(f"Job was cancelled:\n{job.name}")

    def execute(self, context: Context) -> dict:

        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        # No explicit job_id: fall back to the first job returned by the listing
        # (presumably the most recent one — confirm the API's ordering guarantee).
        if not self.job_id:
            jobs = hook.list_data_scan_jobs(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Each entry is converted to a plain dict (not just ids, despite the name).
            job_ids = [DataScanJob.to_dict(job) for job in jobs]
            if not job_ids:
                raise AirflowException("Create a job before.")
            job_id = job_ids[0]["name"]
            job = job_ids[0]
            # The "name" field is a full resource path; keep only its last segment.
            self.job_id = job_id.split("/")[-1]

        if self.deferrable:
            # defer() raises TaskDeferred, so the code below this if/else does not
            # run in the deferrable branch; the trigger resumes via execute_complete.
            self.defer(
                trigger=DataplexJobTrigger(
                    job_id=self.job_id,
                    data_scan_id=self.data_scan_id,
                    project_id=self.project_id,
                    region=self.region,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    polling_interval_seconds=self.polling_interval_seconds,
                ),
                method_name="execute_complete",
            )
        else:
            job = hook.get_data_scan_job(
                project_id=self.project_id,
                region=self.region,
                job_id=self.job_id,
                data_scan_id=self.data_scan_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            if self.wait_for_result:
                # Poll until the job reaches a terminal state before inspecting it.
                job = hook.wait_for_job(
                    job=job,
                    project_id=self.project_id,
                    region=self.region,
                )
        self._handle_job_error(job)
        return MessageToDict(job._pb)

Review Comment:
   done



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +613,851 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the data scan belongs to.
    :param region: Required. The ID of the Google Cloud region that the data scan belongs to.
    :param data_scan: Required. DataScan resource.
    :param data_scan_id: Required. DataScan identifier.
    :param api_version: The version of the api that will be requested, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "data_scan", "impersonation_chain")
    template_fields_renderers = {"data_scan": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        data_scan: dict[str, Any],
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.data_scan = data_scan
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
        operation = hook.create_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            data_scan=self.data_scan,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # Block on the long-running create operation so any failure surfaces in this task.
        operation.result()
        self.log.info("Dataplex data scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityOperator(GoogleCloudBaseOperator):
    """
    Deletes a DataScan resource.

    .. note:: The class name omits "Scan" (cf. ``DataplexCreateDataQualityScanOperator``);
        it is kept unchanged for backward compatibility with existing DAGs.

    :param project_id: Required. The ID of the Google Cloud project that the data scan belongs to.
    :param region: Required. The ID of the Google Cloud region that the data scan belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Deleting Dataplex data scan job: %s", self.data_scan_id)

        operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # Wait for the long-running delete to finish so errors are raised in-task.
        operation.result()
        self.log.info("Dataplex data_scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan and returns the new job's id.

    :param project_id: Required. ID of the Google Cloud project the data scan lives in.
    :param region: Required. ID of the Google Cloud region the data scan lives in.
    :param data_scan_id: Required. Identifier of the data scan to run.
    :param api_version: API version to request, for example 'v1'.
    :param retry: Retry object for the request; if ``None``, the request is not retried.
    :param timeout: Seconds to wait for the request to complete; when ``retry`` is set,
        this applies to each individual attempt.
    :param metadata: Additional metadata forwarded to the method.
    :param gcp_conn_id: Connection ID used to fetch connection info.
    :param impersonation_chain: Optional service account (or chain of accounts) to
        impersonate using short-term credentials; the last account in the chain is
        impersonated in the request. A single string account must grant the
        originating account the Service Account Token Creator IAM role; with a
        sequence, each identity must grant that role to the directly preceding one,
        and the first must be granted it by the originating account (templated).
    :return: Dataplex data scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        run_response = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; its final segment is the job id.
        return run_response.job.name.rsplit("/", 1)[-1]
+
+
class DataplexExecuteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Executes a Dataplex data quality scan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param failure_mode: If True, the task also fails when the scan job itself finishes
        but the data quality checks did not pass.
    :param wait_timeout: Time in seconds to wait for the job to finish.
    :param polling_interval_seconds: time in seconds between polling for job completion.
        The value is considered only when running in deferrable mode. Must be greater than 0.
    :param deferrable: Run operator in the deferrable mode.
    :return: Dict representing Data scan job.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        failure_mode: bool = False,
        wait_timeout: int | None = None,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        polling_interval_seconds: int = 10,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.failure_mode = failure_mode
        # NOTE(review): wait_timeout is stored but never forwarded to hook.wait_for_job
        # below — confirm whether the hook is supposed to receive it.
        self.wait_timeout = wait_timeout
        self.deferrable = deferrable
        self.polling_interval_seconds = polling_interval_seconds

    def execute(self, context: Context) -> dict:
        """Start the scan job, then defer or wait synchronously for its completion."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        result = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; the trailing segment is the job id.
        job_id = result.job.name.split("/")[-1]

        if self.deferrable:
            # defer() raises TaskDeferred, so execution resumes in execute_complete()
            # when the trigger fires; nothing after this call runs in deferrable mode.
            self.defer(
                trigger=DataplexJobTrigger(
                    job_id=job_id,
                    data_scan_id=self.data_scan_id,
                    project_id=self.project_id,
                    region=self.region,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    polling_interval_seconds=self.polling_interval_seconds,
                ),
                method_name="execute_complete",
            )

        self.log.info("Waiting for job %s to complete", job_id)
        job = hook.wait_for_job(
            job=result.job,
            project_id=self.project_id,
            region=self.region,
        )
        self.log.info("Job %s completed successfully.", job_id)

        if job.state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job.name}")
        # Capture the resource name before converting: after MessageToDict the object
        # is a plain dict, and the original code's `{job.name}` in the failure message
        # would have raised AttributeError instead of the intended AirflowException.
        job_name = job.name
        job_dict = MessageToDict(job._pb)
        if self.failure_mode and not job_dict["dataQualityResult"]["passed"]:
            raise AirflowException(f"Job failed due to failure of data scan:\n{job_name}")
        return job_dict

    def execute_complete(self, context, event=None) -> dict:
        """
        Callback for when the trigger fires - returns immediately.

        Relies on trigger to throw an exception, otherwise it assumes execution was
        successful.
        """
        job_state = event["job_state"]
        job_id = event["job_id"]
        if job_state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job_id}")
        if job_state == DataScanJob.State.CANCELLED:
            raise AirflowException(f"Job was cancelled:\n{job_id}")
        self.log.info("%s completed successfully.", self.task_id)
        return event["job"]
+
+
class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
    """
    Gets a Data Scan Job resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param job_id: Optional. Data scan job identifier. When omitted, the first job
        returned when listing jobs for ``data_scan_id`` is used.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param wait_for_result: Wait for job to finish.
    :param wait_timeout: Time in seconds to wait for the job to finish.
    :param polling_interval_seconds: time in seconds between polling for job completion.
        The value is considered only when running in deferrable mode. Must be greater than 0.
    :param deferrable: Run operator in the deferrable mode.
    :return: Dict representing Data scan job.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        job_id: str | None = None,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        wait_for_result: bool = True,
        wait_timeout: int | None = None,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        polling_interval_seconds: int = 10,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.job_id = job_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.wait_for_result = wait_for_result
        # NOTE(review): wait_timeout is stored but never forwarded to hook.wait_for_job
        # below — confirm whether the hook is supposed to receive it.
        self.wait_timeout = wait_timeout
        self.deferrable = deferrable
        self.polling_interval_seconds = polling_interval_seconds

    @staticmethod
    def _handle_job_error(job) -> None:
        """Raise AirflowException when the job ended in FAILED or CANCELLED state."""
        if job.state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job.name}")
        if job.state == DataScanJob.State.CANCELLED:
            raise AirflowException(f"Job was cancelled:\n{job.name}")

    def execute(self, context: Context) -> dict:
        """Resolve the target job (latest when no job_id), fetch it and return it as a dict."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        if not self.job_id:
            # No explicit job id: fall back to the first job in the listing.
            # NOTE(review): presumably the API lists jobs most-recent-first — confirm.
            jobs = hook.list_data_scan_jobs(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Renamed from the original, misleading `job_ids`: these are job dicts.
            job_dicts = [DataScanJob.to_dict(job) for job in jobs]
            if not job_dicts:
                raise AirflowException("Create a job before.")
            self.job_id = job_dicts[0]["name"].split("/")[-1]

        if self.deferrable:
            # defer() raises TaskDeferred; execute_complete() returns the result later.
            # Restructured so the trailing state check and _pb conversion only run on
            # the synchronous path (previously they followed this branch and relied on
            # a dict or an unbound local, unreachable only because defer() raises).
            self.defer(
                trigger=DataplexJobTrigger(
                    job_id=self.job_id,
                    data_scan_id=self.data_scan_id,
                    project_id=self.project_id,
                    region=self.region,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    polling_interval_seconds=self.polling_interval_seconds,
                ),
                method_name="execute_complete",
            )

        job = hook.get_data_scan_job(
            project_id=self.project_id,
            region=self.region,
            job_id=self.job_id,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        if self.wait_for_result:
            job = hook.wait_for_job(
                job=job,
                project_id=self.project_id,
                region=self.region,
            )
        self._handle_job_error(job)
        return MessageToDict(job._pb)

    def execute_complete(self, context, event=None) -> dict:
        """
        Callback for when the trigger fires - returns immediately.

        Relies on trigger to throw an exception, otherwise it assumes execution was
        successful.
        """
        job_state = event["job_state"]
        job_id = event["job_id"]
        if job_state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job_id}")
        if job_state == DataScanJob.State.CANCELLED:
            raise AirflowException(f"Job was cancelled:\n{job_id}")
        self.log.info("%s completed successfully.", self.task_id)
        return event["job"]
+
+
class DataplexCreateZoneOperator(GoogleCloudBaseOperator):
    """
    Creates a zone resource within a lake.

    :param project_id: Required. The ID of the Google Cloud project that the zone belongs to.
    :param region: Required. The ID of the Google Cloud region that the zone belongs to.
    :param lake_id: Required. The ID of the Google Cloud lake that the zone belongs to.
    :param zone: Required. The request body contains an instance of Zone.
    :param zone_id: Required. Zone identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = (
        "project_id",
        "zone_id",
        "zone",
        "lake_id",
        "impersonation_chain",
    )
    template_fields_renderers = {"zone": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        lake_id: str,
        zone: dict[str, Any],
        zone_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.lake_id = lake_id
        self.zone = zone
        self.zone_id = zone_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        """Create the zone and block until the long-running operation completes."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating Dataplex zone %s", self.zone_id)

        operation = hook.create_zone(
            project_id=self.project_id,
            region=self.region,
            lake_id=self.lake_id,
            zone=self.zone,
            zone_id=self.zone_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )

        # Wait via the hook (consistent with the other operators in this module, e.g.
        # the delete-lake operator) so the configured timeout is honoured, instead of
        # a bare operation.result() that ignores self.timeout.
        hook.wait_for_operation(timeout=self.timeout, operation=operation)
        self.log.info("Dataplex zone %s created successfully!", self.zone_id)

Review Comment:
   done



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to