VladaZakharova commented on code in PR #32256:
URL: https://github.com/apache/airflow/pull/32256#discussion_r1280574553


##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Creates a DataScan result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan: Required. DataScan resource.

Review Comment:
   Is this parameter meant to hold the resource name?



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Creates a DataScan result.

Review Comment:
   ```suggestion
       Creates a DataScan resource.
   ```



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Creates a DataScan result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan: Required. DataScan resource.
+    :param data_scan_id: Required. DataScan identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+
+    :return: Dataplex data scan id
+    """
+
+    template_fields = ("project_id", "data_scan_id", "data_scan", 
"impersonation_chain")
+    template_fields_renderers = {"data_scan": "json"}
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        data_scan: dict[str, Any],
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.data_scan = data_scan
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context):
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
+        try:
+            operation = hook.create_data_scan(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                data_scan=self.data_scan,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            hook.wait_for_operation(timeout=self.timeout, operation=operation)
+        except AlreadyExists:
+            self.log.warning("Data scan already exists: %s", self.data_scan_id)

Review Comment:
   I think there is no need to output the same error message twice (once in 
self.log.warning and again in the raised exception). WDYT of keeping only one of them?



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Creates a DataScan result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan: Required. DataScan resource.
+    :param data_scan_id: Required. DataScan identifier.

Review Comment:
   Is this parameter an identifier (like a unique numeric ID)? What is the 
difference between it and the data_scan parameter?



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Creates a DataScan result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan: Required. DataScan resource.
+    :param data_scan_id: Required. DataScan identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+
+    :return: Dataplex data scan id
+    """
+
+    template_fields = ("project_id", "data_scan_id", "data_scan", 
"impersonation_chain")
+    template_fields_renderers = {"data_scan": "json"}
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        data_scan: dict[str, Any],
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.data_scan = data_scan
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context):
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
+        try:
+            operation = hook.create_data_scan(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                data_scan=self.data_scan,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            hook.wait_for_operation(timeout=self.timeout, operation=operation)
+        except AlreadyExists:
+            self.log.warning("Data scan already exists: %s", self.data_scan_id)
+            raise AirflowException("Data scan already exists: %s", 
{self.data_scan_id})
+        except GoogleAPICallError as e:
+            raise AirflowException(f"Error creating data scan 
{self.data_scan_id}", e)
+
+        self.log.info("Dataplex scan data %s created successfully!", 
self.data_scan_id)
+        return self.data_scan_id
+
+
+class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Deletes a DataScan resource.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan_id: Required. Data scan identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+
+    :return: None
+    """
+
+    template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context) -> None:
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+        self.log.info("Deleting Dataplex data scan job: %s", self.data_scan_id)
+
+        operation = hook.delete_data_scan(
+            project_id=self.project_id,
+            region=self.region,
+            data_scan_id=self.data_scan_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        hook.wait_for_operation(timeout=self.timeout, operation=operation)
+        self.log.info("Dataplex data_scan %s deleted successfully!", 
self.data_scan_id)
+
+
+class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
+    """
+    Runs an on-demand execution of a DataScan.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan_id: Required. Data scan identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+    :param asynchronous: Flag informing should the Dataplex job be run 
asynchronously.
+        This is useful for submitting long-running jobs and
+        waiting on them asynchronously using the 
DataplexDataQualityJobStatusSensor
+    :return: Dataplex data scan job id.
+    """
+
+    template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        asynchronous: bool = False,
+        *args,
+        **kwargs,
+    ) -> None:
+
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.asynchronous = asynchronous
+
+    def execute(self, context: Context) -> str:
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+        result = hook.run_data_scan(
+            project_id=self.project_id,
+            region=self.region,
+            data_scan_id=self.data_scan_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        job_id = result.job.name.split("/")[-1]
+        if not self.asynchronous:
+            hook.wait_for_data_scan_job(
+                job_id=job_id,
+                data_scan_id=self.data_scan_id,
+                project_id=self.project_id,
+                region=self.region,
+            )
+        return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+    """
+    Gets a Data Scan Job result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan_id: Required. Data scan identifier.
+    :param job_id: Optional. Data scan identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+        :param fail_on_dq_failure: If set to true, sensor throws exception if 
Data Quality fails.
+
+    :return: Dict representing Data scan job.
+    """
+
+    template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        job_id: str | None = None,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_dq_failure: bool = False,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.job_id = job_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_dq_failure = fail_on_dq_failure
+
+    @staticmethod
+    def _handle_job_error(job) -> None:
+        if job.state == DataScanJob.State.FAILED:
+            raise AirflowException(f"Job failed:\n{job.name}")
+        if job.state == DataScanJob.State.CANCELLED:
+            raise AirflowException(f"Job was cancelled:\n{job.name}")
+
+    def execute(self, context: Context) -> dict:
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+        # fetch the last job
+        if not self.job_id:
+            jobs = hook.list_data_scan_jobs(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            job_ids = [DataScanJob.to_dict(job) for job in jobs]
+            if not job_ids:
+                raise AirflowException("Create a job before.")
+            job_id = job_ids[0]["name"]
+            job = job_ids[0]
+            self.job_id = job_id.split("/")[-1]
+
+        job = hook.get_data_scan_job(
+            project_id=self.project_id,
+            region=self.region,
+            job_id=self.job_id,
+            data_scan_id=self.data_scan_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        self._handle_job_error(job)
+        if self.fail_on_dq_failure:
+            if job.state == DataScanJob.State.SUCCEEDED and not 
job.data_quality_result.passed:
+                raise AirflowException(f"DQ failed: {self.data_scan_id}")
+
+        return MessageToDict(job._pb)
+
+
+class DataplexCreateZoneOperator(GoogleCloudBaseOperator):
+    """
+    Creates a zone resource within a lake.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
task belongs to.
+    :param region: Required. The ID of the Google Cloud region that the task 
belongs to.
+    :param lake_id: Required. The ID of the Google Cloud lake that the task 
belongs to.
+    :param zone:  Required. The Request body contains an instance of Task.
+    :param zone_id: Required. Task identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v3'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+
+    :return: Zone
+    """
+
+    template_fields = (
+        "project_id",
+        "zone_id",
+        "zone",
+        "lake_id",
+        "impersonation_chain",
+    )
+    template_fields_renderers = {"zone": "json"}
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        lake_id: str,
+        zone: dict[str, Any],
+        zone_id: str,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.lake_id = lake_id
+        self.zone = zone
+        self.zone_id = zone_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context):
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+        self.log.info("Creating Dataplex zone %s", self.zone_id)
+
+        try:
+            operation = hook.create_zone(
+                project_id=self.project_id,
+                region=self.region,
+                lake_id=self.lake_id,
+                zone=self.zone,
+                zone_id=self.zone_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            zone = hook.wait_for_operation(timeout=self.timeout, 
operation=operation)
+
+        except GoogleAPICallError as e:
+            raise AirflowException(f"Error creating zone {self.zone_id}", e)

Review Comment:
   ```suggestion
               raise AirflowException(f"Error occurred when creating zone 
{self.zone_id}", e)
   ```



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. DataScan identifier.
    :param data_scan: Required. The DataScan resource body to create, as a dict in the
        DataScan message format.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "data_scan", "impersonation_chain")
    template_fields_renderers = {"data_scan": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        data_scan: dict[str, Any],
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.data_scan = data_scan
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                data_scan=self.data_scan,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Block until the long-running create operation finishes.
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists:
            self.log.warning("Data scan already exists: %s", self.data_scan_id)
            # BUGFIX: original passed "%s"-style args plus a *set literal* to
            # AirflowException, so the id was never interpolated into the message.
            raise AirflowException(f"Data scan already exists: {self.data_scan_id}")
        except GoogleAPICallError as e:
            raise AirflowException(f"Error occurred when creating data scan {self.data_scan_id}", e)

        self.log.info("Dataplex data scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Coordinates of the DataScan to delete.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Request tuning.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / identity.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Delete the DataScan and block until the operation completes."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Deleting Dataplex data scan job: %s", self.data_scan_id)

        # Wait on the long-running delete operation so downstream tasks only
        # run once the resource is actually gone.
        hook.wait_for_operation(
            timeout=self.timeout,
            operation=hook.delete_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            ),
        )
        self.log.info("Dataplex data_scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param asynchronous: Flag informing should the Dataplex job be run asynchronously.
        This is useful for submitting long-running jobs and
        waiting on them asynchronously using the DataplexDataQualityJobStatusSensor
    :return: Dataplex data scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        asynchronous: bool = False,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Scan coordinates.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Request tuning.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / identity.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        # When True, return immediately after triggering the job.
        self.asynchronous = asynchronous

    def execute(self, context: Context) -> str:
        """Trigger the scan and return its job id (optionally waiting for completion)."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        response = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; the last path segment is the job id.
        job_id = response.job.name.rpartition("/")[-1]

        if self.asynchronous:
            # Caller tracks completion separately, e.g. via DataplexDataQualityJobStatusSensor.
            return job_id

        hook.wait_for_data_scan_job(
            job_id=job_id,
            data_scan_id=self.data_scan_id,
            project_id=self.project_id,
            region=self.region,
        )
        return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+    """
+    Gets a Data Scan Job result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan_id: Required. Data scan identifier.
+    :param job_id: Optional. Data scan job identifier. If not set, the most recent job is used.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+    :param fail_on_dq_failure: If set to true, the operator throws an exception if the
+        Data Quality scan fails.
+
+    :return: Dict representing Data scan job.
+    """
+
+    template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        job_id: str | None = None,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_dq_failure: bool = False,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.job_id = job_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_dq_failure = fail_on_dq_failure
+
+    @staticmethod
+    def _handle_job_error(job) -> None:
+        if job.state == DataScanJob.State.FAILED:
+            raise AirflowException(f"Job failed:\n{job.name}")
+        if job.state == DataScanJob.State.CANCELLED:
+            raise AirflowException(f"Job was cancelled:\n{job.name}")
+
+    def execute(self, context: Context) -> dict:
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+        # fetch the last job
+        if not self.job_id:
+            jobs = hook.list_data_scan_jobs(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            job_ids = [DataScanJob.to_dict(job) for job in jobs]
+            if not job_ids:
+                raise AirflowException("Create a job before.")

Review Comment:
   ```suggestion
                   raise AirflowException("There are no jobs, you should create 
one before.")
   ```
   I am not sure what exactly `job_id` is here — is it already a data scan, or is it a 
representation of a running data scan execution? Either way, I think we could provide 
more details in the error message.



##########
docs/apache-airflow-providers-google/operators/cloud/dataplex.rst:
##########
@@ -142,3 +141,152 @@ To delete a lake you can use:
     :dedent: 4
     :start-after: [START howto_dataplex_delete_lake_operator]
     :end-before: [END howto_dataplex_delete_lake_operator]
+
+Create a data quality
+---------------------
+
+Before you create a dataplex data quality you need to define its body.
+For more information about the available fields to pass when creating a data 
quality, visit `Dataplex create data quality API. 
<https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.dataScans#DataScan>`__
+
+A simple data quality configuration can look as follows:
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 0
+    :start-after: [START howto_dataplex_data_quality_configuration]
+    :end-before: [END howto_dataplex_data_quality_configuration]
+
+With this configuration we can create the data quality:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateDataQualityScanOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_create_data_quality_operator]
+    :end-before: [END howto_dataplex_create_data_quality_operator]
+
+Delete a data quality
+---------------------
+
+To delete a Data Quality scan you can use:

Review Comment:
   ```suggestion
   To delete a Data Quality scan you can use:
   ```



##########
airflow/providers/google/cloud/sensors/dataplex.py:
##########
@@ -114,3 +115,98 @@ def poke(self, context: Context) -> bool:
         self.log.info("Current status of the Dataplex task %s => %s", 
self.dataplex_task_id, task_status)
 
         return task_status == TaskState.ACTIVE
+
+
+class DataplexDataQualityJobStatusSensor(BaseSensorOperator):
+    """
+    Check the status of the Dataplex data scan job is SUCCEEDED.

Review Comment:
   ```suggestion
       Check the status of the Dataplex DataQuality job.
   ```



##########
airflow/providers/google/cloud/sensors/dataplex.py:
##########
@@ -114,3 +115,98 @@ def poke(self, context: Context) -> bool:
         self.log.info("Current status of the Dataplex task %s => %s", 
self.dataplex_task_id, task_status)
 
         return task_status == TaskState.ACTIVE
+
+
+class DataplexDataQualityJobStatusSensor(BaseSensorOperator):
+    """
+    Check whether the status of the Dataplex DataQuality job is SUCCEEDED.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
task belongs to.
+    :param region: Required. The ID of the Google Cloud region that the task 
belongs to.
+    :param data_scan_id: Required. DataScan identifier.
+    :param job_id: Required. Job ID.
+    :param api_version: The version of the api that will be requested for 
example 'v3'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+    :param wait_timeout: The amount of time, in seconds, to wait for the 
request to complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param fail_on_dq_failure: If set to true, sensor throws exception if Data 
Quality fails.
+    :return: Boolean indicating if the job run has reached the 
``DataScanJob.State.SUCCEEDED``.
+    """
+
+    template_fields = ["job_id"]
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        job_id: str,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_dq_failure: bool = False,
+        wait_timeout: float | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.job_id = job_id
+        self.api_version = api_version
+        self.retry = retry
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_dq_failure = fail_on_dq_failure
+        self.wait_timeout = wait_timeout
+
+    def poke(self, context: Context) -> bool:
+        self.log.info("Waiting for job %s to be %s", self.job_id, 
DataScanJob.State.SUCCEEDED)
+
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+        try:
+            job = hook.get_data_scan_job(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                job_id=self.job_id,
+                timeout=self.wait_timeout,
+                retry=self.retry,
+                metadata=self.metadata,
+            )
+        except GoogleAPICallError as e:
+            raise AirflowException(f"Error getting data scan job: 
{self.data_scan_id}", e)

Review Comment:
   ```suggestion
               raise AirflowException(f"Error occurred when trying to retrieve 
data scan job: {self.data_scan_id}", e)
   ```



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. DataScan identifier.
    :param data_scan: Required. The DataScan resource body to create, as a dict in the
        DataScan message format.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "data_scan", "impersonation_chain")
    template_fields_renderers = {"data_scan": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        data_scan: dict[str, Any],
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.data_scan = data_scan
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                data_scan=self.data_scan,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Block until the long-running create operation finishes.
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists:
            self.log.warning("Data scan already exists: %s", self.data_scan_id)
            # BUGFIX: original passed "%s"-style args plus a *set literal* to
            # AirflowException, so the id was never interpolated into the message.
            raise AirflowException(f"Data scan already exists: {self.data_scan_id}")
        except GoogleAPICallError as e:
            raise AirflowException(f"Error occurred when creating data scan {self.data_scan_id}", e)

        self.log.info("Dataplex data scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Coordinates of the DataScan to delete.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Request tuning.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / identity.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Delete the DataScan and block until the operation completes."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Deleting Dataplex data scan job: %s", self.data_scan_id)

        # Wait on the long-running delete operation so downstream tasks only
        # run once the resource is actually gone.
        hook.wait_for_operation(
            timeout=self.timeout,
            operation=hook.delete_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            ),
        )
        self.log.info("Dataplex data_scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param asynchronous: Flag informing should the Dataplex job be run asynchronously.
        This is useful for submitting long-running jobs and
        waiting on them asynchronously using the DataplexDataQualityJobStatusSensor
    :return: Dataplex data scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        asynchronous: bool = False,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Scan coordinates.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Request tuning.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / identity.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        # When True, return immediately after triggering the job.
        self.asynchronous = asynchronous

    def execute(self, context: Context) -> str:
        """Trigger the scan and return its job id (optionally waiting for completion)."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        response = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; the last path segment is the job id.
        job_id = response.job.name.rpartition("/")[-1]

        if self.asynchronous:
            # Caller tracks completion separately, e.g. via DataplexDataQualityJobStatusSensor.
            return job_id

        hook.wait_for_data_scan_job(
            job_id=job_id,
            data_scan_id=self.data_scan_id,
            project_id=self.project_id,
            region=self.region,
        )
        return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+    """
+    Gets a Data Scan Job result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
lake belongs to.
+    :param region: Required. The ID of the Google Cloud region that the lake 
belongs to.
+    :param data_scan_id: Required. Data scan identifier.
+    :param job_id: Optional. Data scan job identifier.
+    :param api_version: The version of the api that will be requested for 
example 'v1'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+    :param fail_on_dq_failure: If set to true, the operator raises an exception if the
+        Data Quality scan did not pass.
+
+    :return: Dict representing Data scan job.
+    """
+
+    template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        job_id: str | None = None,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_dq_failure: bool = False,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.job_id = job_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_dq_failure = fail_on_dq_failure
+
+    @staticmethod
+    def _handle_job_error(job) -> None:
+        if job.state == DataScanJob.State.FAILED:
+            raise AirflowException(f"Job failed:\n{job.name}")
+        if job.state == DataScanJob.State.CANCELLED:
+            raise AirflowException(f"Job was cancelled:\n{job.name}")
+
+    def execute(self, context: Context) -> dict:
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+        # fetch the last job
+        if not self.job_id:
+            jobs = hook.list_data_scan_jobs(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            job_ids = [DataScanJob.to_dict(job) for job in jobs]
+            if not job_ids:
+                raise AirflowException("Create a job before.")
+            job_id = job_ids[0]["name"]
+            job = job_ids[0]

Review Comment:
   Are we using this variable later? I can see only using job_id variable



##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,714 @@ def execute(self, context: Context) -> None:
         DataplexLakeLink.persist(context=context, task_instance=self)
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. DataScan identifier.
    :param data_scan: Required. The DataScan resource configuration — the body of the create
        request (an instance of ``google.cloud.dataplex_v1.types.DataScan`` or a dict of the
        same shape).
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "data_scan", "impersonation_chain")
    template_fields_renderers = {"data_scan": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        data_scan: dict[str, Any],
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.data_scan = data_scan
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        """Create the DataScan, wait for the operation, and return its id."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex data scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                data_scan=self.data_scan,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists as e:
            self.log.warning("Data scan already exists: %s", self.data_scan_id)
            # Fixed: the original passed "%s" and a *set* literal as separate args to
            # AirflowException, producing a malformed message. Use an f-string and
            # chain the cause.
            raise AirflowException(f"Data scan already exists: {self.data_scan_id}") from e
        except GoogleAPICallError as e:
            # Chain the cause so the underlying API error stays in the traceback.
            raise AirflowException(f"Error creating data scan {self.data_scan_id}") from e

        self.log.info("Dataplex scan data %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The API version to request, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified,
        requests will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        If `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating
        account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Resource coordinates of the scan to delete.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Transport / request settings.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection and impersonation settings.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Issue the delete request and block until the operation completes."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Dataplex data scan job: %s", self.data_scan_id)
        delete_operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        hook.wait_for_operation(timeout=self.timeout, operation=delete_operation)
        self.log.info("Dataplex data_scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param api_version: The API version to request, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified,
        requests will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        If `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating
        account (templated).
    :param asynchronous: Flag informing should the Dataplex job be run asynchronously.
        This is useful for submitting long-running jobs and
        waiting on them asynchronously using the DataplexDataQualityJobStatusSensor

    :return: Dataplex data scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        asynchronous: bool = False,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Resource coordinates of the scan to run.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Transport / request settings.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection and impersonation settings.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        # When True, return right after submitting the job instead of waiting.
        self.asynchronous = asynchronous

    def execute(self, context: Context) -> str:
        """Submit the scan run and return its job id (waiting unless asynchronous)."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        response = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is fully qualified; its id is the final path segment.
        job_id = response.job.name.rsplit("/", 1)[-1]
        if self.asynchronous:
            return job_id
        hook.wait_for_data_scan_job(
            job_id=job_id,
            data_scan_id=self.data_scan_id,
            project_id=self.project_id,
            region=self.region,
        )
        return job_id
+
+
class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
    """
    Gets a Data Scan Job result.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data scan identifier.
    :param job_id: Optional. Data scan job identifier. When not provided, the most
        recent job of the scan is used.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param fail_on_dq_failure: If set to true, the operator raises an exception when
        the Data Quality scan did not pass.

    :return: Dict representing Data scan job.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        job_id: str | None = None,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        fail_on_dq_failure: bool = False,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.job_id = job_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.fail_on_dq_failure = fail_on_dq_failure

    @staticmethod
    def _handle_job_error(job) -> None:
        """Raise AirflowException when the job ended in FAILED or CANCELLED state."""
        if job.state == DataScanJob.State.FAILED:
            raise AirflowException(f"Job failed:\n{job.name}")
        if job.state == DataScanJob.State.CANCELLED:
            raise AirflowException(f"Job was cancelled:\n{job.name}")

    def execute(self, context: Context) -> dict:
        """Fetch the scan job (latest one if job_id is unset) and return it as a dict."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        if not self.job_id:
            # No explicit job requested: fall back to the most recent job of this scan.
            jobs = hook.list_data_scan_jobs(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            job_dicts = [DataScanJob.to_dict(job) for job in jobs]
            if not job_dicts:
                raise AirflowException("There are no jobs, you should create one before.")
            # Fixed: removed the dead `job = job_ids[0]` assignment flagged in review.
            # Assumes the listing returns jobs newest-first — TODO confirm against API;
            # the job id is the final segment of the fully-qualified job name.
            self.job_id = job_dicts[0]["name"].split("/")[-1]

        job = hook.get_data_scan_job(
            project_id=self.project_id,
            region=self.region,
            job_id=self.job_id,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        self._handle_job_error(job)
        if self.fail_on_dq_failure:
            # A job can SUCCEED while the data-quality rules themselves fail.
            if job.state == DataScanJob.State.SUCCEEDED and not job.data_quality_result.passed:
                raise AirflowException(f"DQ failed: {self.data_scan_id}")

        return MessageToDict(job._pb)
+
+
class DataplexCreateZoneOperator(GoogleCloudBaseOperator):
    """
    Creates a zone resource within a lake.

    :param project_id: Required. The ID of the Google Cloud project that the zone belongs to.
    :param region: Required. The ID of the Google Cloud region that the zone belongs to.
    :param lake_id: Required. The ID of the Google Cloud lake that the zone belongs to.
    :param zone: Required. The request body contains an instance of Zone.
    :param zone_id: Required. Zone identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Zone
    """

    template_fields = (
        "project_id",
        "zone_id",
        "zone",
        "lake_id",
        "impersonation_chain",
    )
    template_fields_renderers = {"zone": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        lake_id: str,
        zone: dict[str, Any],
        zone_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.lake_id = lake_id
        self.zone = zone
        self.zone_id = zone_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        """Create the zone, wait for the operation, and return the Zone as a dict."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating Dataplex zone %s", self.zone_id)

        try:
            operation = hook.create_zone(
                project_id=self.project_id,
                region=self.region,
                lake_id=self.lake_id,
                zone=self.zone,
                zone_id=self.zone_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            zone = hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except GoogleAPICallError as e:
            # Chain the cause so the underlying API error stays in the traceback.
            raise AirflowException(f"Error occurred when creating zone {self.zone_id}") from e
        except Exception as e:
            # Wrap anything unexpected in AirflowException, preserving the cause.
            raise AirflowException(e) from e

        self.log.info("Dataplex zone %s created successfully!", self.zone_id)
        return Zone.to_dict(zone)
+
+
class DataplexDeleteZoneOperator(GoogleCloudBaseOperator):
    """
    Deletes a zone resource. All assets within a zone must be deleted before the zone can be deleted.

    :param project_id: Required. The ID of the Google Cloud project that the zone belongs to.
    :param region: Required. The ID of the Google Cloud region that the zone belongs to.
    :param lake_id: Required. The ID of the Google Cloud lake that the zone belongs to.
    :param zone_id: Required. Zone identifier.
    :param api_version: The API version to request, for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified,
        requests will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        If `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating
        account (templated).

    :return: None
    """

    template_fields = (
        "project_id",
        "lake_id",
        "zone_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        project_id: str,
        region: str,
        lake_id: str,
        zone_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Coordinates of the zone to delete.
        self.project_id = project_id
        self.region = region
        self.lake_id = lake_id
        self.zone_id = zone_id
        # Transport / request settings.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection and impersonation settings.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        """Issue the delete request and block until the operation completes."""
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Dataplex zone %s", self.zone_id)
        delete_operation = hook.delete_zone(
            project_id=self.project_id,
            region=self.region,
            lake_id=self.lake_id,
            zone_id=self.zone_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        hook.wait_for_operation(timeout=self.timeout, operation=delete_operation)
        self.log.info("Dataplex zone %s deleted successfully!", self.zone_id)
+
+
+class DataplexCreateAssetOperator(GoogleCloudBaseOperator):
+    """
+    Creates an asset result.
+
+    :param project_id: Required. The ID of the Google Cloud project that the 
task belongs to.
+    :param region: Required. The ID of the Google Cloud region that the task 
belongs to.
+    :param lake_id: Required. The ID of the Google Cloud lake that the lake 
belongs to.
+    :param zone_id: Required. Zone identifier.
+    :param asset_id: Required. Asset identifier.
+    :param asset: Required. Asset resource.
+    :param api_version: The version of the api that will be requested for 
example 'v3'.
+    :param retry: A retry object used  to retry requests. If `None` is 
specified, requests
+        will not be retried.
+    :param timeout: The amount of time, in seconds, to wait for the request to 
complete.
+        Note that if `retry` is specified, the timeout applies to each 
individual attempt.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using 
short-term
+        credentials, or chained list of accounts required to get the 
access_token
+        of the last account in the list, which will be impersonated in the 
request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding 
identity, with first
+        account from the list granting this role to the originating account 
(templated).
+    :return: Asset
+    """
+
+    template_fields = (
+        "project_id",
+        "zone_id",
+        "asset_id",
+        "asset",
+        "impersonation_chain",
+    )
+    template_fields_renderers = {"asset": "json"}
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        lake_id: str,
+        asset: dict[str, Any],
+        zone_id: str,
+        asset_id: str,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        timeout: float | None = None,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.lake_id = lake_id
+        self.asset = asset
+        self.zone_id = zone_id
+        self.asset_id = asset_id
+        self.api_version = api_version
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context):
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+        self.log.info("Creating Dataplex asset %s", self.zone_id)
+        try:
+            operation = hook.create_asset(
+                project_id=self.project_id,
+                region=self.region,
+                lake_id=self.lake_id,
+                asset=self.asset,
+                zone_id=self.zone_id,
+                asset_id=self.asset_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            result = hook.wait_for_operation(timeout=self.timeout, 
operation=operation)
+        except GoogleAPICallError as e:
+            raise AirflowException(f"Error creating asset {self.asset_id}", e)

Review Comment:
   ```suggestion
               raise AirflowException(f"Error occurred when creating asset 
{self.asset_id}", e)
   ```



##########
docs/apache-airflow-providers-google/operators/cloud/dataplex.rst:
##########
@@ -142,3 +141,152 @@ To delete a lake you can use:
     :dedent: 4
     :start-after: [START howto_dataplex_delete_lake_operator]
     :end-before: [END howto_dataplex_delete_lake_operator]
+
+Create a data quality
+---------------------
+
+Before you create a dataplex data quality you need to define its body.
+For more information about the available fields to pass when creating a data 
quality, visit `Dataplex create data quality API. 
<https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.dataScans#DataScan>`__
+
+A simple data quality configuration can look as follows:
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 0
+    :start-after: [START howto_dataplex_data_quality_configuration]
+    :end-before: [END howto_dataplex_data_quality_configuration]
+
+With this configuration we can create the data quality:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateDataQualityScanOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_create_data_quality_operator]
+    :end-before: [END howto_dataplex_create_data_quality_operator]
+
+Delete a data quality
+---------------------
+
+To delete a data quality you can use:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteDataQualityScanOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_delete_data_quality_operator]
+    :end-before: [END howto_dataplex_delete_data_quality_operator]
+
+Run a data quality
+------------------
+
+We have possibility to run Dataplex data quality in the sync mode and async:

Review Comment:
   ```suggestion
   You can run Dataplex Data Quality in asynchronous modes to later check its 
status using sensor:
   ```
   The example you are pointing to is running asynchronously only



##########
docs/apache-airflow-providers-google/operators/cloud/dataplex.rst:
##########
@@ -142,3 +141,152 @@ To delete a lake you can use:
     :dedent: 4
     :start-after: [START howto_dataplex_delete_lake_operator]
     :end-before: [END howto_dataplex_delete_lake_operator]
+
+Create a data quality
+---------------------
+
+Before you create a dataplex data quality you need to define its body.
+For more information about the available fields to pass when creating a data 
quality, visit `Dataplex create data quality API. 
<https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.dataScans#DataScan>`__
+
+A simple data quality configuration can look as follows:
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 0
+    :start-after: [START howto_dataplex_data_quality_configuration]
+    :end-before: [END howto_dataplex_data_quality_configuration]
+
+With this configuration we can create the data quality:

Review Comment:
   ```suggestion
   With this configuration we can create the Data Quality scan:
   ```



##########
docs/apache-airflow-providers-google/operators/cloud/dataplex.rst:
##########
@@ -142,3 +141,152 @@ To delete a lake you can use:
     :dedent: 4
     :start-after: [START howto_dataplex_delete_lake_operator]
     :end-before: [END howto_dataplex_delete_lake_operator]
+
+Create a data quality
+---------------------
+
+Before you create a dataplex data quality you need to define its body.
+For more information about the available fields to pass when creating a data 
quality, visit `Dataplex create data quality API. 
<https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.dataScans#DataScan>`__
+
+A simple data quality configuration can look as follows:
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 0
+    :start-after: [START howto_dataplex_data_quality_configuration]
+    :end-before: [END howto_dataplex_data_quality_configuration]
+
+With this configuration we can create the data quality:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateDataQualityScanOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_create_data_quality_operator]
+    :end-before: [END howto_dataplex_create_data_quality_operator]
+
+Delete a data quality
+---------------------
+
+To delete a data quality you can use:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteDataQualityScanOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_delete_data_quality_operator]
+    :end-before: [END howto_dataplex_delete_data_quality_operator]
+
+Run a data quality
+------------------
+
+We have possibility to run Dataplex data quality in the sync mode and async:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexRunDataQualityScanOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_run_data_quality_operator]
+    :end-before: [END howto_dataplex_run_data_quality_operator]
+
+To check that running Dataplex data quality succeeded you can use:
+
+:class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexDataQualityJobStatusSensor`.
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_data_scan_job_state_sensor]
+    :end-before: [END howto_dataplex_data_scan_job_state_sensor]
+
+Get a data quality job
+----------------------
+
+To get a data quality job you can use:
+
+:class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataQualityScanResultOperator`
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_dataplex_get_data_quality_job_operator]
+    :end-before: [END howto_dataplex_get_data_quality_job_operator]
+
+Create a zone
+-------------
+
+Before you create a dataplex zone you need to define its body.
+
+For more information about the available fields to pass when creating a zone, 
visit `Dataplex create zone API. 
<https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.lakes.zones#Zone>`__
+
+A simple zone configuration can look as follows:
+
+.. exampleinclude:: 
/../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+    :language: python
+    :dedent: 0
+    :start-after: [START howto_dataplex_zone_configuration]
+    :end-before: [END howto_dataplex_zone_configuration]
+
+With this configuration we can create the zone:

Review Comment:
   ```suggestion
   With this configuration we can create a zone:
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to