VladaZakharova commented on code in PR #32256:
URL: https://github.com/apache/airflow/pull/32256#discussion_r1282993039
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Creates a DataScan resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param body: Required. The Request body contains an instance of DataScan.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+
+ :return: Dataplex data scan id
+ """
+
+ template_fields = ("project_id", "data_scan_id", "body",
"impersonation_chain")
+ template_fields_renderers = {"body": "json"}
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ body: dict[str, Any] | DataScan,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.body = body
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context):
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ self.log.info("Creating Dataplex Data Quality scan %s",
self.data_scan_id)
+ try:
+ operation = hook.create_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ body=self.body,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ hook.wait_for_operation(timeout=self.timeout, operation=operation)
+ except AlreadyExists:
+ raise AirflowException("Data Quality scan already exists: %s",
{self.data_scan_id})
+ except GoogleAPICallError as e:
+ raise AirflowException(f"Error creating Data Quality scan
{self.data_scan_id}", e)
+
+ self.log.info("Dataplex Data Quality scan %s created successfully!",
self.data_scan_id)
+ return self.data_scan_id
+
+
+class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Deletes a DataScan resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+
+ :return: None
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> None:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ self.log.info("Deleting Dataplex Data Quality Scan: %s",
self.data_scan_id)
+
+ operation = hook.delete_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ hook.wait_for_operation(timeout=self.timeout, operation=operation)
+ self.log.info("Dataplex Data Quality scan %s deleted successfully!",
self.data_scan_id)
+
+
+class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Runs an on-demand execution of a DataScan.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
Review Comment:
If this timeout is for the whole Airflow task, is the description here
correct? How is this documented in other Airflow operators?
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Creates a DataScan resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param body: Required. The Request body contains an instance of DataScan.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+
+ :return: Dataplex data scan id
+ """
+
+ template_fields = ("project_id", "data_scan_id", "body",
"impersonation_chain")
+ template_fields_renderers = {"body": "json"}
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ body: dict[str, Any] | DataScan,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.body = body
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context):
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ self.log.info("Creating Dataplex Data Quality scan %s",
self.data_scan_id)
+ try:
+ operation = hook.create_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ body=self.body,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ hook.wait_for_operation(timeout=self.timeout, operation=operation)
+ except AlreadyExists:
+ raise AirflowException("Data Quality scan already exists: %s",
{self.data_scan_id})
+ except GoogleAPICallError as e:
+ raise AirflowException(f"Error creating Data Quality scan
{self.data_scan_id}", e)
+
+ self.log.info("Dataplex Data Quality scan %s created successfully!",
self.data_scan_id)
+ return self.data_scan_id
+
+
+class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Deletes a DataScan resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+
+ :return: None
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> None:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ self.log.info("Deleting Dataplex Data Quality Scan: %s",
self.data_scan_id)
+
+ operation = hook.delete_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ hook.wait_for_operation(timeout=self.timeout, operation=operation)
+ self.log.info("Dataplex Data Quality scan %s deleted successfully!",
self.data_scan_id)
+
+
+class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Runs an on-demand execution of a DataScan.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param asynchronous: Flag informing should the Dataplex job be run
asynchronously.
+ This is useful for submitting long-running jobs and
+ waiting on them asynchronously using the
DataplexDataQualityJobStatusSensor
+ :param fail_on_dq_failure: If set to true and not all Data Quality scan
rules have been passed,
+ an exception is thrown. If set to false and not all Data Quality scan
rules have been passed,
+ execution will finish with success.
+ :param run_timeout: The amount of time, in seconds, to wait for the result.
+
+ :return: Dataplex Data Quality scan job id.
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ asynchronous: bool = False,
+ fail_on_dq_failure: bool = False,
+ run_timeout: float | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+ self.asynchronous = asynchronous
+ self.fail_on_dq_failure = fail_on_dq_failure
+ self.run_timeout = run_timeout
+
+ def execute(self, context: Context) -> str:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ result = hook.run_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ job_id = result.job.name.split("/")[-1]
+ if not self.asynchronous:
+ hook.wait_for_data_scan_job(
+ job_id=job_id,
+ data_scan_id=self.data_scan_id,
+ project_id=self.project_id,
+ region=self.region,
+ fail_on_dq_failure=self.fail_on_dq_failure,
+ run_timeout=self.run_timeout,
+ )
+ return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+ """
+ Gets a Data Scan Job resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param job_id: Optional. Data Quality scan job identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param fail_on_dq_failure: If set to true and not all Data Quality scan
rules have been passed,
+ an exception is thrown. If set to false and not all Data Quality scan
rules have been passed,
+ execution will finish with success.
+ :param wait_for_result: Wait for job to finish.
+ :param run_timeout: The amount of time, in seconds, to wait for the result.
+
+ :return: Dict representing DataScanJob.
+ When the job completes with a successful status, information about the
Data Quality result
+ is available.
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ job_id: str | None = None,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ fail_on_dq_failure: bool = False,
+ wait_for_results: bool = True,
+ fail_on_job_failure: bool = False,
+ run_timeout: float | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.job_id = job_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+ self.fail_on_dq_failure = fail_on_dq_failure
+ self.fail_on_job_failure = fail_on_job_failure
+ self.wait_for_results = wait_for_results
+ self.run_timeout = run_timeout
+
+ def execute(self, context: Context) -> dict:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+ # fetch the last job
+ if not self.job_id:
+ jobs = hook.list_data_scan_jobs(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ job_ids = [DataScanJob.to_dict(job) for job in jobs]
+ if not job_ids:
+ raise AirflowException("There are no jobs, you should create
one before.")
+ job_id = job_ids[0]["name"]
+ self.job_id = job_id.split("/")[-1]
+
+ if self.wait_for_results:
+ hook.wait_for_data_scan_job(
+ job_id=self.job_id,
+ data_scan_id=self.data_scan_id,
+ project_id=self.project_id,
+ region=self.region,
+ fail_on_dq_failure=self.fail_on_dq_failure,
+ fail_on_job_failure=self.fail_on_job_failure,
+ fail_on_timeout=False,
+ run_timeout=self.run_timeout,
+ )
+
+ job = hook.get_data_scan_job(
+ project_id=self.project_id,
+ region=self.region,
+ job_id=self.job_id,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+
+ if self.fail_on_dq_failure:
+ if job.state == DataScanJob.State.SUCCEEDED and not
job.data_quality_result.passed:
+ raise AirflowException(f"Data Quality failed:
{self.data_scan_id}")
Review Comment:
What will be the output if the job has succeeded and
`job.data_quality_result.passed` is true?
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
+class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Creates a DataScan resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param body: Required. The Request body contains an instance of DataScan.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+
+ :return: Dataplex data scan id
+ """
+
+ template_fields = ("project_id", "data_scan_id", "body",
"impersonation_chain")
+ template_fields_renderers = {"body": "json"}
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ body: dict[str, Any] | DataScan,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.body = body
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context):
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ self.log.info("Creating Dataplex Data Quality scan %s",
self.data_scan_id)
+ try:
+ operation = hook.create_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ body=self.body,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ hook.wait_for_operation(timeout=self.timeout, operation=operation)
+ except AlreadyExists:
+ raise AirflowException("Data Quality scan already exists: %s",
{self.data_scan_id})
+ except GoogleAPICallError as e:
+ raise AirflowException(f"Error creating Data Quality scan
{self.data_scan_id}", e)
+
+ self.log.info("Dataplex Data Quality scan %s created successfully!",
self.data_scan_id)
+ return self.data_scan_id
+
+
+class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Deletes a DataScan resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+
+ :return: None
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> None:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ self.log.info("Deleting Dataplex Data Quality Scan: %s",
self.data_scan_id)
+
+ operation = hook.delete_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ hook.wait_for_operation(timeout=self.timeout, operation=operation)
+ self.log.info("Dataplex Data Quality scan %s deleted successfully!",
self.data_scan_id)
+
+
+class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Runs an on-demand execution of a DataScan.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param asynchronous: Flag informing should the Dataplex job be run
asynchronously.
+ This is useful for submitting long-running jobs and
+ waiting on them asynchronously using the
DataplexDataQualityJobStatusSensor
+ :param fail_on_dq_failure: If set to true and not all Data Quality scan
rules have been passed,
+ an exception is thrown. If set to false and not all Data Quality scan
rules have been passed,
+ execution will finish with success.
+ :param run_timeout: The amount of time, in seconds, to wait for the result.
+
+ :return: Dataplex Data Quality scan job id.
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ asynchronous: bool = False,
+ fail_on_dq_failure: bool = False,
+ run_timeout: float | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+ self.asynchronous = asynchronous
+ self.fail_on_dq_failure = fail_on_dq_failure
+ self.run_timeout = run_timeout
+
+ def execute(self, context: Context) -> str:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+
+ result = hook.run_data_scan(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ job_id = result.job.name.split("/")[-1]
+ if not self.asynchronous:
+ hook.wait_for_data_scan_job(
+ job_id=job_id,
+ data_scan_id=self.data_scan_id,
+ project_id=self.project_id,
+ region=self.region,
+ fail_on_dq_failure=self.fail_on_dq_failure,
+ run_timeout=self.run_timeout,
+ )
+ return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+ """
+ Gets a Data Scan Job resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param job_id: Optional. Data Quality scan job identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param fail_on_dq_failure: If set to true and not all Data Quality scan
rules have been passed,
+ an exception is thrown. If set to false and not all Data Quality scan
rules have been passed,
+ execution will finish with success.
+ :param wait_for_result: Wait for job to finish.
+ :param run_timeout: The amount of time, in seconds, to wait for the result.
Review Comment:
Could you please specify the result of which operation we are waiting for?
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource (a Dataplex Data Quality scan).

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param body: Required. The Request body contains an instance of DataScan.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "body", "impersonation_chain")
    template_fields_renderers = {"body": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        body: dict[str, Any] | DataScan,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.body = body
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex Data Quality scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                body=self.body,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Block until the long-running create operation completes, so the id
            # returned below refers to a scan that actually exists.
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists:
            # BUGFIX: the original passed printf-style args to AirflowException
            # (exceptions do not format them) and `{self.data_scan_id}` built a
            # one-element set, producing a confusing message. Use an f-string.
            raise AirflowException(f"Data Quality scan {self.data_scan_id} already exists.")
        except GoogleAPICallError as e:
            # Chain the underlying API error instead of stuffing it into args,
            # so the original traceback is preserved.
            raise AirflowException(f"Error creating Data Quality scan {self.data_scan_id}") from e

        self.log.info("Dataplex Data Quality scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes an existing DataScan resource (a Dataplex Data Quality scan).

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Dataplex Data Quality Scan: %s", self.data_scan_id)

        delete_operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # delete_data_scan returns a long-running operation; block until done.
        hook.wait_for_operation(timeout=self.timeout, operation=delete_operation)
        self.log.info("Dataplex Data Quality scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param asynchronous: Flag informing that the Dataplex job should be run asynchronously.
        This is useful for submitting long-running jobs and
        waiting on them asynchronously using the DataplexDataQualityJobStatusSensor.
    :param fail_on_dq_failure: If set to true and not all Data Quality scan rules have been passed,
        an exception is thrown. If set to false and not all Data Quality scan rules have been passed,
        execution will finish with success.
    :param run_timeout: The amount of time, in seconds, to wait for the Data Quality scan job
        to finish. Only used when ``asynchronous`` is False.

    :return: Dataplex Data Quality scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        asynchronous: bool = False,
        fail_on_dq_failure: bool = False,
        run_timeout: float | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.asynchronous = asynchronous
        self.fail_on_dq_failure = fail_on_dq_failure
        self.run_timeout = run_timeout

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        result = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; the last path segment is the id.
        job_id = result.job.name.split("/")[-1]
        if not self.asynchronous:
            # Synchronous mode: block until the job finishes (or run_timeout
            # elapses), optionally failing the task on Data Quality rule failures.
            hook.wait_for_data_scan_job(
                job_id=job_id,
                data_scan_id=self.data_scan_id,
                project_id=self.project_id,
                region=self.region,
                fail_on_dq_failure=self.fail_on_dq_failure,
                run_timeout=self.run_timeout,
            )
        return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+ """
+ Gets a Data Scan Job resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param job_id: Optional. Data Quality scan job identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param fail_on_dq_failure: If set to true and not all Data Quality scan
rules have been passed,
+ an exception is thrown. If set to false and not all Data Quality scan
rules have been passed,
+ execution will finish with success.
+ :param wait_for_results: Wait for the Data Quality scan job to finish.
+ :param run_timeout: The amount of time, in seconds, to wait for the result.
+
+ :return: Dict representing DataScanJob.
+ When the job completes with a successful status, information about the
Data Quality result
+ is available.
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ job_id: str | None = None,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ fail_on_dq_failure: bool = False,
+ wait_for_results: bool = True,
+ fail_on_job_failure: bool = False,
+ run_timeout: float | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.job_id = job_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+ self.fail_on_dq_failure = fail_on_dq_failure
+ self.fail_on_job_failure = fail_on_job_failure
Review Comment:
you don't have description for this param in header. What is this parameter
about?
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource (a Dataplex Data Quality scan).

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param body: Required. The Request body contains an instance of DataScan.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "body", "impersonation_chain")
    template_fields_renderers = {"body": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        body: dict[str, Any] | DataScan,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.body = body
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex Data Quality scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                body=self.body,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Block until the long-running create operation completes, so the id
            # returned below refers to a scan that actually exists.
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists:
            # BUGFIX: the original passed printf-style args to AirflowException
            # (exceptions do not format them) and `{self.data_scan_id}` built a
            # one-element set, producing a confusing message. Use an f-string.
            raise AirflowException(f"Data Quality scan {self.data_scan_id} already exists.")
        except GoogleAPICallError as e:
            # Chain the underlying API error instead of stuffing it into args,
            # so the original traceback is preserved.
            raise AirflowException(f"Error creating Data Quality scan {self.data_scan_id}") from e

        self.log.info("Dataplex Data Quality scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes an existing DataScan resource (a Dataplex Data Quality scan).

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Dataplex Data Quality Scan: %s", self.data_scan_id)

        delete_operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # delete_data_scan returns a long-running operation; block until done.
        hook.wait_for_operation(timeout=self.timeout, operation=delete_operation)
        self.log.info("Dataplex Data Quality scan %s deleted successfully!", self.data_scan_id)
+
+
+class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
+ """
+ Runs an on-demand execution of a DataScan.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param asynchronous: Flag informing should the Dataplex job be run
asynchronously.
Review Comment:
```suggestion
:param asynchronous: Flag informing that the Dataplex job should be run
asynchronously.
```
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource (a Dataplex Data Quality scan).

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param body: Required. The Request body contains an instance of DataScan.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "body", "impersonation_chain")
    template_fields_renderers = {"body": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        body: dict[str, Any] | DataScan,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.body = body
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex Data Quality scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                body=self.body,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Block until the long-running create operation completes, so the id
            # returned below refers to a scan that actually exists.
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists:
            # BUGFIX: the original passed printf-style args to AirflowException
            # (exceptions do not format them) and `{self.data_scan_id}` built a
            # one-element set, producing a confusing message. Use an f-string.
            raise AirflowException(f"Data Quality scan {self.data_scan_id} already exists.")
        except GoogleAPICallError as e:
            # Chain the underlying API error instead of stuffing it into args,
            # so the original traceback is preserved.
            raise AirflowException(f"Error creating Data Quality scan {self.data_scan_id}") from e

        self.log.info("Dataplex Data Quality scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes an existing DataScan resource (a Dataplex Data Quality scan).

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Dataplex Data Quality Scan: %s", self.data_scan_id)

        delete_operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # delete_data_scan returns a long-running operation; block until done.
        hook.wait_for_operation(timeout=self.timeout, operation=delete_operation)
        self.log.info("Dataplex Data Quality scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param asynchronous: Flag informing that the Dataplex job should be run asynchronously.
        This is useful for submitting long-running jobs and
        waiting on them asynchronously using the DataplexDataQualityJobStatusSensor.
    :param fail_on_dq_failure: If set to true and not all Data Quality scan rules have been passed,
        an exception is thrown. If set to false and not all Data Quality scan rules have been passed,
        execution will finish with success.
    :param run_timeout: The amount of time, in seconds, to wait for the Data Quality scan job
        to finish. Only used when ``asynchronous`` is False.

    :return: Dataplex Data Quality scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        asynchronous: bool = False,
        fail_on_dq_failure: bool = False,
        run_timeout: float | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.asynchronous = asynchronous
        self.fail_on_dq_failure = fail_on_dq_failure
        self.run_timeout = run_timeout

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        result = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job name is a full resource path; the last path segment is the id.
        job_id = result.job.name.split("/")[-1]
        if not self.asynchronous:
            # Synchronous mode: block until the job finishes (or run_timeout
            # elapses), optionally failing the task on Data Quality rule failures.
            hook.wait_for_data_scan_job(
                job_id=job_id,
                data_scan_id=self.data_scan_id,
                project_id=self.project_id,
                region=self.region,
                fail_on_dq_failure=self.fail_on_dq_failure,
                run_timeout=self.run_timeout,
            )
        return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+ """
+ Gets a Data Scan Job resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param job_id: Optional. Data Quality scan job identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
+ Note that if `retry` is specified, the timeout applies to each
individual attempt.
+ :param metadata: Additional metadata that is provided to the method.
+ :param gcp_conn_id: The connection ID to use when fetching connection info.
+ :param impersonation_chain: Optional service account to impersonate using
short-term
+ credentials, or chained list of accounts required to get the
access_token
+ of the last account in the list, which will be impersonated in the
request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding
identity, with first
+ account from the list granting this role to the originating account
(templated).
+ :param fail_on_dq_failure: If set to true and not all Data Quality scan
rules have been passed,
+ an exception is thrown. If set to false and not all Data Quality scan
rules have been passed,
+ execution will finish with success.
+ :param wait_for_results: Wait for the Data Quality scan job to finish.
+ :param run_timeout: The amount of time, in seconds, to wait for the result.
+
+ :return: Dict representing DataScanJob.
+ When the job completes with a successful status, information about the
Data Quality result
+ is available.
+ """
+
+ template_fields = ("project_id", "data_scan_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ project_id: str,
+ region: str,
+ data_scan_id: str,
+ job_id: str | None = None,
+ api_version: str = "v1",
+ retry: Retry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ fail_on_dq_failure: bool = False,
+ wait_for_results: bool = True,
+ fail_on_job_failure: bool = False,
+ run_timeout: float | None = None,
+ *args,
+ **kwargs,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.data_scan_id = data_scan_id
+ self.job_id = job_id
+ self.api_version = api_version
+ self.retry = retry
+ self.timeout = timeout
+ self.metadata = metadata
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+ self.fail_on_dq_failure = fail_on_dq_failure
+ self.fail_on_job_failure = fail_on_job_failure
+ self.wait_for_results = wait_for_results
+ self.run_timeout = run_timeout
+
+ def execute(self, context: Context) -> dict:
+ hook = DataplexHook(
+ gcp_conn_id=self.gcp_conn_id,
+ api_version=self.api_version,
+ impersonation_chain=self.impersonation_chain,
+ )
+ # fetch the last job
+ if not self.job_id:
+ jobs = hook.list_data_scan_jobs(
+ project_id=self.project_id,
+ region=self.region,
+ data_scan_id=self.data_scan_id,
+ retry=self.retry,
+ timeout=self.timeout,
+ metadata=self.metadata,
+ )
+ job_ids = [DataScanJob.to_dict(job) for job in jobs]
+ if not job_ids:
+ raise AirflowException("There are no jobs, you should create
one before.")
+ job_id = job_ids[0]["name"]
+ self.job_id = job_id.split("/")[-1]
+
+ if self.wait_for_results:
Review Comment:
i can't see the implementation of waiting logic here:
If no results available wait until timeout for the results to be available
##########
airflow/providers/google/cloud/operators/dataplex.py:
##########
@@ -610,3 +612,740 @@ def execute(self, context: Context) -> None:
DataplexLakeLink.persist(context=context, task_instance=self)
hook.wait_for_operation(timeout=self.timeout, operation=operation)
self.log.info("Dataplex lake %s deleted successfully!", self.lake_id)
+
+
class DataplexCreateDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Creates a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param body: Required. The Request body contains an instance of DataScan.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: Dataplex data scan id
    """

    template_fields = ("project_id", "data_scan_id", "body", "impersonation_chain")
    template_fields_renderers = {"body": "json"}

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        body: dict[str, Any] | DataScan,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        self.body = body
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Creating Dataplex Data Quality scan %s", self.data_scan_id)
        try:
            operation = hook.create_data_scan(
                project_id=self.project_id,
                region=self.region,
                data_scan_id=self.data_scan_id,
                body=self.body,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            # Block until the long-running create operation finishes so failures surface here.
            hook.wait_for_operation(timeout=self.timeout, operation=operation)
        except AlreadyExists:
            # FIX: the original passed ("...: %s", {self.data_scan_id}) — the %s placeholder was
            # never formatted and {..} built a one-element set. Use an f-string instead.
            raise AirflowException(f"Data Quality scan already exists: {self.data_scan_id}")
        except GoogleAPICallError as e:
            # FIX: chain the underlying API error with `from e` instead of passing it as a
            # second positional argument, so the full cause is kept in the traceback.
            raise AirflowException(f"Error creating Data Quality scan {self.data_scan_id}") from e

        self.log.info("Dataplex Data Quality scan %s created successfully!", self.data_scan_id)
        return self.data_scan_id
+
+
class DataplexDeleteDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Deletes a DataScan resource.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).

    :return: None
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # What to delete and where it lives.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Connection / request configuration.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata

    def execute(self, context: Context) -> None:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info("Deleting Dataplex Data Quality Scan: %s", self.data_scan_id)

        # Issue the delete and wait for the long-running operation to finish.
        operation = hook.delete_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        hook.wait_for_operation(timeout=self.timeout, operation=operation)
        self.log.info("Dataplex Data Quality scan %s deleted successfully!", self.data_scan_id)
+
+
class DataplexRunDataQualityScanOperator(GoogleCloudBaseOperator):
    """
    Runs an on-demand execution of a DataScan.

    :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
    :param region: Required. The ID of the Google Cloud region that the lake belongs to.
    :param data_scan_id: Required. Data Quality scan identifier.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param retry: A retry object used to retry requests. If `None` is specified, requests
        will not be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param asynchronous: Flag informing should the Dataplex job be run asynchronously.
        This is useful for submitting long-running jobs and
        waiting on them asynchronously using the DataplexDataQualityJobStatusSensor
    :param fail_on_dq_failure: If set to true and not all Data Quality scan rules have been passed,
        an exception is thrown. If set to false and not all Data Quality scan rules have been passed,
        execution will finish with success.
    :param run_timeout: The amount of time, in seconds, to wait for the result.

    :return: Dataplex Data Quality scan job id.
    """

    template_fields = ("project_id", "data_scan_id", "impersonation_chain")

    def __init__(
        self,
        project_id: str,
        region: str,
        data_scan_id: str,
        api_version: str = "v1",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        asynchronous: bool = False,
        fail_on_dq_failure: bool = False,
        run_timeout: float | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Scan location and identity.
        self.project_id = project_id
        self.region = region
        self.data_scan_id = data_scan_id
        # Execution behaviour.
        self.asynchronous = asynchronous
        self.fail_on_dq_failure = fail_on_dq_failure
        self.run_timeout = run_timeout
        # Connection / request configuration.
        self.api_version = api_version
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        hook = DataplexHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        response = hook.run_data_scan(
            project_id=self.project_id,
            region=self.region,
            data_scan_id=self.data_scan_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # The job id is the last path segment of the job's resource name.
        job_id = response.job.name.split("/")[-1]

        if self.asynchronous:
            # Caller is expected to poll the job status separately
            # (e.g. with DataplexDataQualityJobStatusSensor).
            return job_id

        hook.wait_for_data_scan_job(
            job_id=job_id,
            data_scan_id=self.data_scan_id,
            project_id=self.project_id,
            region=self.region,
            fail_on_dq_failure=self.fail_on_dq_failure,
            run_timeout=self.run_timeout,
        )
        return job_id
+
+
+class DataplexGetDataQualityScanResultOperator(GoogleCloudBaseOperator):
+ """
+ Gets a Data Scan Job resource.
+
+ :param project_id: Required. The ID of the Google Cloud project that the
lake belongs to.
+ :param region: Required. The ID of the Google Cloud region that the lake
belongs to.
+ :param data_scan_id: Required. Data Quality scan identifier.
+ :param job_id: Optional. Data Quality scan job identifier.
+ :param api_version: The version of the api that will be requested for
example 'v1'.
+ :param retry: A retry object used to retry requests. If `None` is
specified, requests
+ will not be retried.
+ :param timeout: The amount of time, in seconds, to wait for the request to
complete.
   Review Comment:
   Is this timeout variable really used for the request, or does it control something else?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]