sunank200 commented on code in PR #31293:
URL: https://github.com/apache/airflow/pull/31293#discussion_r1197751629
##########
airflow/providers/google/cloud/operators/bigquery.py:
##########
@@ -1177,14 +1178,56 @@ def execute(self, context: Context):
]
else:
raise AirflowException(f"argument 'sql' of type {type(str)} is
neither a string nor an iterable")
- context["task_instance"].xcom_push(key="job_id", value=job_id)
+ context["task_instance"].xcom_push(key="job_id", value=self.job_id)
def on_kill(self) -> None:
super().on_kill()
if self.hook is not None:
self.log.info("Cancelling running query")
self.hook.cancel_job(self.hook.running_job_id)
+ def get_openlineage_facets_on_complete(self, task_instance):
+ try:
+ from openlineage.client.facet import SqlJobFacet
+ from openlineage.common.provider.bigquery import
BigQueryDatasetsProvider
+
+ from airflow.providers.openlineage.extractors import
OperatorLineage
+ from airflow.providers.openlineage.utils.utils import normalize_sql
+ except ImportError:
+ return None
Review Comment:
Should this return `None` on an `ImportError`? Shouldn't we raise it instead?
##########
airflow/providers/google/cloud/operators/bigquery.py:
##########
@@ -1177,14 +1178,56 @@ def execute(self, context: Context):
]
else:
raise AirflowException(f"argument 'sql' of type {type(str)} is
neither a string nor an iterable")
- context["task_instance"].xcom_push(key="job_id", value=job_id)
+ context["task_instance"].xcom_push(key="job_id", value=self.job_id)
def on_kill(self) -> None:
super().on_kill()
if self.hook is not None:
self.log.info("Cancelling running query")
self.hook.cancel_job(self.hook.running_job_id)
+ def get_openlineage_facets_on_complete(self, task_instance):
+ try:
+ from openlineage.client.facet import SqlJobFacet
+ from openlineage.common.provider.bigquery import
BigQueryDatasetsProvider
+
+ from airflow.providers.openlineage.extractors import
OperatorLineage
+ from airflow.providers.openlineage.utils.utils import normalize_sql
+ except ImportError:
+ return None
+
+ if not self.job_id:
+ return OperatorLineage()
Review Comment:
What are we trying to do here?
##########
tests/providers/google/cloud/operators/test_bigquery.py:
##########
@@ -17,13 +17,22 @@
# under the License.
from __future__ import annotations
+import json
from unittest import mock
from unittest.mock import ANY, MagicMock
import pandas as pd
import pytest
from google.cloud.bigquery import DEFAULT_RETRY
from google.cloud.exceptions import Conflict
+from openlineage.client.facet import (
Review Comment:
Shouldn't this openlineage import be optional here as well?
##########
airflow/providers/google/cloud/operators/bigquery.py:
##########
@@ -1177,14 +1178,56 @@ def execute(self, context: Context):
]
else:
raise AirflowException(f"argument 'sql' of type {type(str)} is
neither a string nor an iterable")
- context["task_instance"].xcom_push(key="job_id", value=job_id)
+ context["task_instance"].xcom_push(key="job_id", value=self.job_id)
def on_kill(self) -> None:
super().on_kill()
if self.hook is not None:
self.log.info("Cancelling running query")
self.hook.cancel_job(self.hook.running_job_id)
+ def get_openlineage_facets_on_complete(self, task_instance):
+ try:
+ from openlineage.client.facet import SqlJobFacet
+ from openlineage.common.provider.bigquery import
BigQueryDatasetsProvider
+
+ from airflow.providers.openlineage.extractors import
OperatorLineage
+ from airflow.providers.openlineage.utils.utils import normalize_sql
+ except ImportError:
+ return None
+
+ if not self.job_id:
+ return OperatorLineage()
+
+ client = self.hook.get_client(project_id=self.hook.project_id)
+ if isinstance(self.job_id, list):
+ inputs, outputs, run_facets = {}, {}, {}
+ for job_id in self.job_id:
+ stats =
BigQueryDatasetsProvider(client=client).get_facets(job_id=job_id)
+ print(stats)
+ for input in stats.inputs:
+ input = input.to_openlineage_dataset()
+ inputs[input.name] = input
+ if stats.output:
+ output = stats.output.to_openlineage_dataset()
+ outputs[output.name] = output
+ for key, value in stats.run_facets.items():
+ run_facets[key] = value
Review Comment:
What about the case where, let's say, there is an error in the job — shouldn't
that be part of the job facet as well?
##########
airflow/providers/google/cloud/operators/bigquery.py:
##########
@@ -1177,14 +1178,56 @@ def execute(self, context: Context):
]
else:
raise AirflowException(f"argument 'sql' of type {type(str)} is
neither a string nor an iterable")
- context["task_instance"].xcom_push(key="job_id", value=job_id)
+ context["task_instance"].xcom_push(key="job_id", value=self.job_id)
def on_kill(self) -> None:
super().on_kill()
if self.hook is not None:
self.log.info("Cancelling running query")
self.hook.cancel_job(self.hook.running_job_id)
+ def get_openlineage_facets_on_complete(self, task_instance):
+ try:
+ from openlineage.client.facet import SqlJobFacet
+ from openlineage.common.provider.bigquery import
BigQueryDatasetsProvider
+
+ from airflow.providers.openlineage.extractors import
OperatorLineage
+ from airflow.providers.openlineage.utils.utils import normalize_sql
+ except ImportError:
+ return None
+
+ if not self.job_id:
+ return OperatorLineage()
+
+ client = self.hook.get_client(project_id=self.hook.project_id)
+ if isinstance(self.job_id, list):
+ inputs, outputs, run_facets = {}, {}, {}
+ for job_id in self.job_id:
+ stats =
BigQueryDatasetsProvider(client=client).get_facets(job_id=job_id)
+ print(stats)
Review Comment:
This print statement can be removed
```suggestion
```
##########
tests/providers/google/cloud/operators/test_bigquery.py:
##########
@@ -641,6 +650,140 @@ def test_bigquery_operator_defaults(self, mock_hook,
create_task_instance_of_ope
ti.render_templates()
assert isinstance(ti.task.sql, str)
+
@mock.patch("airflow.providers.google.cloud.operators.bigquery.BigQueryHook")
Review Comment:
We should add one more test for the case when an operator raises an error; it
should show up as lineage in the job facets.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]