jbbqqf commented on code in PR #30658:
URL: https://github.com/apache/airflow/pull/30658#discussion_r1174545448


##########
tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py:
##########
@@ -0,0 +1,99 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example Airflow DAG for Google BigQuery service.
+"""
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+import pytest
+
+from airflow import models
+from airflow.providers.google.cloud.operators.bigquery import (
+    BigQueryCreateEmptyDatasetOperator,
+    BigQueryCreateEmptyTableOperator,
+    BigQueryDeleteDatasetOperator,
+)
+
+try:
+    from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator
+except ImportError:
+    pytest.skip("MySQL not available", allow_module_level=True)
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_bigquery_to_mysql"
+
+DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
+DATA_EXPORT_BUCKET_NAME = os.environ.get("GCP_BIGQUERY_EXPORT_BUCKET_NAME", "INVALID BUCKET NAME")
+TABLE = "table_42"
+destination_table = "mysql_table_test"
+
+with models.DAG(
+    DAG_ID,
+    schedule="@once",  # Override to match your needs
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
+    tags=["example", "bigquery"],
+) as dag:
+    # [START howto_operator_bigquery_to_mysql]
+    bigquery_to_mysql = BigQueryToMySqlOperator(

Review Comment:
   I don't understand what you'd suggest
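   For reference, the hunk above is cut off at the operator instantiation. A minimal, purely illustrative sketch of how such a transfer task is typically wired up, assuming argument names that match the base operator discussed later in this review (not the actual contents of the example DAG):

   ```python
   # Hypothetical continuation of the truncated snippet above -- not the file's real code.
   bigquery_to_mysql = BigQueryToMySqlOperator(
       task_id="bigquery_to_mysql",
       dataset_table=f"{DATASET_NAME}.{TABLE}",
       mysql_table=destination_table,
       replace=False,
   )
   # [END howto_operator_bigquery_to_mysql]
   ```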



##########
airflow/providers/google/cloud/transfers/bigquery_to_mssql.py:
##########
@@ -39,86 +37,39 @@ class BigQueryToMsSqlOperator(BaseOperator):
         For more information on how to use this operator, take a look at the guide:
         :ref:`howto/operator:BigQueryToMsSqlOperator`
 
-    .. note::
-        If you pass fields to ``selected_fields`` which are in different order than the
-        order of columns already in
-        BQ table, the data will still be in the order of BQ table.
-        For example if the BQ table has 3 columns as
-        ``[A,B,C]`` and you pass 'B,A' in the ``selected_fields``
-        the data would still be of the form ``'A,B'`` and passed through this form
-        to MSSQL
-
-    **Example**: ::
-
-       transfer_data = BigQueryToMsSqlOperator(
-            task_id='task_id',
-            source_project_dataset_table='my-project.mydataset.mytable',
-            mssql_table='dest_table_name',
-            replace=True,
-        )
-
     :param source_project_dataset_table: A dotted ``<project>.<dataset>.<table>``:
         the big query table of origin
-    :param selected_fields: List of fields to return (comma-separated). If
-        unspecified, all fields are returned.
-    :param gcp_conn_id: reference to a specific Google Cloud hook.
+    :param mssql_table: target MsSQL table (templated)

Review Comment:
   Sure. I changed that.



##########
airflow/providers/google/cloud/transfers/bigquery_to_mssql.py:
##########
@@ -39,86 +37,39 @@ class BigQueryToMsSqlOperator(BaseOperator):
         For more information on how to use this operator, take a look at the 
guide:
         :ref:`howto/operator:BigQueryToMsSqlOperator`
 
-    .. note::
-        If you pass fields to ``selected_fields`` which are in different order 
than the
-        order of columns already in
-        BQ table, the data will still be in the order of BQ table.
-        For example if the BQ table has 3 columns as
-        ``[A,B,C]`` and you pass 'B,A' in the ``selected_fields``
-        the data would still be of the form ``'A,B'`` and passed through this 
form
-        to MSSQL
-
-    **Example**: ::
-
-       transfer_data = BigQueryToMsSqlOperator(
-            task_id='task_id',
-            source_project_dataset_table='my-project.mydataset.mytable',
-            mssql_table='dest_table_name',
-            replace=True,
-        )
-
     :param source_project_dataset_table: A dotted ``<project>.<dataset>.<table>``:
         the big query table of origin
-    :param selected_fields: List of fields to return (comma-separated). If
-        unspecified, all fields are returned.
-    :param gcp_conn_id: reference to a specific Google Cloud hook.
+    :param mssql_table: target MsSQL table (templated)
     :param mssql_conn_id: reference to a specific mssql hook
-    :param database: name of database which overwrite defined one in connection
-    :param replace: Whether to replace instead of insert
-    :param batch_size: The number of rows to take in each batch
-    :param location: The location used for the operation.
-    :param impersonation_chain: Optional service account to impersonate using short-term
-        credentials, or chained list of accounts required to get the access_token
-        of the last account in the list, which will be impersonated in the request.
-        If set as a string, the account must grant the originating account
-        the Service Account Token Creator IAM role.
-        If set as a sequence, the identities from the list must grant
-        Service Account Token Creator IAM role to the directly preceding identity, with first
-        account from the list granting this role to the originating account (templated).
     """
 
-    template_fields: Sequence[str] = ("source_project_dataset_table", "mssql_table", "impersonation_chain")
+    template_fields: Sequence[str] = tuple(BaseBigQueryToSqlOperator.template_fields) + (
+        "source_project_dataset_table",
+    )
     operator_extra_links = (BigQueryTableLink(),)
 
     def __init__(
         self,
         *,
         source_project_dataset_table: str,
         mssql_table: str,
-        selected_fields: list[str] | str | None = None,
-        gcp_conn_id: str = "google_cloud_default",
         mssql_conn_id: str = "mssql_default",
-        database: str | None = None,
-        replace: bool = False,
-        batch_size: int = 1000,
-        location: str | None = None,
-        impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
     ) -> None:
-        super().__init__(**kwargs)
-        self.selected_fields = selected_fields
-        self.gcp_conn_id = gcp_conn_id
-        self.mssql_conn_id = mssql_conn_id
-        self.database = database
-        self.mssql_table = mssql_table
-        self.replace = replace
-        self.batch_size = batch_size
-        self.location = location
-        self.impersonation_chain = impersonation_chain
         try:
-            _, self.dataset_id, self.table_id = source_project_dataset_table.split(".")
+            _, dataset_id, table_id = source_project_dataset_table.split(".")
         except ValueError:
            raise ValueError(
                f"Could not parse {source_project_dataset_table} as <project>.<dataset>.<table>"
             ) from None
+        super().__init__(sql_table=mssql_table, dataset_table=f"{dataset_id}.{table_id}", **kwargs)

Review Comment:
   If you compare the old vs. the new version, that change would introduce a slight difference in behavior between the two versions. It can lead to production issues.
   
   I'd keep it this way because, unless I am mistaken, this version achieves 100% backward compatibility.
   
   I agree that it doesn't make sense to have two distinct parameters for this use case. I searched the `google` provider for helper functions that parse `project.dataset.table` vs `dataset.table`, but didn't find any.
   
   If you insist on deprecating something here, I'd vote to deprecate it in another PR because this one is already becoming quite complex to manage / review :v: 
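   If such a shared helper were added in a follow-up PR, a minimal sketch could look like the one below. The function name and behavior are hypothetical -- this is not existing code in the `google` provider:

   ```python
   from __future__ import annotations


   def _parse_bigquery_table(table_input: str) -> tuple[str | None, str, str]:
       """Hypothetical helper: split '<project>.<dataset>.<table>' or '<dataset>.<table>'.

       Returns (project_id, dataset_id, table_id); project_id is None when the
       input only contains dataset and table.
       """
       parts = table_input.split(".")
       if len(parts) == 3:
           return parts[0], parts[1], parts[2]
       if len(parts) == 2:
           return None, parts[0], parts[1]
       raise ValueError(f"Could not parse {table_input} as [<project>.]<dataset>.<table>")
   ```

   With a helper like this, both `BigQueryToMsSqlOperator` (three-part input) and the base operator (two-part input) could share the same parsing logic without changing their public parameters.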



##########
airflow/providers/google/cloud/transfers/bigquery_to_sql.py:
##########
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Base operator for BigQuery to SQL operators."""
+from __future__ import annotations
+
+import abc
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
+from airflow.providers.google.cloud.utils.bigquery_get_data import bigquery_get_data
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class BaseBigQueryToSqlOperator(BaseOperator):
+    """
+    Fetches the data from a BigQuery table (alternatively fetch data for selected columns)
+    and insert that data into a SQL table.
+
+    .. note::
+        If you pass fields to ``selected_fields`` which are in different order than the
+        order of columns already in
+        BQ table, the data will still be in the order of BQ table.
+        For example if the BQ table has 3 columns as
+        ``[A,B,C]`` and you pass 'B,A' in the ``selected_fields``
+        the data would still be of the form ``'A,B'`` and passed through this form
+        to the SQL database.
+
+    :param dataset_table: A dotted ``<dataset>.<table>``: the big query table of origin
+    :param selected_fields: List of fields to return (comma-separated). If
+        unspecified, all fields are returned.
+    :param gcp_conn_id: reference to a specific Google Cloud hook.
+    :param database: name of database which overwrite defined one in connection
+    :param replace: Whether to replace instead of insert
+    :param batch_size: The number of rows to take in each batch
+    :param location: The location used for the operation.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = (
+        "sql_table",
+        "impersonation_chain",
+    )
+
+    def __init__(
+        self,
+        *,
+        dataset_table: str,
+        sql_table: str,
+        selected_fields: list[str] | str | None = None,
+        gcp_conn_id: str = "google_cloud_default",
+        database: str | None = None,
+        replace: bool = False,
+        batch_size: int = 1000,
+        location: str | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.selected_fields = selected_fields
+        self.gcp_conn_id = gcp_conn_id
+        self.database = database
+        self.sql_table = sql_table
+        self.replace = replace
+        self.batch_size = batch_size
+        self.location = location
+        self.impersonation_chain = impersonation_chain
+        try:
+            self.dataset_id, self.table_id = dataset_table.split(".")
+        except ValueError:
+            raise ValueError(f"Could not parse {dataset_table} as <dataset>.<table>") from None
+
+    @abc.abstractmethod
+    def get_sql_hook(self) -> DbApiHook:
+        pass
+
+    def persist_links(self, context: Context) -> None:
+        pass

Review Comment:
   As it is trivial to find all child implementations of / calls to this function, the best added value I could provide was to inform future readers about the history of this method.
   
   If you have another opinion, can you propose a docstring you think would fit?
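   For illustration, a concrete subclass fills in these hooks roughly as follows. This is only a sketch of the pattern (the MySQL hook import comes from the existing `mysql` provider; the exact code in this PR may differ):

   ```python
   from __future__ import annotations

   from airflow.providers.mysql.hooks.mysql import MySqlHook


   class BigQueryToMySqlOperator(BaseBigQueryToSqlOperator):
       """Fetch data from BigQuery and insert it into a MySQL table (sketch)."""

       def __init__(self, *, mysql_conn_id: str = "mysql_default", **kwargs) -> None:
           super().__init__(**kwargs)
           self.mysql_conn_id = mysql_conn_id

       def get_sql_hook(self) -> MySqlHook:
           # Return the DB-API hook that receives the rows fetched from BigQuery.
           return MySqlHook(schema=self.database, mysql_conn_id=self.mysql_conn_id)
   ```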



##########
airflow/providers/google/cloud/transfers/bigquery_to_sql.py:
##########
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Base operator for BigQuery to SQL operators."""
+from __future__ import annotations
+
+import abc
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
+from airflow.providers.google.cloud.utils.bigquery_get_data import bigquery_get_data
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class BaseBigQueryToSqlOperator(BaseOperator):
+    """
+    Fetches the data from a BigQuery table (alternatively fetch data for selected columns)
+    and insert that data into a SQL table.

Review Comment:
   Done



##########
airflow/providers/google/cloud/transfers/bigquery_to_sql.py:
##########
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Base operator for BigQuery to SQL operators."""
+from __future__ import annotations
+
+import abc
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
+from airflow.providers.google.cloud.utils.bigquery_get_data import bigquery_get_data
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class BaseBigQueryToSqlOperator(BaseOperator):
+    """
+    Fetches the data from a BigQuery table (alternatively fetch data for selected columns)
+    and insert that data into a SQL table.
+
+    .. note::
+        If you pass fields to ``selected_fields`` which are in different order than the
+        order of columns already in
+        BQ table, the data will still be in the order of BQ table.
+        For example if the BQ table has 3 columns as
+        ``[A,B,C]`` and you pass 'B,A' in the ``selected_fields``
+        the data would still be of the form ``'A,B'`` and passed through this form
+        to the SQL database.
+
+    :param dataset_table: A dotted ``<dataset>.<table>``: the big query table of origin
+    :param selected_fields: List of fields to return (comma-separated). If
+        unspecified, all fields are returned.
+    :param gcp_conn_id: reference to a specific Google Cloud hook.
+    :param database: name of database which overwrite defined one in connection
+    :param replace: Whether to replace instead of insert
+    :param batch_size: The number of rows to take in each batch
+    :param location: The location used for the operation.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = (
+        "sql_table",
+        "impersonation_chain",
+    )
+
+    def __init__(
+        self,
+        *,
+        dataset_table: str,
+        sql_table: str,
+        selected_fields: list[str] | str | None = None,
+        gcp_conn_id: str = "google_cloud_default",
+        database: str | None = None,
+        replace: bool = False,
+        batch_size: int = 1000,
+        location: str | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.selected_fields = selected_fields
+        self.gcp_conn_id = gcp_conn_id
+        self.database = database
+        self.sql_table = sql_table
+        self.replace = replace
+        self.batch_size = batch_size
+        self.location = location
+        self.impersonation_chain = impersonation_chain
+        try:
+            self.dataset_id, self.table_id = dataset_table.split(".")
+        except ValueError:
+            raise ValueError(f"Could not parse {dataset_table} as <dataset>.<table>") from None
+
+    @abc.abstractmethod
+    def get_sql_hook(self) -> DbApiHook:
+        pass

Review Comment:
   In my opinion it is unnecessary because concrete implementations are self-explanatory.
   
   I have nonetheless taken into account your remark by adding a docstring.
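   For readers following the thread: the `execute` flow of the base operator is not visible in the hunks above. A plausible sketch, assuming `bigquery_get_data` yields row batches and the concrete hook exposes the standard DB-API `insert_rows` method (the exact signatures are not shown in this diff):

   ```python
   def execute(self, context: Context) -> None:
       # Illustrative body for BaseBigQueryToSqlOperator.execute -- not the PR's exact code.
       big_query_hook = BigQueryHook(
           gcp_conn_id=self.gcp_conn_id,
           location=self.location,
           impersonation_chain=self.impersonation_chain,
       )
       self.persist_links(context)
       sql_hook = self.get_sql_hook()
       # Stream rows out of BigQuery in batches and insert each batch into the SQL table.
       for rows in bigquery_get_data(
           self.log,
           self.dataset_id,
           self.table_id,
           big_query_hook,
           self.batch_size,
           self.selected_fields,
       ):
           sql_hook.insert_rows(
               table=self.sql_table,
               rows=rows,
               target_fields=self.selected_fields,
               replace=self.replace,
           )
   ```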



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
