vincbeck commented on code in PR #23881:
URL: https://github.com/apache/airflow/pull/23881#discussion_r887193619
##########
airflow/providers/amazon/aws/transfers/s3_to_sql.py:
##########
@@ -0,0 +1,115 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import List, Optional, Sequence, Union
+
+import numpy
+import pandas
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.utils.context import Context
+
+
+class S3ToSqlOperator(BaseOperator):
+    """
+
+    Moves data from s3 to sql.
+
+    :param s3_key: path to s3 file
+    :param destination_table: target table on sql
+    :param file_format: input file format. CSV, JSON or Parquet
+    :param file_options: file reader options
+    :param source_conn_id: source connection
+    :param destination_conn_id: destination connection
+    :param preoperator: sql statement or list of statements to be
+        executed prior to loading the data. (templated)
+    :param insert_args: extra params for `insert_rows` method.
+    """
+
+    template_fields: Sequence[str] = ('s3_key', 'destination_table', 'file_format', 'preoperator')
+    template_ext: Sequence[str] = (
+        '.sql',
+        '.hql',
+    )
+    template_fields_renderers = {"preoperator": "sql"}
+    ui_color = '#b0f07c'
+
+    def __init__(
+        self,
+        *,
+        s3_key: str,
+        destination_table: str,
+        file_format: str,
+        file_options: Optional[dict] = None,
+        source_conn_id: str = 'aws_default',
+        destination_conn_id: str = 'sql_default',
+        preoperator: Optional[Union[str, List[str]]] = None,
+        insert_args: Optional[dict] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.s3_key = s3_key
+        self.destination_table = destination_table
+        self.file_format = file_format
+        self.file_options = file_options or {}
+        self.source_conn_id = source_conn_id
+        self.destination_conn_id = destination_conn_id
+        self.preoperator = preoperator
+        self.insert_args = insert_args or {}
+
+    @staticmethod
+    def fix_int_dtypes(df: pandas.DataFrame) -> None:
+        for col in df:
+            if "float" in df[col].dtype.name and df[col].hasnans:
+                notna_series = df[col].dropna().values
+                if numpy.equal(notna_series, notna_series.astype(int)).all():
+                    df[col] = numpy.where(df[col].isnull(), None, df[col])
+                    df[col] = df[col].astype('Int64')
+                elif numpy.isclose(notna_series, notna_series.astype(int)).all():
+                    df[col] = numpy.where(df[col].isnull(), None, df[col])
+                    df[col] = df[col].astype('float64')

Review Comment:

Following up on the conversation we had regarding this piece of code: copy-pasting code is never a good idea, in my opinion. What I would do:

- I would create a file `airflow/providers/amazon/aws/utils/s3.py`
- I would move this function into that file
- I would use this function in both `SqlToS3Operator` and `S3ToSqlOperator`
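A minimal sketch of what that shared helper could look like, assuming the `airflow/providers/amazon/aws/utils/s3.py` path suggested above and reusing the logic already present in the diff (a sketch, not the final implementation):

```python
# airflow/providers/amazon/aws/utils/s3.py (location per the review suggestion above)
import numpy
import pandas


def fix_int_dtypes(df: pandas.DataFrame) -> None:
    """Cast float columns that only hold integral values to an integer-friendly dtype, in place."""
    for col in df:
        if "float" in df[col].dtype.name and df[col].hasnans:
            # Decide based on the non-null values only.
            notna_series = df[col].dropna().values
            if numpy.equal(notna_series, notna_series.astype(int)).all():
                # Exact integers: replace NaN with None and use pandas' nullable Int64 dtype.
                df[col] = numpy.where(df[col].isnull(), None, df[col])
                df[col] = df[col].astype('Int64')
            elif numpy.isclose(notna_series, notna_series.astype(int)).all():
                # Integral within floating-point tolerance: keep as float64.
                df[col] = numpy.where(df[col].isnull(), None, df[col])
                df[col] = df[col].astype('float64')
```

Both operators could then share it, e.g. `from airflow.providers.amazon.aws.utils.s3 import fix_int_dtypes`, instead of each carrying its own copy.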
##########
docs/apache-airflow-providers-amazon/operators/transfer/s3_to_sql.rst:
##########
@@ -0,0 +1,56 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+================
+Amazon S3 to SQL
+================
+
+Use ``S3ToSqlOperator`` to copy data from an Amazon Simple Storage Service (S3) file to a SQL server.
+``S3ToSqlOperator`` is compatible with any SQL connection as long as the SQL hook has function that
+converts the SQL result to `pandas dataframe <https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.html>`__
+(e.g. MySQL, Hive, ...).
+
+Prerequisite Tasks
+------------------
+
+.. include:: ../_partials/prerequisite_tasks.rst
+
+Operators
+---------
+
+.. _howto/operator:S3ToSqlOperator:
+
+Amazon S3 to SQL transfer operator
+====================================
+
+This example loads data from an Amazon S3 file to a MySQL server.

Review Comment:

```suggestion
This example loads data from an Amazon S3 file to a SQL server.
```
