kaxil closed pull request #4260: [AIRFLOW-XXX] Add missing GCP operators to Docs
URL: https://github.com/apache/incubator-airflow/pull/4260
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/airflow/contrib/operators/mysql_to_gcs.py b/airflow/contrib/operators/mysql_to_gcs.py
index d9fc932ff5..40d9c49fa2 100644
--- a/airflow/contrib/operators/mysql_to_gcs.py
+++ b/airflow/contrib/operators/mysql_to_gcs.py
@@ -38,6 +38,39 @@
 class MySqlToGoogleCloudStorageOperator(BaseOperator):
     """
     Copy data from MySQL to Google cloud storage in JSON format.
+
+    :param sql: The SQL to execute on the MySQL table.
+    :type sql: str
+    :param bucket: The bucket to upload to.
+    :type bucket: str
+    :param filename: The filename to use as the object name when uploading
+        to Google cloud storage. A {} should be specified in the filename
+        to allow the operator to inject file numbers in cases where the
+        file is split due to size.
+    :type filename: str
+    :param schema_filename: If set, the filename to use as the object name
+        when uploading a .json file containing the BigQuery schema fields
+        for the table that was dumped from MySQL.
+    :type schema_filename: str
+    :param approx_max_file_size_bytes: This operator supports the ability
+        to split large table dumps into multiple files (see notes in the
+        filename param docs above). Google cloud storage allows for files
+        to be a maximum of 4GB. This param allows developers to specify the
+        file size of the splits.
+    :type approx_max_file_size_bytes: long
+    :param mysql_conn_id: Reference to a specific MySQL hook.
+    :type mysql_conn_id: str
+    :param google_cloud_storage_conn_id: Reference to a specific Google
+        cloud storage hook.
+    :type google_cloud_storage_conn_id: str
+    :param schema: The schema to use, if any. Should be a list of dict or
+        a str. Pass a string if using Jinja template, otherwise, pass a list of
+        dict. Examples could be seen: https://cloud.google.com/bigquery/docs
+        /schemas#specifying_a_json_schema_file
+    :type schema: str or list
+    :param delegate_to: The account to impersonate, if any. For this to
+        work, the service account making the request must have domain-wide
+        delegation enabled.
     """
     template_fields = ('sql', 'bucket', 'filename', 'schema_filename', 'schema')
     template_ext = ('.sql',)
@@ -56,40 +89,6 @@ def __init__(self,
                  delegate_to=None,
                  *args,
                  **kwargs):
-        """
-        :param sql: The SQL to execute on the MySQL table.
-        :type sql: str
-        :param bucket: The bucket to upload to.
-        :type bucket: str
-        :param filename: The filename to use as the object name when uploading
-            to Google cloud storage. A {} should be specified in the filename
-            to allow the operator to inject file numbers in cases where the
-            file is split due to size.
-        :type filename: str
-        :param schema_filename: If set, the filename to use as the object name
-            when uploading a .json file containing the BigQuery schema fields
-            for the table that was dumped from MySQL.
-        :type schema_filename: str
-        :param approx_max_file_size_bytes: This operator supports the ability
-            to split large table dumps into multiple files (see notes in the
-            filenamed param docs above). Google cloud storage allows for files
-            to be a maximum of 4GB. This param allows developers to specify the
-            file size of the splits.
-        :type approx_max_file_size_bytes: long
-        :param mysql_conn_id: Reference to a specific MySQL hook.
-        :type mysql_conn_id: str
-        :param google_cloud_storage_conn_id: Reference to a specific Google
-            cloud storage hook.
-        :type google_cloud_storage_conn_id: str
-        :param schema: The schema to use, if any. Should be a list of dict or
-            a str. Pass a string if using Jinja template, otherwise, pass a list of
-            dict. Examples could be seen: https://cloud.google.com/bigquery/docs
-            /schemas#specifying_a_json_schema_file
-        :type schema: str or list
-        :param delegate_to: The account to impersonate, if any. For this to
-            work, the service account making the request must have domain-wide
-            delegation enabled.
-        """
         super(MySqlToGoogleCloudStorageOperator, self).__init__(*args, **kwargs)
         self.sql = sql
         self.bucket = bucket
diff --git a/docs/integration.rst b/docs/integration.rst
index 00027f1311..7387fc25f4 100644
--- a/docs/integration.rst
+++ b/docs/integration.rst
@@ -442,10 +442,11 @@ BigQuery Operators
 - :ref:`BigQueryValueCheckOperator` : Performs a simple value check using SQL code.
 - :ref:`BigQueryIntervalCheckOperator` : Checks that the values of metrics given as SQL expressions are within a certain tolerance of the ones from days_back before.
 - :ref:`BigQueryGetDataOperator` : Fetches the data from a BigQuery table and returns data in a python list
+- :ref:`BigQueryCreateEmptyDatasetOperator` : Creates an empty BigQuery dataset.
 - :ref:`BigQueryCreateEmptyTableOperator` : Creates a new, empty table in the specified BigQuery dataset optionally with schema.
 - :ref:`BigQueryCreateExternalTableOperator` : Creates a new, external table in the dataset with the data in Google Cloud Storage.
 - :ref:`BigQueryDeleteDatasetOperator` : Deletes an existing BigQuery dataset.
-- :ref:`BigQueryCreateEmptyDatasetOperator` : Creates an empty BigQuery dataset.
+- :ref:`BigQueryTableDeleteOperator` : Deletes an existing BigQuery table.
 - :ref:`BigQueryOperator` : Executes BigQuery SQL queries in a specific BigQuery database.
 - :ref:`BigQueryToBigQueryOperator` : Copy a BigQuery table to another BigQuery table.
 - :ref:`BigQueryToCloudStorageOperator` : Transfers a BigQuery table to a Google Cloud Storage bucket
@@ -493,13 +494,6 @@ BigQueryCreateExternalTableOperator
 
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator
 
-.. _BigQueryDeleteDatasetOperator:
-
-BigQueryDeleteDatasetOperator
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator
-
 .. _BigQueryCreateEmptyDatasetOperator:
 
 BigQueryCreateEmptyDatasetOperator
@@ -507,12 +501,12 @@ BigQueryCreateEmptyDatasetOperator
 
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator
 
-.. _BigQueryOperator:
+.. _BigQueryDeleteDatasetOperator:
 
-BigQueryOperator
-^^^^^^^^^^^^^^^^
+BigQueryDeleteDatasetOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator
+.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator
 
 .. _BigQueryTableDeleteOperator:
 
@@ -521,6 +515,13 @@ BigQueryTableDeleteOperator
 
 .. autoclass:: airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator
 
+.. _BigQueryOperator:
+
+BigQueryOperator
+^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator
+
 .. _BigQueryToBigQueryOperator:
 
 BigQueryToBigQueryOperator
@@ -996,6 +997,7 @@ Storage Operators
 - :ref:`GoogleCloudStorageObjectCreateAclEntryOperator` : Creates a new ACL entry on the specified object.
 - :ref:`GoogleCloudStorageToBigQueryOperator` : Loads files from Google cloud storage into BigQuery.
 - :ref:`GoogleCloudStorageToGoogleCloudStorageOperator` : Copies objects from a bucket to another, with renaming if requested.
+- :ref:`MySqlToGoogleCloudStorageOperator`: Copy data from any MySQL Database to Google cloud storage in JSON format.
 
 .. _FileToGoogleCloudStorageOperator:
 
@@ -1053,6 +1055,13 @@ GoogleCloudStorageToGoogleCloudStorageOperator
 
 .. autoclass:: airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator
 
+.. _MySqlToGoogleCloudStorageOperator:
+
+MySqlToGoogleCloudStorageOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator
+
 GoogleCloudStorageHook
 """"""""""""""""""""""
 
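For anyone who wants to see the parameters documented above in context, here is a minimal usage sketch (not part of the PR). The DAG id, connection ids, bucket, and table names are hypothetical placeholders:

from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.mysql_to_gcs import MySqlToGoogleCloudStorageOperator

with DAG(dag_id='example_mysql_to_gcs',
         start_date=datetime(2018, 1, 1),
         schedule_interval=None) as dag:

    export_orders = MySqlToGoogleCloudStorageOperator(
        task_id='export_orders',
        sql='SELECT * FROM orders',  # query to run against MySQL
        bucket='my-gcs-bucket',      # destination GCS bucket
        # '{}' is replaced with a file counter when the dump is split into
        # multiple files after exceeding approx_max_file_size_bytes.
        filename='exports/orders/part-{}.json',
        schema_filename='exports/orders/schema.json',  # BigQuery schema dump
        approx_max_file_size_bytes=1900000000,  # stay well under the GCS 4GB cap
        mysql_conn_id='mysql_default',
        google_cloud_storage_conn_id='google_cloud_default',
    )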

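The schema parameter documented above accepts either a list of dicts or a string. A hedged sketch of both forms, using made-up field names and following the BigQuery JSON schema format linked in the docstring:

# Passed as a list of dicts (used directly):
schema = [
    {'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED'},
    {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'},
    {'name': 'created_at', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'},
]

# Passed as a string, e.g. when the value is rendered through a Jinja template:
schema_str = (
    '[{"name": "id", "type": "INTEGER", "mode": "REQUIRED"},'
    ' {"name": "name", "type": "STRING", "mode": "NULLABLE"}]'
)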