http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/hdfs_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/hdfs_sensor.html b/_modules/airflow/sensors/hdfs_sensor.html
index fdfec6d..2fb801d 100644
--- a/_modules/airflow/sensors/hdfs_sensor.html
+++ b/_modules/airflow/sensors/hdfs_sensor.html
@@ -306,20 +306,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/hive_partition_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/hive_partition_sensor.html b/_modules/airflow/sensors/hive_partition_sensor.html
index d70cbb5..4df1553 100644
--- a/_modules/airflow/sensors/hive_partition_sensor.html
+++ b/_modules/airflow/sensors/hive_partition_sensor.html
@@ -264,20 +264,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/http_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/http_sensor.html b/_modules/airflow/sensors/http_sensor.html
index 7fbbe5d..f5c5c8c 100644
--- a/_modules/airflow/sensors/http_sensor.html
+++ b/_modules/airflow/sensors/http_sensor.html
@@ -281,20 +281,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/metastore_partition_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/metastore_partition_sensor.html b/_modules/airflow/sensors/metastore_partition_sensor.html
index b9d57e6..fefe40f 100644
--- a/_modules/airflow/sensors/metastore_partition_sensor.html
+++ b/_modules/airflow/sensors/metastore_partition_sensor.html
@@ -272,20 +272,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/named_hive_partition_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/named_hive_partition_sensor.html b/_modules/airflow/sensors/named_hive_partition_sensor.html
index 96d0246..06fcb20 100644
--- a/_modules/airflow/sensors/named_hive_partition_sensor.html
+++ b/_modules/airflow/sensors/named_hive_partition_sensor.html
@@ -293,20 +293,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/s3_key_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/s3_key_sensor.html b/_modules/airflow/sensors/s3_key_sensor.html
index 67e32a3..1fe36d8 100644
--- a/_modules/airflow/sensors/s3_key_sensor.html
+++ b/_modules/airflow/sensors/s3_key_sensor.html
@@ -270,20 +270,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/s3_prefix_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/s3_prefix_sensor.html b/_modules/airflow/sensors/s3_prefix_sensor.html
index f6c3283..1eed929 100644
--- a/_modules/airflow/sensors/s3_prefix_sensor.html
+++ b/_modules/airflow/sensors/s3_prefix_sensor.html
@@ -199,6 +199,8 @@
 <span class="sd">    :param delimiter: The delimiter intended to show 
hierarchy.</span>
 <span class="sd">        Defaults to &#39;/&#39;.</span>
 <span class="sd">    :type delimiter: str</span>
+<span class="sd">    :param aws_conn_id: a reference to the s3 
connection</span>
+<span class="sd">    :type aws_conn_id: str</span>
 <span class="sd">    &quot;&quot;&quot;</span>
     <span class="n">template_fields</span> <span class="o">=</span> <span 
class="p">(</span><span class="s1">&#39;prefix&#39;</span><span 
class="p">,</span> <span class="s1">&#39;bucket_name&#39;</span><span 
class="p">)</span>
 
@@ -257,20 +259,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 
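The hunk above documents the new ``aws_conn_id`` parameter of S3PrefixSensor. A
minimal usage sketch (the DAG id, bucket, and prefix are placeholders, not part
of this commit):

    from datetime import datetime

    from airflow import DAG
    from airflow.sensors.s3_prefix_sensor import S3PrefixSensor

    dag = DAG('s3_prefix_example',              # hypothetical DAG id
              start_date=datetime(2018, 1, 1),
              schedule_interval=None)

    wait_for_files = S3PrefixSensor(
        task_id='wait_for_files',
        bucket_name='my-bucket',                # placeholder bucket
        prefix='incoming/2018-10-01',           # placeholder key prefix
        delimiter='/',                          # default noted in the docstring
        aws_conn_id='aws_default',              # the connection reference documented above
        dag=dag,
    )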

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/sql_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/sql_sensor.html b/_modules/airflow/sensors/sql_sensor.html
index fd2494a..66207d3 100644
--- a/_modules/airflow/sensors/sql_sensor.html
+++ b/_modules/airflow/sensors/sql_sensor.html
@@ -247,20 +247,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/time_delta_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/time_delta_sensor.html b/_modules/airflow/sensors/time_delta_sensor.html
index 529cae7..1cebcbe 100644
--- a/_modules/airflow/sensors/time_delta_sensor.html
+++ b/_modules/airflow/sensors/time_delta_sensor.html
@@ -235,20 +235,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/time_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/time_sensor.html b/_modules/airflow/sensors/time_sensor.html
index a1597d8..5bdcc98 100644
--- a/_modules/airflow/sensors/time_sensor.html
+++ b/_modules/airflow/sensors/time_sensor.html
@@ -229,20 +229,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/airflow/sensors/web_hdfs_sensor.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/sensors/web_hdfs_sensor.html b/_modules/airflow/sensors/web_hdfs_sensor.html
index 9a4d478..d28a341 100644
--- a/_modules/airflow/sensors/web_hdfs_sensor.html
+++ b/_modules/airflow/sensors/web_hdfs_sensor.html
@@ -233,20 +233,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" src="../../../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_modules/index.html
----------------------------------------------------------------------
diff --git a/_modules/index.html b/_modules/index.html
index aec8b56..f249647 100644
--- a/_modules/index.html
+++ b/_modules/index.html
@@ -157,8 +157,8 @@
            <div itemprop="articleBody">
             
   <h1>All modules for which code is available</h1>
-<ul><li><a href="airflow/contrib/executors/mesos_executor.html">airflow.contrib.executors.mesos_executor</a></li>
-<li><a href="airflow/contrib/hooks/aws_dynamodb_hook.html">airflow.contrib.hooks.aws_dynamodb_hook</a></li>
+<ul><li><a href="airflow/contrib/hooks/aws_dynamodb_hook.html">airflow.contrib.hooks.aws_dynamodb_hook</a></li>
+<li><a href="airflow/contrib/hooks/aws_firehose_hook.html">airflow.contrib.hooks.aws_firehose_hook</a></li>
 <li><a href="airflow/contrib/hooks/aws_hook.html">airflow.contrib.hooks.aws_hook</a></li>
 <li><a href="airflow/contrib/hooks/aws_lambda_hook.html">airflow.contrib.hooks.aws_lambda_hook</a></li>
 <li><a href="airflow/contrib/hooks/azure_data_lake_hook.html">airflow.contrib.hooks.azure_data_lake_hook</a></li>
@@ -177,9 +177,12 @@
 <li><a href="airflow/contrib/hooks/gcp_container_hook.html">airflow.contrib.hooks.gcp_container_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_dataflow_hook.html">airflow.contrib.hooks.gcp_dataflow_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_dataproc_hook.html">airflow.contrib.hooks.gcp_dataproc_hook</a></li>
+<li><a href="airflow/contrib/hooks/gcp_function_hook.html">airflow.contrib.hooks.gcp_function_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_mlengine_hook.html">airflow.contrib.hooks.gcp_mlengine_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_pubsub_hook.html">airflow.contrib.hooks.gcp_pubsub_hook</a></li>
+<li><a href="airflow/contrib/hooks/gcp_sql_hook.html">airflow.contrib.hooks.gcp_sql_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcs_hook.html">airflow.contrib.hooks.gcs_hook</a></li>
+<li><a href="airflow/contrib/hooks/imap_hook.html">airflow.contrib.hooks.imap_hook</a></li>
 <li><a href="airflow/contrib/hooks/jenkins_hook.html">airflow.contrib.hooks.jenkins_hook</a></li>
 <li><a href="airflow/contrib/hooks/jira_hook.html">airflow.contrib.hooks.jira_hook</a></li>
 <li><a href="airflow/contrib/hooks/mongo_hook.html">airflow.contrib.hooks.mongo_hook</a></li>
@@ -187,6 +190,7 @@
 <li><a href="airflow/contrib/hooks/qubole_hook.html">airflow.contrib.hooks.qubole_hook</a></li>
 <li><a href="airflow/contrib/hooks/redis_hook.html">airflow.contrib.hooks.redis_hook</a></li>
 <li><a href="airflow/contrib/hooks/redshift_hook.html">airflow.contrib.hooks.redshift_hook</a></li>
+<li><a href="airflow/contrib/hooks/sagemaker_hook.html">airflow.contrib.hooks.sagemaker_hook</a></li>
 <li><a href="airflow/contrib/hooks/segment_hook.html">airflow.contrib.hooks.segment_hook</a></li>
 <li><a href="airflow/contrib/hooks/sftp_hook.html">airflow.contrib.hooks.sftp_hook</a></li>
 <li><a href="airflow/contrib/hooks/slack_webhook_hook.html">airflow.contrib.hooks.slack_webhook_hook</a></li>
@@ -221,7 +225,10 @@
 <li><a href="airflow/contrib/operators/emr_terminate_job_flow_operator.html">airflow.contrib.operators.emr_terminate_job_flow_operator</a></li>
 <li><a href="airflow/contrib/operators/file_to_gcs.html">airflow.contrib.operators.file_to_gcs</a></li>
 <li><a href="airflow/contrib/operators/file_to_wasb.html">airflow.contrib.operators.file_to_wasb</a></li>
+<li><a href="airflow/contrib/operators/gcp_compute_operator.html">airflow.contrib.operators.gcp_compute_operator</a></li>
 <li><a href="airflow/contrib/operators/gcp_container_operator.html">airflow.contrib.operators.gcp_container_operator</a></li>
+<li><a href="airflow/contrib/operators/gcp_function_operator.html">airflow.contrib.operators.gcp_function_operator</a></li>
+<li><a href="airflow/contrib/operators/gcp_sql_operator.html">airflow.contrib.operators.gcp_sql_operator</a></li>
 <li><a href="airflow/contrib/operators/gcs_download_operator.html">airflow.contrib.operators.gcs_download_operator</a></li>
 <li><a href="airflow/contrib/operators/gcs_list_operator.html">airflow.contrib.operators.gcs_list_operator</a></li>
 <li><a href="airflow/contrib/operators/gcs_operator.html">airflow.contrib.operators.gcs_operator</a></li>
@@ -242,6 +249,13 @@
 <li><a href="airflow/contrib/operators/qubole_operator.html">airflow.contrib.operators.qubole_operator</a></li>
 <li><a href="airflow/contrib/operators/s3_list_operator.html">airflow.contrib.operators.s3_list_operator</a></li>
 <li><a href="airflow/contrib/operators/s3_to_gcs_operator.html">airflow.contrib.operators.s3_to_gcs_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_base_operator.html">airflow.contrib.operators.sagemaker_base_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_endpoint_config_operator.html">airflow.contrib.operators.sagemaker_endpoint_config_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_endpoint_operator.html">airflow.contrib.operators.sagemaker_endpoint_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_model_operator.html">airflow.contrib.operators.sagemaker_model_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_training_operator.html">airflow.contrib.operators.sagemaker_training_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_transform_operator.html">airflow.contrib.operators.sagemaker_transform_operator</a></li>
+<li><a href="airflow/contrib/operators/sagemaker_tuning_operator.html">airflow.contrib.operators.sagemaker_tuning_operator</a></li>
 <li><a href="airflow/contrib/operators/segment_track_event_operator.html">airflow.contrib.operators.segment_track_event_operator</a></li>
 <li><a href="airflow/contrib/operators/sftp_operator.html">airflow.contrib.operators.sftp_operator</a></li>
 <li><a href="airflow/contrib/operators/slack_webhook_operator.html">airflow.contrib.operators.slack_webhook_operator</a></li>
@@ -257,6 +271,7 @@
 <li><a href="airflow/contrib/sensors/aws_redshift_cluster_sensor.html">airflow.contrib.sensors.aws_redshift_cluster_sensor</a></li>
 <li><a href="airflow/contrib/sensors/bash_sensor.html">airflow.contrib.sensors.bash_sensor</a></li>
 <li><a href="airflow/contrib/sensors/bigquery_sensor.html">airflow.contrib.sensors.bigquery_sensor</a></li>
+<li><a href="airflow/contrib/sensors/cassandra_sensor.html">airflow.contrib.sensors.cassandra_sensor</a></li>
 <li><a href="airflow/contrib/sensors/datadog_sensor.html">airflow.contrib.sensors.datadog_sensor</a></li>
 <li><a href="airflow/contrib/sensors/emr_base_sensor.html">airflow.contrib.sensors.emr_base_sensor</a></li>
 <li><a href="airflow/contrib/sensors/emr_job_flow_sensor.html">airflow.contrib.sensors.emr_job_flow_sensor</a></li>
@@ -269,6 +284,11 @@
 <li><a href="airflow/contrib/sensors/pubsub_sensor.html">airflow.contrib.sensors.pubsub_sensor</a></li>
 <li><a href="airflow/contrib/sensors/qubole_sensor.html">airflow.contrib.sensors.qubole_sensor</a></li>
 <li><a href="airflow/contrib/sensors/redis_key_sensor.html">airflow.contrib.sensors.redis_key_sensor</a></li>
+<li><a href="airflow/contrib/sensors/sagemaker_base_sensor.html">airflow.contrib.sensors.sagemaker_base_sensor</a></li>
+<li><a href="airflow/contrib/sensors/sagemaker_endpoint_sensor.html">airflow.contrib.sensors.sagemaker_endpoint_sensor</a></li>
+<li><a href="airflow/contrib/sensors/sagemaker_training_sensor.html">airflow.contrib.sensors.sagemaker_training_sensor</a></li>
+<li><a href="airflow/contrib/sensors/sagemaker_transform_sensor.html">airflow.contrib.sensors.sagemaker_transform_sensor</a></li>
+<li><a href="airflow/contrib/sensors/sagemaker_tuning_sensor.html">airflow.contrib.sensors.sagemaker_tuning_sensor</a></li>
 <li><a href="airflow/contrib/sensors/sftp_sensor.html">airflow.contrib.sensors.sftp_sensor</a></li>
 <li><a href="airflow/contrib/sensors/wasb_sensor.html">airflow.contrib.sensors.wasb_sensor</a></li>
 <li><a href="airflow/executors/celery_executor.html">airflow.executors.celery_executor</a></li>
@@ -372,20 +392,13 @@
 
   
 
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../',
-            VERSION:'',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../_static/jquery.js"></script>
-      <script type="text/javascript" src="../_static/underscore.js"></script>
-      <script type="text/javascript" src="../_static/doctools.js"></script>
+    
+    
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../" src="../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../_static/jquery.js"></script>
+        <script type="text/javascript" src="../_static/underscore.js"></script>
+        <script type="text/javascript" src="../_static/doctools.js"></script>
+    
 
   
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/api.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/api.rst.txt b/_sources/api.rst.txt
index 4ea19c8..194809a 100644
--- a/_sources/api.rst.txt
+++ b/_sources/api.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Experimental Rest API
 =====================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/cli.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/cli.rst.txt b/_sources/cli.rst.txt
index f05cbfb..4d68d0e 100644
--- a/_sources/cli.rst.txt
+++ b/_sources/cli.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Command Line Interface
 ======================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/code.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/code.rst.txt b/_sources/code.rst.txt
index 80ec761..afa0dc4 100644
--- a/_sources/code.rst.txt
+++ b/_sources/code.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 API Reference
 =============
 
@@ -153,7 +170,6 @@ Operators
 .. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator
 .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator
 .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator
-.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator
 .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator
 .. autoclass:: airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator
 .. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator
@@ -172,8 +188,6 @@ Operators
 .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator
 .. autoclass:: airflow.contrib.operators.mongo_to_s3.MongoToS3Operator
 .. autoclass:: airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator
-.. autoclass:: airflow.contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransfer
-.. autoclass:: airflow.contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer
 .. autoclass:: airflow.contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator
 .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicCreateOperator
 .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator
@@ -185,10 +199,18 @@ Operators
 .. autoclass:: airflow.contrib.operators.qubole_operator.QuboleOperator
 .. autoclass:: airflow.contrib.operators.s3_list_operator.S3ListOperator
 .. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_base_operator.SageMakerBaseOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator
+.. autoclass:: airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator
 .. autoclass:: airflow.contrib.operators.segment_track_event_operator.SegmentTrackEventOperator
 .. autoclass:: airflow.contrib.operators.sftp_operator.SFTPOperator
 .. autoclass:: airflow.contrib.operators.slack_webhook_operator.SlackWebhookOperator
 .. autoclass:: airflow.contrib.operators.snowflake_operator.SnowflakeOperator
+.. autoclass:: airflow.contrib.operators.sns_publish_operator.SnsPublishOperator
 .. autoclass:: airflow.contrib.operators.spark_jdbc_operator.SparkJDBCOperator
 .. autoclass:: airflow.contrib.operators.spark_sql_operator.SparkSqlOperator
 .. autoclass:: airflow.contrib.operators.spark_submit_operator.SparkSubmitOperator
@@ -204,8 +226,7 @@ Sensors
 .. autoclass:: airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor
 .. autoclass:: airflow.contrib.sensors.bash_sensor.BashSensor
 .. autoclass:: airflow.contrib.sensors.bigquery_sensor.BigQueryTableSensor
-.. autoclass:: airflow.contrib.sensors.cassandra_record_sensor.CassandraRecordSensor
-.. autoclass:: airflow.contrib.sensors.cassandra_table_sensor.CassandraTableSensor
+.. autoclass:: airflow.contrib.sensors.cassandra_sensor.CassandraRecordSensor
 .. autoclass:: airflow.contrib.sensors.datadog_sensor.DatadogSensor
 .. autoclass:: airflow.contrib.sensors.emr_base_sensor.EmrBaseSensor
 .. autoclass:: airflow.contrib.sensors.emr_job_flow_sensor.EmrJobFlowSensor
@@ -222,6 +243,11 @@ Sensors
 .. autoclass:: airflow.contrib.sensors.pubsub_sensor.PubSubPullSensor
 .. autoclass:: airflow.contrib.sensors.qubole_sensor.QuboleSensor
 .. autoclass:: airflow.contrib.sensors.redis_key_sensor.RedisKeySensor
+.. autoclass:: airflow.contrib.sensors.sagemaker_base_sensor.SageMakerBaseSensor
+.. autoclass:: airflow.contrib.sensors.sagemaker_endpoint_sensor.SageMakerEndpointSensor
+.. autoclass:: airflow.contrib.sensors.sagemaker_training_sensor.SageMakerTrainingSensor
+.. autoclass:: airflow.contrib.sensors.sagemaker_transform_sensor.SageMakerTransformSensor
+.. autoclass:: airflow.contrib.sensors.sagemaker_tuning_sensor.SageMakerTuningSensor
 .. autoclass:: airflow.contrib.sensors.sftp_sensor.SFTPSensor
 .. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbBlobSensor
 
@@ -371,8 +397,10 @@ Community contributed hooks
 '''''''''''''''''''''''''''
 .. Alphabetize this list
 .. autoclass:: airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook
+.. autoclass:: airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook
 .. autoclass:: airflow.contrib.hooks.aws_hook.AwsHook
 .. autoclass:: airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook
+.. autoclass:: airflow.contrib.hooks.aws_sns_hook.AwsSnsHook
 .. autoclass:: airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook
 .. autoclass:: airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook
 .. autoclass:: airflow.contrib.hooks.bigquery_hook.BigQueryHook
@@ -393,6 +421,7 @@ Community contributed hooks
 .. autoclass:: airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook
 .. autoclass:: airflow.contrib.hooks.gcp_pubsub_hook.PubSubHook
 .. autoclass:: airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook
+.. autoclass:: airflow.contrib.hooks.imap_hook.ImapHook
 .. autoclass:: airflow.contrib.hooks.jenkins_hook.JenkinsHook
 .. autoclass:: airflow.contrib.hooks.jira_hook.JiraHook
 .. autoclass:: airflow.contrib.hooks.mongo_hook.MongoHook
@@ -400,6 +429,7 @@ Community contributed hooks
 .. autoclass:: airflow.contrib.hooks.qubole_hook.QuboleHook
 .. autoclass:: airflow.contrib.hooks.redis_hook.RedisHook
 .. autoclass:: airflow.contrib.hooks.redshift_hook.RedshiftHook
+.. autoclass:: airflow.contrib.hooks.sagemaker_hook.SageMakerHook
 .. autoclass:: airflow.contrib.hooks.salesforce_hook.SalesforceHook
 .. autoclass:: airflow.contrib.hooks.segment_hook.SegmentHook
 .. autoclass:: airflow.contrib.hooks.sftp_hook.SFTPHook

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/concepts.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/concepts.rst.txt b/_sources/concepts.rst.txt
index 50c18c9..95f6e6a 100644
--- a/_sources/concepts.rst.txt
+++ b/_sources/concepts.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Concepts
 ########
 
@@ -320,7 +337,7 @@ Connections
 ===========
 
 The connection information to external systems is stored in the Airflow
-metadata database and managed in the UI (``Menu -> Admin -> Connections``)
+metadata database and managed in the UI (``Menu -> Admin -> Connections``).
 A ``conn_id`` is defined there and hostname / login / password / schema
 information attached to it. Airflow pipelines can simply refer to the
 centrally managed ``conn_id`` without having to hard code any of this
@@ -332,17 +349,6 @@ from ``BaseHook``, Airflow will choose one connection randomly, allowing
 for some basic load balancing and fault tolerance when used in conjunction
 with retries.
 
-Airflow also has the ability to reference connections via environment
-variables from the operating system. But it only supports URI format. If you
-need to specify ``extra`` for your connection, please use web UI.
-
-If connections with the same ``conn_id`` are defined in both Airflow metadata
-database and environment variables, only the one in environment variables
-will be referenced by Airflow (for example, given ``conn_id`` ``postgres_master``,
-Airflow will search for ``AIRFLOW_CONN_POSTGRES_MASTER``
-in environment variables first and directly reference it if found,
-before it starts to search in metadata database).
-
 Many hooks have a default ``conn_id``, where operators using that hook do not
 need to supply an explicit connection ID. For example, the default
 ``conn_id`` for the :class:`~airflow.hooks.postgres_hook.PostgresHook` is
@@ -353,7 +359,7 @@ See :doc:`howto/manage-connections` for how to create and manage connections.
 Queues
 ======
 
-When using the CeleryExecutor, the celery queues that tasks are sent to
+When using the CeleryExecutor, the Celery queues that tasks are sent to
 can be specified. ``queue`` is an attribute of BaseOperator, so any
 task can be assigned to any queue. The default queue for the environment
 is defined in the ``airflow.cfg``'s ``celery -> default_queue``. This defines
@@ -361,7 +367,7 @@ the queue that tasks get assigned to when not specified, as well as which
 queue Airflow workers listen to when started.
 
 Workers can listen to one or multiple queues of tasks. When a worker is
-started (using the command ``airflow worker``), a set of comma delimited
+started (using the command ``airflow worker``), a set of comma-delimited
 queue names can be specified (e.g. ``airflow worker -q spark``). This worker
 will then only pick up tasks wired to the specified queue(s).
 
@@ -863,3 +869,32 @@ do the same, but then it is more to use a virtualenv and pip.
    to be available on the system if a module needs those. In other words only
    pure python modules can be packaged.
 
+
+.airflowignore
+''''''''''''''
+
+A ``.airflowignore`` file specifies the directories or files in ``DAG_FOLDER``
+that Airflow should intentionally ignore. Each line in ``.airflowignore``
+specifies a regular expression pattern, and directories or files whose names
+(not DAG id) match any of the patterns would be ignored (under the hood,
+``re.findall()`` is used to match the pattern). Overall it works like a
+``.gitignore`` file.
+
+``.airflowignore`` file should be put in your ``DAG_FOLDER``.
+For example, you can prepare a ``.airflowignore`` file with contents
+
+.. code::
+
+    project_a
+    tenant_[\d]
+
+
+Then files like "project_a_dag_1.py", "TESTING_project_a.py", "tenant_1.py",
+"project_a/dag_1.py", and "tenant_1/dag_1.py" in your ``DAG_FOLDER`` would be 
ignored
+(If a directory's name matches any of the patterns, this directory and all its 
subfolders
+would not be scanned by Airflow at all. This improves efficiency of DAG 
finding).
+
+The scope of a ``.airflowignore`` file is the directory it is in plus all its 
subfolders.
+You can also prepare ``.airflowignore`` file for a subfolder in ``DAG_FOLDER`` 
and it
+would only be applicable for that subfolder.
+
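
The queues hunk above notes that ``queue`` is an attribute of BaseOperator. A
hedged sketch routing one task to a dedicated Celery queue (DAG id, command,
and queue name are illustrative, not from this commit):

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.bash_operator import BashOperator

    dag = DAG('queue_example',                  # hypothetical DAG id
              start_date=datetime(2018, 1, 1),
              schedule_interval=None)

    # Only workers started with ``airflow worker -q spark`` pick this task up;
    # tasks with no explicit queue go to ``celery -> default_queue``.
    spark_job = BashOperator(
        task_id='submit_spark_job',
        bash_command='spark-submit my_job.py',  # placeholder command
        queue='spark',
        dag=dag,
    )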

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/faq.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/faq.rst.txt b/_sources/faq.rst.txt
index 4621208..42dfb27 100644
--- a/_sources/faq.rst.txt
+++ b/_sources/faq.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 FAQ
 ========
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/executor/use-celery.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/executor/use-celery.rst.txt b/_sources/howto/executor/use-celery.rst.txt
index fd6db96..71cae66 100644
--- a/_sources/howto/executor/use-celery.rst.txt
+++ b/_sources/howto/executor/use-celery.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Scaling Out with Celery
 =======================
 
@@ -44,4 +61,4 @@ Some caveats:
 
 - Make sure to use a database backed result backend
 - Make sure to set a visibility timeout in [celery_broker_transport_options] that exceeds the ETA of your longest running task
-- Tasks can and consume resources, make sure your worker as enough resources to run `worker_concurrency` tasks
+- Tasks can consume resources. Make sure your worker has enough resources to run `worker_concurrency` tasks

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/executor/use-dask.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/executor/use-dask.rst.txt b/_sources/howto/executor/use-dask.rst.txt
index 769ce17..6d3efcb 100644
--- a/_sources/howto/executor/use-dask.rst.txt
+++ b/_sources/howto/executor/use-dask.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Scaling Out with Dask
 =====================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/executor/use-mesos.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/executor/use-mesos.rst.txt b/_sources/howto/executor/use-mesos.rst.txt
index c3bf95a..d67579d 100644
--- a/_sources/howto/executor/use-mesos.rst.txt
+++ b/_sources/howto/executor/use-mesos.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Scaling Out with Mesos (community contributed)
 ==============================================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/index.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/index.rst.txt b/_sources/howto/index.rst.txt
index f9f160e..8a19a9e 100644
--- a/_sources/howto/index.rst.txt
+++ b/_sources/howto/index.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 How-to Guides
 =============
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/initialize-database.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/initialize-database.rst.txt b/_sources/howto/initialize-database.rst.txt
index c85142f..fad75be 100644
--- a/_sources/howto/initialize-database.rst.txt
+++ b/_sources/howto/initialize-database.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Initializing a Database Backend
 ===============================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/manage-connections.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/manage-connections.rst.txt b/_sources/howto/manage-connections.rst.txt
index f520315..9c324d2 100644
--- a/_sources/howto/manage-connections.rst.txt
+++ b/_sources/howto/manage-connections.rst.txt
@@ -1,9 +1,26 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Managing Connections
 =====================
 
 Airflow needs to know how to connect to your environment. Information
 such as hostname, port, login and passwords to other systems and services is
-handled in the ``Admin->Connection`` section of the UI. The pipeline code you
+handled in the ``Admin->Connections`` section of the UI. The pipeline code you
 will author will reference the 'conn_id' of the Connection objects.
 
 .. image:: ../img/connections.png
@@ -17,7 +34,7 @@ more information.
 Creating a Connection with the UI
 ---------------------------------
 
-Open the ``Admin->Connection`` section of the UI. Click the ``Create`` link
+Open the ``Admin->Connections`` section of the UI. Click the ``Create`` link
 to create a new connection.
 
 .. image:: ../img/connection_create.png
@@ -34,7 +51,7 @@ to create a new connection.
 Editing a Connection with the UI
 --------------------------------
 
-Open the ``Admin->Connection`` section of the UI. Click the pencil icon next
+Open the ``Admin->Connections`` section of the UI. Click the pencil icon next
 to the connection you wish to edit in the connection list.
 
 .. image:: ../img/connection_edit.png
@@ -133,3 +150,81 @@ Scopes (comma separated)
         Scopes are ignored when using application default credentials. See
         issue `AIRFLOW-2522
         <https://issues.apache.org/jira/browse/AIRFLOW-2522>`_.
+
+MySQL
+~~~~~
+The MySQL connect type allows to connect with MySQL database.
+
+Configuring the Connection
+''''''''''''''''''''''''''
+Host (required)
+    The host to connect to.
+
+Schema (optional)
+    Specify the schema name to be used in the database.
+
+Login (required)
+    Specify the user name to connect.
+    
+Password (required)
+    Specify the password to connect.    
+    
+Extra (optional)
+    Specify the extra parameters (as json dictionary) that can be used in mysql
+    connection. The following parameters are supported:
+
+    * **charset**: specify charset of the connection
+    * **cursor**: one of "sscursor", "dictcursor, "ssdictcursor" - specifies cursor class to be
+      used
+    * **local_infile**: controls MySQL's LOCAL capability (permitting local data loading by
+      clients). See `MySQLdb docs <https://mysqlclient.readthedocs.io/user_guide.html>`_
+      for details.
+    * **unix_socket**: UNIX socket used instead of the default socket
+    * **ssl**: Dictionary of SSL parameters that control connecting using SSL (those
+      parameters are server specific and should contain "ca", "cert", "key", "capath",
+      "cipher" parameters. See
+      `MySQLdb docs <https://mysqlclient.readthedocs.io/user_guide.html>`_ for details.
+      Note that in order to be useful in URL notation, this parameter might also be
+      a string where the SSL dictionary is a string-encoded JSON dictionary.
+
+    Example "extras" field:
+
+    .. code-block:: json
+
+       {
+          "charset": "utf8",
+          "cursorclass": "sscursor",
+          "local_infile": true,
+          "unix_socket": "/var/socket",
+          "ssl": {
+            "cert": "/tmp/client-cert.pem",
+            "ca": "/tmp/server-ca.pem'",
+            "key": "/tmp/client-key.pem"
+          }
+       }
+
+    or
+
+    .. code-block:: json
+
+       {
+          "charset": "utf8",
+          "cursorclass": "sscursor",
+          "local_infile": true,
+          "unix_socket": "/var/socket",
+          "ssl": "{\"cert\": \"/tmp/client-cert.pem\", \"ca\": 
\"/tmp/server-ca.pem\", \"key\": \"/tmp/client-key.pem\"}"
+       }
+
+    When specifying the connection as URI (in AIRFLOW_CONN_* variable) you should specify it
+    following the standard syntax of DB connections, where extras as passed as parameters
+    of the URI (note that all components of the URI should be URL-encoded).
+
+    For example:
+
+    .. code-block:: bash
+
+       mysql://mysql_user:XXXXXXXXXXXX@1.1.1.1:3306/mysqldb?ssl=%7B%22cert%22%3A+%22%2Ftmp%2Fclient-cert.pem%22%2C+%22ca%22%3A+%22%2Ftmp%2Fserver-ca.pem%22%2C+%22key%22%3A+%22%2Ftmp%2Fclient-key.pem%22%7D
+
+    .. note::
+        If encounter UnicodeDecodeError while working with MySQL connection check
+        the charset defined is matched to the database charset.
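
Once such a MySQL connection exists (created in the UI or through an
AIRFLOW_CONN_* URI as above), task code refers to it only by id. A hedged
sketch; the connection id is a placeholder:

    from airflow.hooks.mysql_hook import MySqlHook

    # ``mysql_conn_id`` must match a connection defined in Admin->Connections
    # (or an AIRFLOW_CONN_MYSQL_DEFAULT environment variable).
    hook = MySqlHook(mysql_conn_id='mysql_default')
    records = hook.get_records('SELECT 1')  # simple connectivity check
    print(records)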

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/operator.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/operator.rst.txt b/_sources/howto/operator.rst.txt
index efd544e..2c84a7f 100644
--- a/_sources/howto/operator.rst.txt
+++ b/_sources/howto/operator.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Using Operators
 ===============
 
@@ -101,3 +118,479 @@ to execute a BigQuery load job.
     :dedent: 4
     :start-after: [START howto_operator_gcs_to_bq]
     :end-before: [END howto_operator_gcs_to_bq]
+
+GceInstanceStartOperator
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Allows to start an existing Google Compute Engine instance.
+
+In this example parameter values are extracted from Airflow variables.
+Moreover, the ``default_args`` dict is used to pass common arguments to all operators in a single DAG.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_compute.py
+    :language: python
+    :start-after: [START howto_operator_gce_args]
+    :end-before: [END howto_operator_gce_args]
+
+
+Define the :class:`~airflow.contrib.operators.gcp_compute_operator
+.GceInstanceStartOperator` by passing the required arguments to the constructor.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_compute.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_gce_start]
+    :end-before: [END howto_operator_gce_start]
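+
+A minimal instantiation sketch (values are assumed here; the example DAG above
+reads them from Airflow variables):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_compute_operator import (
+        GceInstanceStartOperator,
+    )
+
+    gce_instance_start = GceInstanceStartOperator(
+        project_id='example-project',    # assumed GCP project id
+        zone='europe-west1-b',           # assumed zone of the instance
+        resource_id='example-instance',  # assumed instance name
+        task_id='gcp_compute_start_task',
+    )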
+
+GceInstanceStopOperator
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Stops an existing Google Compute Engine instance.
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator`
+above.
+
+Define the
+:class:`~airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator`
+by passing the required arguments to the constructor.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_compute.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_gce_stop]
+    :end-before: [END howto_operator_gce_stop]
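+
+A minimal instantiation sketch (assumed values, mirroring the start example):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_compute_operator import (
+        GceInstanceStopOperator,
+    )
+
+    gce_instance_stop = GceInstanceStopOperator(
+        project_id='example-project',    # assumed
+        zone='europe-west1-b',           # assumed
+        resource_id='example-instance',  # assumed
+        task_id='gcp_compute_stop_task',
+    )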
+
+GceSetMachineTypeOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Changes the machine type of a stopped Google Compute Engine instance to the
+specified machine type.
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator`
+above.
+
+Define the
+:class:`~airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator`
+by passing the required arguments to the constructor.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_compute.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_gce_set_machine_type]
+    :end-before: [END howto_operator_gce_set_machine_type]
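+
+A minimal instantiation sketch (assumed values; the ``machineType`` path follows
+the Compute Engine API convention):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_compute_operator import (
+        GceSetMachineTypeOperator,
+    )
+
+    gce_set_machine_type = GceSetMachineTypeOperator(
+        project_id='example-project',    # assumed
+        zone='europe-west1-b',           # assumed
+        resource_id='example-instance',  # assumed
+        body={'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-1'},
+        task_id='gcp_compute_set_machine_type',
+    )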
+
+
+GcfFunctionDeleteOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Use the ``default_args`` dict to pass arguments to the operator.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_delete_args]
+    :end-before: [END howto_operator_gcf_delete_args]
+
+
+Use the
+:class:`~airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator`
+to delete a function from Google Cloud Functions.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_delete]
+    :end-before: [END howto_operator_gcf_delete]
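+
+A minimal instantiation sketch (the fully-qualified function name is assumed):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_function_operator import (
+        GcfFunctionDeleteOperator,
+    )
+
+    gcf_delete_task = GcfFunctionDeleteOperator(
+        task_id='gcf_delete_task',
+        # Assumed fully-qualified function name:
+        name='projects/example-project/locations/europe-west1/functions/hello',
+    )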
+
+Troubleshooting
+"""""""""""""""
+If you want to run or deploy an operator using a service account and get
+“forbidden 403” errors, it means that your service account does not have the
+correct Cloud IAM permissions.
+
+1. Assign your Service Account the Cloud Functions Developer role.
+2. Grant the user the Cloud IAM Service Account User role on the Cloud Functions
+   runtime service account.
+
+The typical way of assigning Cloud IAM permissions with ``gcloud`` is
+shown below. Just replace PROJECT_ID with the ID of your Google Cloud Platform
+project and SERVICE_ACCOUNT_EMAIL with the email address of your service account.
+
+
+.. code-block:: bash
+
+  gcloud iam service-accounts add-iam-policy-binding \
+    PROJECT_ID@appspot.gserviceaccount.com \
+    --member="serviceAccount:[SERVICE_ACCOUNT_EMAIL]" \
+    --role="roles/iam.serviceAccountUser"
+
+
+See `Adding the IAM service agent user role to the runtime service
+<https://cloud.google.com/functions/docs/reference/iam/roles#adding_the_iam_service_agent_user_role_to_the_runtime_service_account>`_
+for details.
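+
+For step 1 above, a binding of the following shape can be used (illustrative;
+adapt the project and account values to your setup):
+
+.. code-block:: bash
+
+  gcloud projects add-iam-policy-binding PROJECT_ID \
+    --member="serviceAccount:[SERVICE_ACCOUNT_EMAIL]" \
+    --role="roles/cloudfunctions.developer"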
+
+GcfFunctionDeployOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Use the
+:class:`~airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator`
+to deploy a function to Google Cloud Functions.
+
+The following examples of Airflow variables show various variants and combinations
+of ``default_args`` that you can use. The variables are defined as follows:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_deploy_variables]
+    :end-before: [END howto_operator_gcf_deploy_variables]
+
+With those variables you can define the body of the request:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_deploy_body]
+    :end-before: [END howto_operator_gcf_deploy_body]
+
+When you create a DAG, the ``default_args`` dictionary can be used to pass the
+body and other arguments:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_deploy_args]
+    :end-before: [END howto_operator_gcf_deploy_args]
+
+Note that neither the body nor the default args are complete in the above examples.
+Depending on the variables set, there might be different variants on how to pass the
+source-code-related fields. Currently, you can pass either ``sourceArchiveUrl``,
+``sourceRepository`` or ``sourceUploadUrl`` as described in the
+`CloudFunction API specification
+<https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions#CloudFunction>`_.
+Additionally, ``default_args`` might contain a ``zip_path`` parameter to run the
+extra step of uploading the source code before deploying it. In that case, you
+also need to provide an empty ``sourceUploadUrl`` parameter in the body.
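+
+As an illustration, a hypothetical body using ``sourceUploadUrl`` might look like
+this (all values are assumed):
+
+.. code-block:: python
+
+    body = {
+        "name": "projects/example-project/locations/europe-west1/functions/hello",
+        "entryPoint": "helloWorld",   # assumed entry point in the source
+        "runtime": "nodejs6",         # assumed runtime
+        "httpsTrigger": {},
+        # Deliberately empty: with zip_path in default_args, the upload step
+        # fills this field in before the deploy request is sent.
+        "sourceUploadUrl": "",
+    }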
+
+Based on the variables defined above, example logic of setting the source code
+related fields is shown here:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_deploy_variants]
+    :end-before: [END howto_operator_gcf_deploy_variants]
+
+The code to create the operator:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_function_deploy_delete.py
+    :language: python
+    :start-after: [START howto_operator_gcf_deploy]
+    :end-before: [END howto_operator_gcf_deploy]
+
+Troubleshooting
+"""""""""""""""
+
+If you want to run or deploy an operator using a service account and get
+“forbidden 403” errors, it means that your service account does not have the
+correct Cloud IAM permissions.
+
+1. Assign your Service Account the Cloud Functions Developer role.
+2. Grant the user the Cloud IAM Service Account User role on the Cloud Functions
+   runtime service account.
+
+The typical way of assigning Cloud IAM permissions with ``gcloud`` is
+shown below. Just replace PROJECT_ID with the ID of your Google Cloud Platform
+project and SERVICE_ACCOUNT_EMAIL with the email address of your service account.
+
+.. code-block:: bash
+
+  gcloud iam service-accounts add-iam-policy-binding \
+    PROJECT_ID@appspot.gserviceaccount.com \
+    --member="serviceAccount:[SERVICE_ACCOUNT_EMAIL]" \
+    --role="roles/iam.serviceAccountUser"
+
+
+See `Adding the IAM service agent user role to the runtime service
+<https://cloud.google.com/functions/docs/reference/iam/roles#adding_the_iam_service_agent_user_role_to_the_runtime_service_account>`_
+for details.
+
+If the source code for your function is in Google Source Repository, make sure that
+your service account has the Source Repository Viewer role so that the source code
+can be downloaded if necessary.
+
+CloudSqlInstanceDatabaseCreateOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Creates a new database inside a Cloud SQL instance.
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator`.
+
+Arguments
+"""""""""
+
+Some arguments in the example DAG are taken from environment variables:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_arguments]
+    :end-before: [END howto_operator_cloudsql_arguments]
+
+Using the operator
+""""""""""""""""""
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_cloudsql_db_create]
+    :end-before: [END howto_operator_cloudsql_db_create]
+
+Example request body:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_db_create_body]
+    :end-before: [END howto_operator_cloudsql_db_create_body]
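+
+A minimal instantiation sketch (values are assumed; the example DAG reads them
+from environment variables):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_sql_operator import (
+        CloudSqlInstanceDatabaseCreateOperator,
+    )
+
+    sql_db_create_task = CloudSqlInstanceDatabaseCreateOperator(
+        project_id='example-project',   # assumed
+        instance='test-instance',       # assumed Cloud SQL instance name
+        body={'instance': 'test-instance',
+              'name': 'testdb',
+              'project': 'example-project'},
+        task_id='sql_db_create_task',
+    )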
+
+Templating
+""""""""""
+
+.. literalinclude:: ../../airflow/contrib/operators/gcp_sql_operator.py
+  :language: python
+  :dedent: 4
+  :start-after: [START gcp_sql_db_create_template_fields]
+  :end-before: [END gcp_sql_db_create_template_fields]
+
+More information
+""""""""""""""""
+
+See `Google Cloud SQL API documentation for database insert
+<https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/databases/insert>`_.
+
+CloudSqlInstanceDatabaseDeleteOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Deletes a database from a Cloud SQL instance.
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator`.
+
+Arguments
+"""""""""
+
+Some arguments in the example DAG are taken from environment variables:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_arguments]
+    :end-before: [END howto_operator_cloudsql_arguments]
+
+Using the operator
+""""""""""""""""""
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_cloudsql_db_delete]
+    :end-before: [END howto_operator_cloudsql_db_delete]
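+
+A minimal instantiation sketch (assumed values):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_sql_operator import (
+        CloudSqlInstanceDatabaseDeleteOperator,
+    )
+
+    sql_db_delete_task = CloudSqlInstanceDatabaseDeleteOperator(
+        project_id='example-project',   # assumed
+        instance='test-instance',       # assumed
+        database='testdb',              # assumed database name
+        task_id='sql_db_delete_task',
+    )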
+
+Templating
+""""""""""
+
+.. literalinclude:: ../../airflow/contrib/operators/gcp_sql_operator.py
+  :language: python
+  :dedent: 4
+  :start-after: [START gcp_sql_db_delete_template_fields]
+  :end-before: [END gcp_sql_db_delete_template_fields]
+
+More information
+""""""""""""""""
+
+See `Google Cloud SQL API documentation for database delete
+<https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/databases/delete>`_.
+
+CloudSqlInstanceDatabasePatchOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Updates a resource containing information about a database inside a Cloud SQL
+instance using patch semantics.
+See: https://cloud.google.com/sql/docs/mysql/admin-api/how-tos/performance#patch
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator`.
+
+Arguments
+"""""""""
+
+Some arguments in the example DAG are taken from environment variables:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_arguments]
+    :end-before: [END howto_operator_cloudsql_arguments]
+
+Using the operator
+""""""""""""""""""
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_cloudsql_db_patch]
+    :end-before: [END howto_operator_cloudsql_db_patch]
+
+Example request body:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_db_patch_body]
+    :end-before: [END howto_operator_cloudsql_db_patch_body]
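+
+As an illustration, a hypothetical patch body that changes only the charset and
+collation (both are fields of the Database resource; values assumed):
+
+.. code-block:: python
+
+    db_patch_body = {
+        "charset": "utf16",
+        "collation": "utf16_general_ci",
+    }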
+
+Templating
+""""""""""
+
+.. literalinclude:: ../../airflow/contrib/operators/gcp_sql_operator.py
+  :language: python
+  :dedent: 4
+  :start-after: [START gcp_sql_db_patch_template_fields]
+  :end-before: [END gcp_sql_db_patch_template_fields]
+
+More information
+""""""""""""""""
+
+See `Google Cloud SQL API documentation for database patch
+<https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/databases/patch>`_.
+
+CloudSqlInstanceDeleteOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Deletes a Cloud SQL instance in Google Cloud Platform.
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator`.
+
+Arguments
+"""""""""
+
+Some arguments in the example DAG are taken from environment variables:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_arguments]
+    :end-before: [END howto_operator_cloudsql_arguments]
+
+Using the operator
+""""""""""""""""""
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_cloudsql_delete]
+    :end-before: [END howto_operator_cloudsql_delete]
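+
+A minimal instantiation sketch (assumed values):
+
+.. code-block:: python
+
+    from airflow.contrib.operators.gcp_sql_operator import (
+        CloudSqlInstanceDeleteOperator,
+    )
+
+    sql_instance_delete_task = CloudSqlInstanceDeleteOperator(
+        project_id='example-project',   # assumed
+        instance='test-instance',       # assumed
+        task_id='sql_instance_delete_task',
+    )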
+
+Templating
+""""""""""
+
+.. literalinclude:: ../../airflow/contrib/operators/gcp_sql_operator.py
+  :language: python
+  :dedent: 4
+  :start-after: [START gcp_sql_delete_template_fields]
+  :end-before: [END gcp_sql_delete_template_fields]
+
+More information
+""""""""""""""""
+
+See `Google Cloud SQL API documentation for delete
+<https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances/delete>`_.
+
+.. _CloudSqlInstanceCreateOperator:
+
+CloudSqlInstanceCreateOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Creates a new Cloud SQL instance in Google Cloud Platform.
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator`.
+
+If an instance with the same name exists, no action will be taken and the operator
+will succeed.
+
+Arguments
+"""""""""
+
+Some arguments in the example DAG are taken from environment variables:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_arguments]
+    :end-before: [END howto_operator_cloudsql_arguments]
+
+Example body defining the instance:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_create_body]
+    :end-before: [END howto_operator_cloudsql_create_body]
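+
+As an illustration, a hypothetical minimal body (values assumed; see the API
+documentation linked below for the full schema):
+
+.. code-block:: python
+
+    body = {
+        "name": "test-instance",
+        "settings": {
+            "tier": "db-n1-standard-1",   # assumed machine tier
+        },
+        "databaseVersion": "MYSQL_5_7",   # assumed
+        "region": "europe-west1",         # assumed
+    }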
+
+Using the operator
+""""""""""""""""""
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_cloudsql_create]
+    :end-before: [END howto_operator_cloudsql_create]
+
+Templating
+""""""""""
+
+.. literalinclude:: ../../airflow/contrib/operators/gcp_sql_operator.py
+  :language: python
+  :dedent: 4
+  :start-after: [START gcp_sql_create_template_fields]
+  :end-before: [END gcp_sql_create_template_fields]
+
+More information
+""""""""""""""""
+
+See `Google Cloud SQL API documentation for insert
+<https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances/insert>`_.
+
+
+.. _CloudSqlInstancePatchOperator:
+
+CloudSqlInstancePatchOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Updates settings of a Cloud SQL instance in Google Cloud Platform (partial update).
+
+For parameter definition take a look at
+:class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator`.
+
+This is a partial update, so only values for the settings specified in the body
+will be set/updated. The rest of the existing instance's configuration will
+remain unchanged.
+
+Arguments
+"""""""""
+
+Some arguments in the example DAG are taken from environment variables:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_arguments]
+    :end-before: [END howto_operator_cloudsql_arguments]
+
+Example body defining the patch:
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :start-after: [START howto_operator_cloudsql_patch_body]
+    :end-before: [END howto_operator_cloudsql_patch_body]
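+
+As an illustration, a hypothetical patch body that only enables backups (all
+other settings stay untouched; values assumed):
+
+.. code-block:: python
+
+    patch_body = {
+        "settings": {
+            "backupConfiguration": {
+                "enabled": True,
+                "startTime": "05:00",
+            }
+        }
+    }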
+
+Using the operator
+""""""""""""""""""
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_sql.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_cloudsql_patch]
+    :end-before: [END howto_operator_cloudsql_patch]
+
+Templating
+""""""""""
+
+.. literalinclude:: ../../airflow/contrib/operators/gcp_sql_operator.py
+  :language: python
+  :dedent: 4
+  :start-after: [START gcp_sql_patch_template_fields]
+  :end-before: [END gcp_sql_patch_template_fields]
+
+More information
+""""""""""""""""
+
+See `Google Cloud SQL API documentation for patch
+<https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances/patch>`_.

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/run-with-systemd.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/run-with-systemd.rst.txt b/_sources/howto/run-with-systemd.rst.txt
index 1dc1383..131fc3d 100644
--- a/_sources/howto/run-with-systemd.rst.txt
+++ b/_sources/howto/run-with-systemd.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Running Airflow with systemd
 ============================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/run-with-upstart.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/run-with-upstart.rst.txt b/_sources/howto/run-with-upstart.rst.txt
index c18a203..adfb85a 100644
--- a/_sources/howto/run-with-upstart.rst.txt
+++ b/_sources/howto/run-with-upstart.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Running Airflow with upstart
 ============================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/secure-connections.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/secure-connections.rst.txt b/_sources/howto/secure-connections.rst.txt
index f9e252c..719db2f 100644
--- a/_sources/howto/secure-connections.rst.txt
+++ b/_sources/howto/secure-connections.rst.txt
@@ -1,16 +1,34 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Securing Connections
 ====================
 
 By default, Airflow will save the passwords for the connection in plain text
 within the metadata database. The ``crypto`` package is highly recommended
 during installation. The ``crypto`` package does require that your operating
-system have libffi-dev installed.
+system has ``libffi-dev`` installed.
+
+If the ``crypto`` package was not installed initially, it means that your Fernet
+key in ``airflow.cfg`` is empty.
 
-If ``crypto`` package was not installed initially, you can still enable encryption for
-connections by following steps below:
+You can still enable encryption for passwords within connections by following
+the steps below:
 
 1. Install crypto package ``pip install apache-airflow[crypto]``
-2. Generate fernet_key, using this code snippet below. fernet_key must be a base64-encoded 32-byte key.
+2. Generate a ``fernet_key`` using the code snippet below. ``fernet_key`` must be a base64-encoded 32-byte key.
 
 .. code:: python
 
@@ -26,7 +44,7 @@ variable over the value in ``airflow.cfg``:
 .. code-block:: bash
 
   # Note the double underscores
-  EXPORT AIRFLOW__CORE__FERNET_KEY = your_fernet_key
+  export AIRFLOW__CORE__FERNET_KEY=your_fernet_key
 
 4. Restart Airflow webserver.
 5. For existing connections (the ones that you had defined before installing 
``airflow[crypto]`` and creating a Fernet key), you need to open each 
connection in the connection admin UI, re-type the password, and save it.

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/set-config.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/set-config.rst.txt b/_sources/howto/set-config.rst.txt
index 2caf5d5..cedce11 100644
--- a/_sources/howto/set-config.rst.txt
+++ b/_sources/howto/set-config.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Setting Configuration Options
 =============================
 
@@ -20,16 +37,30 @@ or by creating a corresponding environment variable:
 
     AIRFLOW__CORE__SQL_ALCHEMY_CONN=my_conn_string
 
-You can also derive the connection string at run time by appending ``_cmd`` to the key like this:
+You can also derive the connection string at run time by appending ``_cmd`` to
+the key like this:
 
 .. code-block:: bash
 
     [core]
     sql_alchemy_conn_cmd = bash_command_to_run
 
--But only three such configuration elements namely sql_alchemy_conn, broker_url and result_backend can be fetched as a command. The idea behind this is to not store passwords on boxes in plain text files. The order of precedence is as follows -
+The following config options support this ``_cmd`` version:
+
+* ``sql_alchemy_conn`` in ``[core]`` section
+* ``fernet_key`` in ``[core]`` section
+* ``broker_url`` in ``[celery]`` section
+* ``result_backend`` in ``[celery]`` section
+* ``password`` in ``[atlas]`` section
+* ``smtp_password`` in ``[smtp]`` section
+* ``bind_password`` in ``[ldap]`` section
+* ``git_password`` in ``[kubernetes]`` section
+
+The idea behind this is to not store passwords on boxes in plain text files.
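+
+For example, a hypothetical setup that reads the connection string from a
+secrets file at run time (the path is illustrative):
+
+.. code-block:: bash
+
+    [core]
+    sql_alchemy_conn_cmd = cat /run/secrets/sql_alchemy_conn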
+
+The order of precedence for all config options is as follows:
 
 1. environment variable
 2. configuration in airflow.cfg
 3. command in airflow.cfg
-4. default
+4. Airflow's built-in defaults

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/use-test-config.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/use-test-config.rst.txt b/_sources/howto/use-test-config.rst.txt
index 5cb4790..4f272af 100644
--- a/_sources/howto/use-test-config.rst.txt
+++ b/_sources/howto/use-test-config.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Using the Test Mode Configuration
 =================================
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/howto/write-logs.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/write-logs.rst.txt b/_sources/howto/write-logs.rst.txt
index ac30e84..935e797 100644
--- a/_sources/howto/write-logs.rst.txt
+++ b/_sources/howto/write-logs.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 Writing Logs
 ============
 
@@ -11,7 +28,7 @@ directory.
 In addition, users can supply a remote location for storing logs and log
 backups in cloud storage.
 
-In the Airflow Web UI, local logs take precedence over remote logs. If local logs
+In the Airflow Web UI, local logs take precedence over remote logs. If local logs
 can not be found or accessed, the remote logs will be displayed. Note that logs
 are only sent to remote storage once a task completes (including failure). In 
other
 words, remote logs for running tasks are unavailable. Logs are stored in the 
log
@@ -89,54 +106,22 @@ Writing Logs to Google Cloud Storage
 
 Follow the steps below to enable Google Cloud Storage logging.
 
-#. Airflow's logging system requires a custom .py file to be located in the 
``PYTHONPATH``, so that it's importable from Airflow. Start by creating a 
directory to store the config file. ``$AIRFLOW_HOME/config`` is recommended.
-#. Create empty files called ``$AIRFLOW_HOME/config/log_config.py`` and 
``$AIRFLOW_HOME/config/__init__.py``.
-#. Copy the contents of ``airflow/config_templates/airflow_local_settings.py`` 
into the ``log_config.py`` file that was just created in the step above.
-#. Customize the following portions of the template:
-
-    .. code-block:: bash
-
-        # Add this variable to the top of the file. Note the trailing slash.
-        GCS_LOG_FOLDER = 'gs://<bucket where logs should be persisted>/'
-
-        # Rename DEFAULT_LOGGING_CONFIG to LOGGING CONFIG
-        LOGGING_CONFIG = ...
-
-        # Add a GCSTaskHandler to the 'handlers' block of the LOGGING_CONFIG 
variable
-        'gcs.task': {
-            'class': 'airflow.utils.log.gcs_task_handler.GCSTaskHandler',
-            'formatter': 'airflow.task',
-            'base_log_folder': os.path.expanduser(BASE_LOG_FOLDER),
-            'gcs_log_folder': GCS_LOG_FOLDER,
-            'filename_template': FILENAME_TEMPLATE,
-        },
-
-        # Update the airflow.task and airflow.task_runner blocks to be 
'gcs.task' instead of 'file.task'.
-        'loggers': {
-            'airflow.task': {
-                'handlers': ['gcs.task'],
-                ...
-            },
-            'airflow.task_runner': {
-                'handlers': ['gcs.task'],
-                ...
-            },
-            'airflow': {
-                'handlers': ['console'],
-                ...
-            },
-        }
-
-#. Make sure a Google Cloud Platform connection hook has been defined in 
Airflow. The hook should have read and write access to the Google Cloud Storage 
bucket defined above in ``GCS_LOG_FOLDER``.
-
-#. Update ``$AIRFLOW_HOME/airflow.cfg`` to contain:
-
-    .. code-block:: bash
+To enable this feature, ``airflow.cfg`` must be configured as in this
+example:
 
-        task_log_reader = gcs.task
-        logging_config_class = log_config.LOGGING_CONFIG
-        remote_log_conn_id = <name of the Google cloud platform hook>
+.. code-block:: bash
 
+    [core]
+    # Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elasticsearch.
+    # Users must supply an Airflow connection id that provides access to the storage
+    # location. If remote_logging is set to true, see UPDATING.md for additional
+    # configuration requirements.
+    remote_logging = True
+    remote_base_log_folder = gs://my-bucket/path/to/logs
+    remote_log_conn_id = MyGCSConn
+
+#. Install the ``gcp_api`` package first, like so: ``pip install apache-airflow[gcp_api]``.
+#. Make sure a Google Cloud Platform connection hook has been defined in Airflow.
+   The hook should have read and write access to the Google Cloud Storage bucket
+   defined above in ``remote_base_log_folder``.
 #. Restart the Airflow webserver and scheduler, and trigger (or wait for) a 
new task execution.
 #. Verify that logs are showing up for newly executed tasks in the bucket 
you've defined.
 #. Verify that the Google Cloud Storage viewer is working in the UI. Pull up a 
newly executed task, and verify that you see something like:
@@ -150,10 +135,3 @@ Follow the steps below to enable Google Cloud Storage logging.
        [2017-10-03 21:57:51,306] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,306] {models.py:186} INFO - Filling up the DagBag from /airflow/dags/example_dags/example_bash_operator.py
 
 Note the top line that says it's reading from the remote log file.
-
-Please be aware that if you were persisting logs to Google Cloud Storage
-using the old-style airflow.cfg configuration method, the old logs will no
-longer be visible in the Airflow UI, though they'll still exist in Google
-Cloud Storage. This is a backwards incompatbile change. If you are unhappy
-with it, you can change the ``FILENAME_TEMPLATE`` to reflect the old-style
-log filename format.

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/1f06fa0e/_sources/index.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt
index 4cd96ea..4c18ce5 100644
--- a/_sources/index.rst.txt
+++ b/_sources/index.rst.txt
@@ -1,3 +1,20 @@
+..  Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+..    http://www.apache.org/licenses/LICENSE-2.0
+
+..  Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
 
 .. image:: img/pin_large.png
     :width: 100

