[AIRFLOW-2429] Fix contrib folder's flake8 errors

Fix contrib/ folder's flake8 errors

Closes #3394 from kaxil/AIRFLOW-2429
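
For context, the warnings addressed here are the kind reported by a run
along the lines of (assuming flake8 is installed; the project's tox/CI
invocation may differ):

    flake8 airflow/contrib/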


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/06b62c42
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/06b62c42
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/06b62c42

Branch: refs/heads/master
Commit: 06b62c42b0b55ea55b86b130317594738d2f36a2
Parents: 1db3073
Author: Kaxil Naik <[email protected]>
Authored: Mon May 21 19:40:07 2018 +0100
Committer: Kaxil Naik <[email protected]>
Committed: Mon May 21 19:40:07 2018 +0100

----------------------------------------------------------------------
 airflow/contrib/__init__.py                     |   5 +-
 airflow/contrib/auth/__init__.py                |   5 +-
 airflow/contrib/auth/backends/__init__.py       |   5 +-
 .../auth/backends/github_enterprise_auth.py     |  14 +--
 airflow/contrib/auth/backends/google_auth.py    |  10 +-
 airflow/contrib/auth/backends/kerberos_auth.py  |  11 +-
 airflow/contrib/auth/backends/ldap_auth.py      |  24 ++---
 airflow/contrib/auth/backends/password_auth.py  |   4 +-
 .../example_dags/example_databricks_operator.py |   4 +-
 .../example_emr_job_flow_automatic_steps.py     |   7 +-
 .../example_emr_job_flow_manual_steps.py        |  13 ++-
 .../contrib/example_dags/example_pubsub_flow.py |   4 +-
 .../example_dags/example_qubole_operator.py     |  23 +++--
 .../example_dags/example_qubole_sensor.py       |  30 +++---
 .../contrib/example_dags/example_twitter_dag.py |   4 +-
 .../contrib/executors/kubernetes_executor.py    |   2 +-
 airflow/contrib/executors/mesos_executor.py     |  43 +++++---
 airflow/contrib/hooks/__init__.py               |   9 +-
 airflow/contrib/hooks/aws_dynamodb_hook.py      |  10 +-
 airflow/contrib/hooks/aws_hook.py               |  16 +--
 airflow/contrib/hooks/aws_lambda_hook.py        |   7 +-
 airflow/contrib/hooks/bigquery_hook.py          |   4 +-
 airflow/contrib/hooks/cloudant_hook.py          |   8 +-
 airflow/contrib/hooks/databricks_hook.py        |  15 +--
 airflow/contrib/hooks/datadog_hook.py           |  18 ++--
 airflow/contrib/hooks/datastore_hook.py         |  40 ++++---
 airflow/contrib/hooks/discord_webhook_hook.py   |   4 +-
 airflow/contrib/hooks/emr_hook.py               |  10 +-
 airflow/contrib/hooks/fs_hook.py                |   4 +-
 airflow/contrib/hooks/ftp_hook.py               |   6 +-
 airflow/contrib/hooks/gcp_api_base_hook.py      |   4 +-
 airflow/contrib/hooks/gcp_dataflow_hook.py      |   4 +-
 airflow/contrib/hooks/gcp_dataproc_hook.py      |   4 +-
 airflow/contrib/hooks/gcp_pubsub_hook.py        |   4 +-
 airflow/contrib/hooks/gcs_hook.py               |  26 +++--
 airflow/contrib/hooks/jenkins_hook.py           |   4 +-
 airflow/contrib/hooks/jira_hook.py              |   4 +-
 airflow/contrib/hooks/qubole_hook.py            |  14 +--
 airflow/contrib/hooks/redis_hook.py             |   4 +-
 airflow/contrib/hooks/redshift_hook.py          |   4 +-
 airflow/contrib/hooks/salesforce_hook.py        |   4 +-
 airflow/contrib/hooks/sftp_hook.py              |   4 +-
 airflow/contrib/hooks/slack_webhook_hook.py     |   4 +-
 airflow/contrib/hooks/snowflake_hook.py         |   4 +-
 airflow/contrib/hooks/spark_jdbc_hook.py        |   4 +-
 airflow/contrib/hooks/spark_jdbc_script.py      |   4 +-
 airflow/contrib/hooks/spark_sql_hook.py         |  11 +-
 airflow/contrib/hooks/sqoop_hook.py             |  25 +++--
 airflow/contrib/hooks/ssh_hook.py               |   7 +-
 airflow/contrib/hooks/vertica_hook.py           |   4 +-
 airflow/contrib/hooks/wasb_hook.py              |   4 +-
 airflow/contrib/kubernetes/kube_client.py       |   1 +
 airflow/contrib/kubernetes/pod_generator.py     |   2 -
 airflow/contrib/operators/__init__.py           |   7 +-
 airflow/contrib/operators/awsbatch_operator.py  |  17 +--
 .../operators/bigquery_check_operator.py        |   7 +-
 airflow/contrib/operators/bigquery_get_data.py  |   4 +-
 airflow/contrib/operators/bigquery_operator.py  |   4 +-
 .../operators/bigquery_table_delete_operator.py |   4 +-
 .../contrib/operators/bigquery_to_bigquery.py   |   4 +-
 airflow/contrib/operators/bigquery_to_gcs.py    |   4 +-
 .../contrib/operators/databricks_operator.py    |  15 +--
 airflow/contrib/operators/dataflow_operator.py  |   4 +-
 airflow/contrib/operators/dataproc_operator.py  |  22 ++--
 .../operators/datastore_export_operator.py      |  15 +--
 .../operators/datastore_import_operator.py      |   9 +-
 .../operators/discord_webhook_operator.py       |   4 +-
 airflow/contrib/operators/druid_operator.py     |  13 ++-
 airflow/contrib/operators/ecs_operator.py       |  23 +++--
 .../contrib/operators/emr_add_steps_operator.py |   4 +-
 .../operators/emr_create_job_flow_operator.py   |   7 +-
 .../emr_terminate_job_flow_operator.py          |   4 +-
 airflow/contrib/operators/file_to_gcs.py        |   8 +-
 airflow/contrib/operators/file_to_wasb.py       |  10 +-
 .../contrib/operators/gcs_download_operator.py  |  11 +-
 airflow/contrib/operators/gcs_list_operator.py  |   4 +-
 airflow/contrib/operators/gcs_operator.py       |   4 +-
 airflow/contrib/operators/gcs_to_bq.py          |   9 +-
 airflow/contrib/operators/gcs_to_s3.py          |   4 +-
 airflow/contrib/operators/hipchat_operator.py   |   4 +-
 airflow/contrib/operators/hive_to_dynamodb.py   |   4 +-
 .../operators/jenkins_job_trigger_operator.py   |   4 +-
 airflow/contrib/operators/jira_operator.py      |   4 +-
 .../operators/kubernetes_pod_operator.py        |   1 +
 .../operators/mlengine_operator_utils.py        |   1 +
 .../operators/postgres_to_gcs_operator.py       |   4 +-
 airflow/contrib/operators/pubsub_operator.py    |   4 +-
 airflow/contrib/operators/qubole_operator.py    |   4 +-
 airflow/contrib/operators/s3_list_operator.py   |   4 +-
 airflow/contrib/operators/sftp_operator.py      |   7 +-
 .../contrib/operators/slack_webhook_operator.py |   4 +-
 airflow/contrib/operators/snowflake_operator.py |   4 +-
 .../contrib/operators/spark_jdbc_operator.py    |   4 +-
 airflow/contrib/operators/spark_sql_operator.py |  10 +-
 airflow/contrib/operators/sqoop_operator.py     |  24 +++--
 airflow/contrib/operators/ssh_operator.py       |   8 +-
 airflow/contrib/operators/vertica_operator.py   |   4 +-
 airflow/contrib/operators/vertica_to_hive.py    |  11 +-
 airflow/contrib/operators/vertica_to_mysql.py   |  19 +++-
 .../contrib/plugins/metastore_browser/main.py   |   7 +-
 airflow/contrib/sensors/__init__.py             |   4 +-
 .../sensors/aws_redshift_cluster_sensor.py      |   4 +-
 airflow/contrib/sensors/bash_sensor.py          |   5 +-
 airflow/contrib/sensors/bigquery_sensor.py      |  45 ++++----
 airflow/contrib/sensors/datadog_sensor.py       |   4 +-
 airflow/contrib/sensors/emr_base_sensor.py      |   4 +-
 airflow/contrib/sensors/emr_job_flow_sensor.py  |   7 +-
 airflow/contrib/sensors/emr_step_sensor.py      |   4 +-
 airflow/contrib/sensors/file_sensor.py          |   4 +-
 airflow/contrib/sensors/ftp_sensor.py           |   4 +-
 airflow/contrib/sensors/gcs_sensor.py           | 103 +++++++++----------
 airflow/contrib/sensors/hdfs_sensor.py          |   4 +-
 airflow/contrib/sensors/jira_sensor.py          |  16 +--
 airflow/contrib/sensors/pubsub_sensor.py        |   4 +-
 airflow/contrib/sensors/redis_key_sensor.py     |   4 +-
 airflow/contrib/sensors/sftp_sensor.py          |   4 +-
 airflow/contrib/sensors/wasb_sensor.py          |   4 +-
 airflow/contrib/task_runner/__init__.py         |   4 +-
 .../contrib/task_runner/cgroup_task_runner.py   |  13 +--
 airflow/contrib/utils/__init__.py               |   5 +-
 120 files changed, 616 insertions(+), 496 deletions(-)
----------------------------------------------------------------------
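
Most hunks below apply a handful of recurring fixes: trailing whitespace
stripped from license headers (W291), over-long lines wrapped (E501), two
blank lines restored around top-level definitions (E302/E305), and unused
names dropped (F841). A minimal before/after sketch of the F841 pattern,
mirroring the ldap_auth.py hunk below (codes are the standard
pycodestyle/pyflakes ones):

    # before: flake8 flags "e" as assigned but never used (F841)
    try:
        conn = get_ldap_connection(entry['dn'], password)
    except KeyError as e:
        raise LdapException("Could not parse LDAP structure.")

    # after: drop the unused binding
    try:
        conn = get_ldap_connection(entry['dn'], password)
    except KeyError:
        raise LdapException("Could not parse LDAP structure.")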


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/__init__.py b/airflow/contrib/__init__.py
index f0f8b68..114d189 100644
--- a/airflow/contrib/__init__.py
+++ b/airflow/contrib/__init__.py
@@ -7,13 +7,12 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/__init__.py b/airflow/contrib/auth/__init__.py
index f0f8b68..114d189 100644
--- a/airflow/contrib/auth/__init__.py
+++ b/airflow/contrib/auth/__init__.py
@@ -7,13 +7,12 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/backends/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/__init__.py b/airflow/contrib/auth/backends/__init__.py
index f0f8b68..114d189 100644
--- a/airflow/contrib/auth/backends/__init__.py
+++ b/airflow/contrib/auth/backends/__init__.py
@@ -7,13 +7,12 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/backends/github_enterprise_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/github_enterprise_auth.py b/airflow/contrib/auth/backends/github_enterprise_auth.py
index e131ec4..a7e6b16 100644
--- a/airflow/contrib/auth/backends/github_enterprise_auth.py
+++ b/airflow/contrib/auth/backends/github_enterprise_auth.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -150,8 +150,9 @@ class GHEAuthBackend(object):
                                  get_config_param('allowed_teams').split(',')]
             except ValueError:
                 # this is to deprecate using the string name for a team
-                raise ValueError('it appears that you are using the string name for a team, '
-                                 'please use the id number instead')
+                raise ValueError(
+                    'it appears that you are using the string name for a team, '
+                    'please use the id number instead')
 
         except AirflowConfigException:
             # No allowed teams defined, let anyone in GHE in.
@@ -175,8 +176,8 @@ class GHEAuthBackend(object):
                 return True
 
         log.debug('Denying access for user "%s", not a member of "%s"',
-                   username,
-                   str(allowed_teams))
+                  username,
+                  str(allowed_teams))
 
         return False
 
@@ -230,6 +231,7 @@ class GHEAuthBackend(object):
 
         return redirect(next_url)
 
+
 login_manager = GHEAuthBackend()
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/backends/google_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/google_auth.py b/airflow/contrib/auth/backends/google_auth.py
index 259874f..d7f78a9 100644
--- a/airflow/contrib/auth/backends/google_auth.py
+++ b/airflow/contrib/auth/backends/google_auth.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -118,8 +118,9 @@ class GoogleAuthBackend(object):
             state=request.args.get('next') or request.referrer or None)
 
     def get_google_user_profile_info(self, google_token):
-        resp = self.google_oauth.get('https://www.googleapis.com/oauth2/v1/userinfo',
-                                    token=(google_token, ''))
+        resp = self.google_oauth.get(
+            'https://www.googleapis.com/oauth2/v1/userinfo',
+            token=(google_token, ''))
 
         if not resp or resp.status != 200:
             raise AuthenticationError(
@@ -184,6 +185,7 @@ class GoogleAuthBackend(object):
 
         return redirect(next_url)
 
+
 login_manager = GoogleAuthBackend()
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/backends/kerberos_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/kerberos_auth.py b/airflow/contrib/auth/backends/kerberos_auth.py
index 0dc8bd4..95773cf 100644
--- a/airflow/contrib/auth/backends/kerberos_auth.py
+++ b/airflow/contrib/auth/backends/kerberos_auth.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -62,10 +62,13 @@ class KerberosUser(models.User, LoggingMixin):
 
         try:
             # this is pykerberos specific, verify = True is needed to prevent KDC spoofing
-            if not kerberos.checkPassword(user_principal, password, service_principal, realm, True):
+            if not kerberos.checkPassword(user_principal,
+                                          password,
+                                          service_principal, realm, True):
                 raise AuthenticationError()
         except kerberos.KrbError as e:
-            logging.error('Password validation for principal %s failed %s', user_principal, e)
+            logging.error(
+                'Password validation for principal %s failed %s', user_principal, e)
             raise AuthenticationError(e)
 
         return

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/backends/ldap_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/ldap_auth.py b/airflow/contrib/auth/backends/ldap_auth.py
index 1ab5fcd..8490b25 100644
--- a/airflow/contrib/auth/backends/ldap_auth.py
+++ b/airflow/contrib/auth/backends/ldap_auth.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -83,7 +83,8 @@ def group_contains_user(conn, search_base, group_filter, user_name_attr, usernam
         log.warning("Unable to find group for %s %s", search_base, search_filter)
     else:
         for entry in conn.entries:
-            if username.lower() in map(lambda attr: attr.lower(), getattr(entry, user_name_attr).values):
+            if username.lower() in map(lambda attr: attr.lower(),
+                                       getattr(entry, user_name_attr).values):
                 return True
 
     return False
@@ -191,12 +192,6 @@ class LdapUser(models.User):
             username
         )
 
-        search_scopes = {
-            "LEVEL": LEVEL,
-            "SUBTREE": SUBTREE,
-            "BASE": BASE
-        }
-
         search_scope = LEVEL
         if configuration.conf.has_option("ldap", "search_scope"):
             if configuration.conf.get("ldap", "search_scope") == "SUBTREE":
@@ -226,12 +221,16 @@ class LdapUser(models.User):
 
         try:
             conn = get_ldap_connection(entry['dn'], password)
-        except KeyError as e:
+        except KeyError:
             log.error("""
-            Unable to parse LDAP structure. If you're using Active Directory and not specifying an OU, you must set search_scope=SUBTREE in airflow.cfg.
+            Unable to parse LDAP structure. If you're using Active Directory
+            and not specifying an OU, you must set search_scope=SUBTREE in airflow.cfg.
             %s
             """ % traceback.format_exc())
-            raise LdapException("Could not parse LDAP structure. Try setting search_scope in airflow.cfg, or check logs")
+            raise LdapException(
+                "Could not parse LDAP structure. "
+                "Try setting search_scope in airflow.cfg, or check logs"
+            )
 
         if not conn:
             log.info("Password incorrect for user %s", username)
@@ -272,6 +271,7 @@ def load_user(userid, session=None):
     user = session.query(models.User).filter(models.User.id == int(userid)).first()
     return LdapUser(user)
 
+
 @provide_session
 def login(self, request, session=None):
     if current_user.is_authenticated():

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/auth/backends/password_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/password_auth.py b/airflow/contrib/auth/backends/password_auth.py
index 26afd98..1c5169d 100644
--- a/airflow/contrib/auth/backends/password_auth.py
+++ b/airflow/contrib/auth/backends/password_auth.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_databricks_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_databricks_operator.py b/airflow/contrib/example_dags/example_databricks_operator.py
index 2b44f8c..bc827d4 100644
--- a/airflow/contrib/example_dags/example_databricks_operator.py
+++ b/airflow/contrib/example_dags/example_databricks_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_emr_job_flow_automatic_steps.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_emr_job_flow_automatic_steps.py b/airflow/contrib/example_dags/example_emr_job_flow_automatic_steps.py
index 0eeaeb2..098a7a0 100644
--- a/airflow/contrib/example_dags/example_emr_job_flow_automatic_steps.py
+++ b/airflow/contrib/example_dags/example_emr_job_flow_automatic_steps.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -20,7 +20,8 @@
 from datetime import timedelta
 import airflow
 from airflow import DAG
-from airflow.contrib.operators.emr_create_job_flow_operator import EmrCreateJobFlowOperator
+from airflow.contrib.operators.emr_create_job_flow_operator \
+    import EmrCreateJobFlowOperator
 from airflow.contrib.sensors.emr_job_flow_sensor import EmrJobFlowSensor
 
 DEFAULT_ARGS = {

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_emr_job_flow_manual_steps.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_emr_job_flow_manual_steps.py b/airflow/contrib/example_dags/example_emr_job_flow_manual_steps.py
index 2574544..48a178a 100644
--- a/airflow/contrib/example_dags/example_emr_job_flow_manual_steps.py
+++ b/airflow/contrib/example_dags/example_emr_job_flow_manual_steps.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -21,10 +21,13 @@ from datetime import timedelta
 
 import airflow
 from airflow import DAG
-from airflow.contrib.operators.emr_create_job_flow_operator import EmrCreateJobFlowOperator
-from airflow.contrib.operators.emr_add_steps_operator import EmrAddStepsOperator
+from airflow.contrib.operators.emr_create_job_flow_operator \
+    import EmrCreateJobFlowOperator
+from airflow.contrib.operators.emr_add_steps_operator \
+    import EmrAddStepsOperator
 from airflow.contrib.sensors.emr_step_sensor import EmrStepSensor
-from airflow.contrib.operators.emr_terminate_job_flow_operator import EmrTerminateJobFlowOperator
+from airflow.contrib.operators.emr_terminate_job_flow_operator \
+    import EmrTerminateJobFlowOperator
 
 DEFAULT_ARGS = {
     'owner': 'airflow',

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_pubsub_flow.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_pubsub_flow.py b/airflow/contrib/example_dags/example_pubsub_flow.py
index f61bbe4..6c13ec1 100644
--- a/airflow/contrib/example_dags/example_pubsub_flow.py
+++ b/airflow/contrib/example_dags/example_pubsub_flow.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_qubole_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_qubole_operator.py b/airflow/contrib/example_dags/example_qubole_operator.py
index e78467b..bfa8355 100644
--- a/airflow/contrib/example_dags/example_qubole_operator.py
+++ b/airflow/contrib/example_dags/example_qubole_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -49,20 +49,26 @@ dag = DAG('example_qubole_operator', default_args=default_args, schedule_interva
 
 dag.doc_md = __doc__
 
+
 def compare_result(ds, **kwargs):
     ti = kwargs['ti']
     r1 = t1.get_results(ti)
     r2 = t2.get_results(ti)
     return filecmp.cmp(r1, r2)
 
+
 t1 = QuboleOperator(
     task_id='hive_show_table',
     command_type='hivecmd',
     query='show tables',
     cluster_label='default',
-    fetch_logs=True, # If true, will fetch qubole command logs and concatenate them into corresponding airflow task logs
-    tags='aiflow_example_run',  # To attach tags to qubole command, auto attach 3 tags - dag_id, task_id, run_id
-    qubole_conn_id='qubole_default',  # Connection id to submit commands inside QDS, if not set "qubole_default" is used
+    fetch_logs=True,
+    # If `fetch_logs`=true, will fetch qubole command logs and concatenate
+    # them into corresponding airflow task logs
+    tags='aiflow_example_run',
+    # To attach tags to qubole command, auto attach 3 tags - dag_id, task_id, run_id
+    qubole_conn_id='qubole_default',
+    # Connection id to submit commands inside QDS, if not set "qubole_default" is used
     dag=dag)
 
 t2 = QuboleOperator(
@@ -103,7 +109,12 @@ join = DummyOperator(
 t4 = QuboleOperator(
     task_id='hadoop_jar_cmd',
     command_type='hadoopcmd',
-    sub_command='jar s3://paid-qubole/HadoopAPIExamples/jars/hadoop-0.20.1-dev-streaming.jar -mapper wc -numReduceTasks 0 -input s3://paid-qubole/HadoopAPITests/data/3.tsv -output s3://paid-qubole/HadoopAPITests/data/3_wc',
+    sub_command='jar s3://paid-qubole/HadoopAPIExamples/'
+                'jars/hadoop-0.20.1-dev-streaming.jar '
+                '-mapper wc '
+                '-numReduceTasks 0 -input s3://paid-qubole/HadoopAPITests/'
+                'data/3.tsv -output '
+                's3://paid-qubole/HadoopAPITests/data/3_wc',
     cluster_label='default',
     fetch_logs=True,
     dag=dag)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_qubole_sensor.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_qubole_sensor.py b/airflow/contrib/example_dags/example_qubole_sensor.py
index 9061cdd..3922d3e 100644
--- a/airflow/contrib/example_dags/example_qubole_sensor.py
+++ b/airflow/contrib/example_dags/example_qubole_sensor.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -50,11 +50,13 @@ t1 = QuboleFileSensor(
     qubole_conn_id='qubole_default',
     poke_interval=60,
     timeout=600,
-    data={"files":
-              ["s3://paid-qubole/HadoopAPIExamples/jars/hadoop-0.20.1-dev-streaming.jar",
-               "s3://paid-qubole/HadoopAPITests/data/{{ ds.split('-')[2] }}.tsv"
-               ] # will check for availability of all the files in array
-          },
+    data={
+        "files":
+            [
+                "s3://paid-qubole/HadoopAPIExamples/jars/hadoop-0.20.1-dev-streaming.jar",
+                "s3://paid-qubole/HadoopAPITests/data/{{ ds.split('-')[2] }}.tsv"
+            ]  # will check for availability of all the files in array
+    },
     dag=dag
 )
 
@@ -62,12 +64,14 @@ t2 = QubolePartitionSensor(
     task_id='check_hive_partition',
     poke_interval=10,
     timeout=60,
-    data={"schema":"default",
-          "table":"my_partitioned_table",
-          "columns":[
-              {"column" : "month", "values" : ["{{ ds.split('-')[1] }}"]},
-              {"column" : "day", "values" : ["{{ ds.split('-')[2] }}" , "{{ yesterday_ds.split('-')[2] }}"]}
-          ]# will check for partitions like [month=12/day=12,month=12/day=13]
+    data={"schema": "default",
+          "table": "my_partitioned_table",
+          "columns": [
+              {"column": "month", "values":
+                  ["{{ ds.split('-')[1] }}"]},
+              {"column": "day", "values":
+                  ["{{ ds.split('-')[2] }}", "{{ yesterday_ds.split('-')[2] }}"]}
+          ]  # will check for partitions like [month=12/day=12,month=12/day=13]
           },
     dag=dag
 )

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/example_dags/example_twitter_dag.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/example_dags/example_twitter_dag.py b/airflow/contrib/example_dags/example_twitter_dag.py
index 3015160..c2c8f4d 100644
--- a/airflow/contrib/example_dags/example_twitter_dag.py
+++ b/airflow/contrib/example_dags/example_twitter_dag.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/executors/kubernetes_executor.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/executors/kubernetes_executor.py b/airflow/contrib/executors/kubernetes_executor.py
index 17b2908..dff0099 100644
--- a/airflow/contrib/executors/kubernetes_executor.py
+++ b/airflow/contrib/executors/kubernetes_executor.py
@@ -581,7 +581,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
             try:
                 self.log.info('Deleted pod: %s', str(key))
                 self.running.pop(key)
-            except KeyError as _:
+            except KeyError:
                 self.log.debug('Could not find key: %s', str(key))
                 pass
         self.event_buffer[key] = state

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/executors/mesos_executor.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/executors/mesos_executor.py b/airflow/contrib/executors/mesos_executor.py
index cf71939..ff974ff 100644
--- a/airflow/contrib/executors/mesos_executor.py
+++ b/airflow/contrib/executors/mesos_executor.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -22,7 +22,7 @@ from future import standard_library
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.www.utils import LoginMixin
 
-standard_library.install_aliases()
+
 from builtins import str
 from queue import Queue
 
@@ -36,7 +36,7 @@ from airflow.settings import Session
 from airflow.utils.state import State
 from airflow.exceptions import AirflowException
 
-
+standard_library.install_aliases()
 DEFAULT_FRAMEWORK_NAME = 'Airflow'
 FRAMEWORK_CONNID_PREFIX = 'mesos_framework_'
 
@@ -74,7 +74,8 @@ class AirflowMesosScheduler(mesos.interface.Scheduler, LoggingMixin):
             )
 
     def registered(self, driver, frameworkId, masterInfo):
-        self.log.info("AirflowScheduler registered to Mesos with framework ID %s", frameworkId.value)
+        self.log.info("AirflowScheduler registered to Mesos with framework ID %s",
+                      frameworkId.value)
 
         if configuration.conf.getboolean('mesos', 'CHECKPOINT') and \
                 configuration.conf.get('mesos', 'FAILOVER_TIMEOUT'):
@@ -128,14 +129,15 @@ class AirflowMesosScheduler(mesos.interface.Scheduler, LoggingMixin):
                 elif resource.name == "mem":
                     offerMem += resource.scalar.value
 
-            self.log.info("Received offer %s with cpus: %s and mem: %s", offer.id.value, offerCpus, offerMem)
+            self.log.info("Received offer %s with cpus: %s and mem: %s",
+                          offer.id.value, offerCpus, offerMem)
 
             remainingCpus = offerCpus
             remainingMem = offerMem
 
             while (not self.task_queue.empty()) and \
-                  remainingCpus >= self.task_cpu and \
-                  remainingMem >= self.task_mem:
+                    remainingCpus >= self.task_cpu and \
+                    remainingMem >= self.task_mem:
                 key, cmd = self.task_queue.get()
                 tid = self.task_counter
                 self.task_counter += 1
@@ -194,7 +196,8 @@ class AirflowMesosScheduler(mesos.interface.Scheduler, LoggingMixin):
         try:
             key = self.task_key_map[update.task_id.value]
         except KeyError:
-            # The map may not contain an item if the framework re-registered after a failover.
+            # The map may not contain an item if the framework re-registered
+            # after a failover.
             # Discard these tasks.
             self.log.warning("Unrecognised task key %s", update.task_id.value)
             return
@@ -257,7 +260,8 @@ class MesosExecutor(BaseExecutor, LoginMixin):
                 session = Session()
                 connection = session.query(Connection).filter_by(conn_id=conn_id).first()
                 if connection is not None:
-                    # Set the Framework ID to let the scheduler reconnect with running tasks.
+                    # Set the Framework ID to let the scheduler reconnect
+                    # with running tasks.
                     framework.id.value = connection.extra
 
                 framework.failover_timeout = configuration.conf.getint(
@@ -268,7 +272,8 @@ class MesosExecutor(BaseExecutor, LoginMixin):
 
         self.log.info(
             'MesosFramework master : %s, name : %s, cpu : %s, mem : %s, checkpoint : %s',
-            master, framework.name, str(task_cpu), str(task_memory), str(framework.checkpoint)
+            master, framework.name,
+            str(task_cpu), str(task_memory), str(framework.checkpoint)
         )
 
         implicit_acknowledgements = 1
@@ -276,10 +281,12 @@ class MesosExecutor(BaseExecutor, LoginMixin):
         if configuration.conf.getboolean('mesos', 'AUTHENTICATE'):
             if not configuration.conf.get('mesos', 'DEFAULT_PRINCIPAL'):
                self.log.error("Expecting authentication principal in the environment")
-                raise AirflowException("mesos.default_principal not provided in authenticated mode")
+                raise AirflowException(
+                    "mesos.default_principal not provided in authenticated mode")
             if not configuration.conf.get('mesos', 'DEFAULT_SECRET'):
                self.log.error("Expecting authentication secret in the environment")
-                raise AirflowException("mesos.default_secret not provided in authenticated mode")
+                raise AirflowException(
+                    "mesos.default_secret not provided in authenticated mode")
 
             credential = mesos_pb2.Credential()
             credential.principal = configuration.conf.get('mesos', 'DEFAULT_PRINCIPAL')
@@ -288,7 +295,10 @@ class MesosExecutor(BaseExecutor, LoginMixin):
             framework.principal = credential.principal
 
             driver = mesos.native.MesosSchedulerDriver(
-                AirflowMesosScheduler(self.task_queue, self.result_queue, task_cpu, task_memory),
+                AirflowMesosScheduler(self.task_queue,
+                                      self.result_queue,
+                                      task_cpu,
+                                      task_memory),
                 framework,
                 master,
                 implicit_acknowledgements,
@@ -296,7 +306,10 @@ class MesosExecutor(BaseExecutor, LoginMixin):
         else:
             framework.principal = 'Airflow'
             driver = mesos.native.MesosSchedulerDriver(
-                AirflowMesosScheduler(self.task_queue, self.result_queue, task_cpu, task_memory),
+                AirflowMesosScheduler(self.task_queue,
+                                      self.result_queue,
+                                      task_cpu,
+                                      task_memory),
                 framework,
                 master,
                 implicit_acknowledgements)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/__init__.py b/airflow/contrib/hooks/__init__.py
index 1c92deb..29a52da 100644
--- a/airflow/contrib/hooks/__init__.py
+++ b/airflow/contrib/hooks/__init__.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -24,7 +24,7 @@
 
 
 import sys
-
+import os as _os
 
 # ------------------------------------------------------------------------
 #
@@ -62,7 +62,8 @@ _hooks = {
     'azure_data_lake_hook': ['AzureDataLakeHook'],
 }
 
-import os as _os
+
 if not _os.environ.get('AIRFLOW_USE_NEW_IMPORTS', False):
     from airflow.utils.helpers import AirflowImporter
+
     airflow_importer = AirflowImporter(sys.modules[__name__], _hooks)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/aws_dynamodb_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/aws_dynamodb_hook.py b/airflow/contrib/hooks/aws_dynamodb_hook.py
index 57f42d2..69324c6 100644
--- a/airflow/contrib/hooks/aws_dynamodb_hook.py
+++ b/airflow/contrib/hooks/aws_dynamodb_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -33,7 +33,11 @@ class AwsDynamoDBHook(AwsHook):
     :type region_name: str
     """
 
-    def __init__(self, table_keys=None, table_name=None, region_name=None, *args, **kwargs):
+    def __init__(self,
+                 table_keys=None,
+                 table_name=None,
+                 region_name=None,
+                 *args, **kwargs):
         self.table_keys = table_keys
         self.table_name = table_name
         self.region_name = region_name

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/aws_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/aws_hook.py b/airflow/contrib/hooks/aws_hook.py
index c8ded4d..c712d2d 100644
--- a/airflow/contrib/hooks/aws_hook.py
+++ b/airflow/contrib/hooks/aws_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -101,13 +101,16 @@ class AwsHook(BaseHook):
                     aws_secret_access_key = connection_object.password
 
                 elif 'aws_secret_access_key' in connection_object.extra_dejson:
-                    aws_access_key_id = connection_object.extra_dejson['aws_access_key_id']
-                    aws_secret_access_key = connection_object.extra_dejson['aws_secret_access_key']
+                    aws_access_key_id = connection_object.extra_dejson[
+                        'aws_access_key_id']
+                    aws_secret_access_key = connection_object.extra_dejson[
+                        'aws_secret_access_key']
 
                 elif 's3_config_file' in connection_object.extra_dejson:
                     aws_access_key_id, aws_secret_access_key = \
-                        _parse_s3_config(connection_object.extra_dejson['s3_config_file'],
-                                         connection_object.extra_dejson.get('s3_config_format'))
+                        _parse_s3_config(
+                            connection_object.extra_dejson['s3_config_file'],
+                            connection_object.extra_dejson.get('s3_config_format'))
 
                 if region_name is None:
                     region_name = connection_object.extra_dejson.get('region_name')
@@ -118,7 +121,6 @@ class AwsHook(BaseHook):
 
                 if role_arn is None and aws_account_id is not None and \
                         aws_iam_role is not None:
-
                     role_arn = "arn:aws:iam::" + aws_account_id + ":role/" + aws_iam_role
 
                 if role_arn is not None:

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/aws_lambda_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/aws_lambda_hook.py b/airflow/contrib/hooks/aws_lambda_hook.py
index 1b57d8d..57cf716 100644
--- a/airflow/contrib/hooks/aws_lambda_hook.py
+++ b/airflow/contrib/hooks/aws_lambda_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -36,7 +36,8 @@ class AwsLambdaHook(AwsHook):
     :type invocation_type: str
     """
 
-    def __init__(self, function_name, region_name=None, log_type='None', qualifier='$LATEST',
+    def __init__(self, function_name, region_name=None,
+                 log_type='None', qualifier='$LATEST',
                  invocation_type='RequestResponse', *args, **kwargs):
         self.function_name = function_name
         self.region_name = region_name

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/bigquery_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py
index 4983d9d..c0a70ab 100644
--- a/airflow/contrib/hooks/bigquery_hook.py
+++ b/airflow/contrib/hooks/bigquery_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/cloudant_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/cloudant_hook.py b/airflow/contrib/hooks/cloudant_hook.py
index 6e6d426..5d39f3f 100644
--- a/airflow/contrib/hooks/cloudant_hook.py
+++ b/airflow/contrib/hooks/cloudant_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -42,8 +42,8 @@ class CloudantHook(BaseHook):
             if isinstance(s, unicode):
                 log = LoggingMixin().log
                 log.debug(
-                    'cloudant-python does not support unicode. Encoding %s as ascii using "ignore".',
-                    s
+                    'cloudant-python does not support unicode. Encoding %s as '
+                    'ascii using "ignore".', s
                 )
                 return s.encode('ascii', 'ignore')
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/databricks_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/databricks_hook.py b/airflow/contrib/hooks/databricks_hook.py
index f7caf57..1443ff4 100644
--- a/airflow/contrib/hooks/databricks_hook.py
+++ b/airflow/contrib/hooks/databricks_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -117,7 +117,7 @@ class DatabricksHook(BaseHook, LoggingMixin):
         else:
             raise AirflowException('Unexpected HTTP Method: ' + method)
 
-        for attempt_num in range(1, self.retry_limit+1):
+        for attempt_num in range(1, self.retry_limit + 1):
             try:
                 response = request_func(
                     url,
@@ -195,10 +195,11 @@ class RunState:
     @property
     def is_terminal(self):
         if self.life_cycle_state not in RUN_LIFE_CYCLE_STATES:
-            raise AirflowException(('Unexpected life cycle state: {}: If the state has '
-                            'been introduced recently, please check the Databricks user '
-                            'guide for troubleshooting information').format(
-                                self.life_cycle_state))
+            raise AirflowException(
+                ('Unexpected life cycle state: {}: If the state has '
+                 'been introduced recently, please check the Databricks user '
+                 'guide for troubleshooting information').format(
+                    self.life_cycle_state))
         return self.life_cycle_state in ('TERMINATED', 'SKIPPED', 'INTERNAL_ERROR')
 
     @property

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/datadog_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/datadog_hook.py b/airflow/contrib/hooks/datadog_hook.py
index d92fd04..3dfeb78 100644
--- a/airflow/contrib/hooks/datadog_hook.py
+++ b/airflow/contrib/hooks/datadog_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -48,9 +48,11 @@ class DatadogHook(BaseHook, LoggingMixin):
         self.host = conn.host
 
         if self.api_key is None:
-            raise AirflowException("api_key must be specified in the Datadog connection details")
+            raise AirflowException("api_key must be specified in the "
+                                   "Datadog connection details")
         if self.app_key is None:
-            raise AirflowException("app_key must be specified in the Datadog connection details")
+            raise AirflowException("app_key must be specified in the "
+                                   "Datadog connection details")
 
         self.log.info("Setting up api keys for Datadog")
         options = {
@@ -89,8 +91,8 @@ class DatadogHook(BaseHook, LoggingMixin):
                      from_seconds_ago,
                      to_seconds_ago):
         """
-        Queries datadog for a specific metric, potentially with some function applied to it
-        and returns the results.
+        Queries datadog for a specific metric, potentially with some
+        function applied to it and returns the results.
 
         :param query: The datadog query to execute (see datadog docs)
         :type query: string
@@ -112,8 +114,8 @@ class DatadogHook(BaseHook, LoggingMixin):
     def post_event(self, title, text, tags=None, alert_type=None, aggregation_key=None):
         """
         Posts an event to datadog (processing finished, potentially alerts, other issues)
-        Think about this as a means to maintain persistence of alerts, rather than alerting
-        itself.
+        Think about this as a means to maintain persistence of alerts, rather than
+        alerting itself.
 
         :param title: The title of the event
         :type title: string

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/datastore_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/datastore_hook.py b/airflow/contrib/hooks/datastore_hook.py
index 96d9079..bbd1199 100644
--- a/airflow/contrib/hooks/datastore_hook.py
+++ b/airflow/contrib/hooks/datastore_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,7 +18,6 @@
 # under the License.
 #
 
-import json
 import time
 from apiclient.discovery import build
 from airflow.contrib.hooks.gcp_api_base_hook import GoogleCloudBaseHook
@@ -55,17 +54,22 @@ class DatastoreHook(GoogleCloudBaseHook):
         :param partialKeys: a list of partial keys
         :return: a list of full keys.
         """
-        resp = self.connection.projects().allocateIds(projectId=self.project_id, body={'keys': partialKeys}).execute()
+        resp = self.connection.projects().allocateIds(
+            projectId=self.project_id, body={'keys': partialKeys}
+        ).execute()
         return resp['keys']
 
     def begin_transaction(self):
         """
         Get a new transaction handle
-        see https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction
+
+            .. seealso::
+                https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction
 
         :return: a transaction handle
         """
-        resp = self.connection.projects().beginTransaction(projectId=self.project_id, body={}).execute()
+        resp = self.connection.projects().beginTransaction(
+            projectId=self.project_id, body={}).execute()
         return resp['transaction']
 
     def commit(self, body):
@@ -78,7 +82,8 @@ class DatastoreHook(GoogleCloudBaseHook):
         :param body: the body of the commit request
         :return: the response body of the commit request
         """
-        resp = self.connection.projects().commit(projectId=self.project_id, body=body).execute()
+        resp = self.connection.projects().commit(
+            projectId=self.project_id, body=body).execute()
         return resp
 
     def lookup(self, keys, read_consistency=None, transaction=None):
@@ -99,7 +104,8 @@ class DatastoreHook(GoogleCloudBaseHook):
             body['readConsistency'] = read_consistency
         if transaction:
             body['transaction'] = transaction
-        return self.connection.projects().lookup(projectId=self.project_id, body=body).execute()
+        return self.connection.projects().lookup(
+            projectId=self.project_id, body=body).execute()
 
     def rollback(self, transaction):
         """
@@ -110,7 +116,8 @@ class DatastoreHook(GoogleCloudBaseHook):
 
         :param transaction: the transaction to roll back
         """
-        self.connection.projects().rollback(projectId=self.project_id, body={'transaction': transaction})\
+        self.connection.projects().rollback(
+            projectId=self.project_id, body={'transaction': transaction})\
             .execute()
 
     def run_query(self, body):
@@ -123,7 +130,8 @@ class DatastoreHook(GoogleCloudBaseHook):
         :param body: the body of the query request
         :return: the batch of query results.
         """
-        resp = self.connection.projects().runQuery(projectId=self.project_id, body=body).execute()
+        resp = self.connection.projects().runQuery(
+            projectId=self.project_id, body=body).execute()
         return resp['batch']
 
     def get_operation(self, name):
@@ -158,7 +166,8 @@ class DatastoreHook(GoogleCloudBaseHook):
             else:
                 return result
 
-    def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None):
+    def export_to_storage_bucket(self, bucket, namespace=None,
+                                 entity_filter=None, labels=None):
         """
         Export entities from Cloud Datastore to Cloud Storage for backup
         """
@@ -172,10 +181,12 @@ class DatastoreHook(GoogleCloudBaseHook):
             'entityFilter': entity_filter,
             'labels': labels,
         }
-        resp = self.admin_connection.projects().export(projectId=self.project_id, body=body).execute()
+        resp = self.admin_connection.projects().export(
+            projectId=self.project_id, body=body).execute()
         return resp
 
-    def import_from_storage_bucket(self, bucket, file, namespace=None, entity_filter=None, labels=None):
+    def import_from_storage_bucket(self, bucket, file,
+                                   namespace=None, entity_filter=None, labels=None):
         """
         Import a backup from Cloud Storage to Cloud Datastore
         """
@@ -189,5 +200,6 @@ class DatastoreHook(GoogleCloudBaseHook):
             'entityFilter': entity_filter,
             'labels': labels,
         }
-        resp = self.admin_connection.projects().import_(projectId=self.project_id, body=body).execute()
+        resp = self.admin_connection.projects().import_(
+            projectId=self.project_id, body=body).execute()
         return resp
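
For reference, the calls reformatted above compose into the hook's usual
transaction flow. A minimal usage sketch (the connection id is Airflow's
default; the entity key is hypothetical):

    from airflow.contrib.hooks.datastore_hook import DatastoreHook

    hook = DatastoreHook(datastore_conn_id='google_cloud_datastore_default')
    txn = hook.begin_transaction()
    try:
        # Read inside the transaction, then commit an empty mutation set.
        hook.lookup(keys=[{'path': [{'kind': 'Task', 'name': 'demo'}]}],
                    transaction=txn)
        hook.commit(body={'mode': 'TRANSACTIONAL',
                          'mutations': [],
                          'transaction': txn})
    except Exception:
        hook.rollback(txn)
        raise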

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/discord_webhook_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/discord_webhook_hook.py b/airflow/contrib/hooks/discord_webhook_hook.py
index 16be086..731d9d5 100644
--- a/airflow/contrib/hooks/discord_webhook_hook.py
+++ b/airflow/contrib/hooks/discord_webhook_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/emr_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/emr_hook.py b/airflow/contrib/hooks/emr_hook.py
index 4347b5b..6cd92c6 100644
--- a/airflow/contrib/hooks/emr_hook.py
+++ b/airflow/contrib/hooks/emr_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -23,7 +23,8 @@ from airflow.contrib.hooks.aws_hook import AwsHook
 
 class EmrHook(AwsHook):
     """
-    Interact with AWS EMR. emr_conn_id is only neccessary for using the create_job_flow method.
+    Interact with AWS EMR. emr_conn_id is only neccessary for using the
+    create_job_flow method.
     """
 
     def __init__(self, emr_conn_id=None, *args, **kwargs):
@@ -37,7 +38,8 @@ class EmrHook(AwsHook):
     def create_job_flow(self, job_flow_overrides):
         """
         Creates a job flow using the config from the EMR connection.
-        Keys of the json extra hash may have the arguments of the boto3 run_job_flow method.
+        Keys of the json extra hash may have the arguments of the boto3
+        run_job_flow method.
         Overrides for this config may be passed as the job_flow_overrides.
         """
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/fs_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/fs_hook.py b/airflow/contrib/hooks/fs_hook.py
index 9d13463..d74525f 100644
--- a/airflow/contrib/hooks/fs_hook.py
+++ b/airflow/contrib/hooks/fs_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/ftp_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/ftp_hook.py b/airflow/contrib/hooks/ftp_hook.py
index 4f97918..8beefb3 100644
--- a/airflow/contrib/hooks/ftp_hook.py
+++ b/airflow/contrib/hooks/ftp_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -250,7 +250,7 @@ class FTPSHook(FTPHook):
             params = self.get_connection(self.ftp_conn_id)
 
             if params.port:
-               ftplib.FTP_TLS.port=params.port
+                ftplib.FTP_TLS.port = params.port
 
             self.conn = ftplib.FTP_TLS(
                 params.host, params.login, params.password
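
A side note on the surrounding code (unchanged here beyond the spacing fix):
the port is assigned on the ftplib.FTP_TLS class rather than on an instance,
so it becomes a process-wide default for every FTP_TLS connection created
afterwards. An illustration with a hypothetical port:

    import ftplib

    ftplib.FTP_TLS.port = 2121                     # class-level override
    conn = ftplib.FTP_TLS('host', 'user', 'pass')  # now connects on 2121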

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/gcp_api_base_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/gcp_api_base_hook.py b/airflow/contrib/hooks/gcp_api_base_hook.py
index b1c8dc8..852854b 100644
--- a/airflow/contrib/hooks/gcp_api_base_hook.py
+++ b/airflow/contrib/hooks/gcp_api_base_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/gcp_dataflow_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/gcp_dataflow_hook.py b/airflow/contrib/hooks/gcp_dataflow_hook.py
index 3354cda..a7c75e1 100644
--- a/airflow/contrib/hooks/gcp_dataflow_hook.py
+++ b/airflow/contrib/hooks/gcp_dataflow_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/gcp_dataproc_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/gcp_dataproc_hook.py b/airflow/contrib/hooks/gcp_dataproc_hook.py
index 7849e17..ce65b2b 100644
--- a/airflow/contrib/hooks/gcp_dataproc_hook.py
+++ b/airflow/contrib/hooks/gcp_dataproc_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/gcp_pubsub_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/gcp_pubsub_hook.py b/airflow/contrib/hooks/gcp_pubsub_hook.py
index b186174..e5b969e 100644
--- a/airflow/contrib/hooks/gcp_pubsub_hook.py
+++ b/airflow/contrib/hooks/gcp_pubsub_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/gcs_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/gcs_hook.py b/airflow/contrib/hooks/gcs_hook.py
index 0d11c12..81fb1c2 100644
--- a/airflow/contrib/hooks/gcs_hook.py
+++ b/airflow/contrib/hooks/gcs_hook.py
@@ -67,8 +67,9 @@ class GoogleCloudStorageHook(GoogleCloudBaseHook):
         """
         destination_bucket = destination_bucket or source_bucket
         destination_object = destination_object or source_object
-        if (source_bucket == destination_bucket and
-            source_object == destination_object):
+        if source_bucket == destination_bucket and \
+                source_object == destination_object:
+
             raise ValueError(
                'Either source/destination bucket or source/destination object '
                 'must be different, not both the same: bucket=%s, object=%s' %
@@ -184,10 +185,16 @@ class GoogleCloudStorageHook(GoogleCloudBaseHook):
         """
         service = self.get_conn()
         media = MediaFileUpload(filename, mime_type)
-        response = service \
-            .objects() \
-            .insert(bucket=bucket, name=object, media_body=media) \
-            .execute()
+        try:
+            service \
+                .objects() \
+                .insert(bucket=bucket, name=object, media_body=media) \
+                .execute()
+            return True
+        except errors.HttpError as ex:
+            if ex.resp['status'] == '404':
+                return False
+            raise
 
     # pylint:disable=redefined-builtin
     def exists(self, bucket, object):
@@ -287,7 +294,8 @@ class GoogleCloudStorageHook(GoogleCloudBaseHook):
         :type versions: boolean
         :param maxResults: max count of items to return in a single page of responses
         :type maxResults: integer
-        :param prefix: prefix string which filters objects whose name begin with this prefix
+        :param prefix: prefix string which filters objects whose name begin with
+            this prefix
         :type prefix: string
         :param delimiter: filters objects based on the delimiter (for e.g '.csv')
         :type delimiter: string
@@ -339,7 +347,9 @@ class GoogleCloudStorageHook(GoogleCloudBaseHook):
         :type object: string
 
         """
-        self.log.info('Checking the file size of object: %s in bucket: %s', object, bucket)
+        self.log.info('Checking the file size of object: %s in bucket: %s',
+                      object,
+                      bucket)
         service = self.get_conn()
         try:
             response = service.objects().get(
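
The tightened validation in copy() above rejects the no-op case up front:
the call must change the bucket, the object name, or both. A sketch with
hypothetical bucket and object names:

    from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook

    gcs = GoogleCloudStorageHook(
        google_cloud_storage_conn_id='google_cloud_default')
    # Same bucket is fine as long as the object name differs.
    gcs.copy(source_bucket='my-bucket', source_object='a.csv',
             destination_object='backup/a.csv')
    # Identical source and destination now raise ValueError:
    # gcs.copy(source_bucket='my-bucket', source_object='a.csv',
    #          destination_bucket='my-bucket', destination_object='a.csv')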

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/jenkins_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/jenkins_hook.py b/airflow/contrib/hooks/jenkins_hook.py
index 55e88ba..b1bea30 100644
--- a/airflow/contrib/hooks/jenkins_hook.py
+++ b/airflow/contrib/hooks/jenkins_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/jira_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/jira_hook.py b/airflow/contrib/hooks/jira_hook.py
index 37c7414..449b6e5 100644
--- a/airflow/contrib/hooks/jira_hook.py
+++ b/airflow/contrib/hooks/jira_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/qubole_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py
index d59fcdf..fb45862 100755
--- a/airflow/contrib/hooks/qubole_hook.py
+++ b/airflow/contrib/hooks/qubole_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -94,7 +94,7 @@ class QuboleHook(BaseHook, LoggingMixin):
                 log = LoggingMixin().log
                 if cmd.status == 'done':
                    log.info('Command ID: %s has been succeeded, hence marking this '
-                                'TI as Success.', cmd_id)
+                             'TI as Success.', cmd_id)
                     ti.state = State.SUCCESS
                 elif cmd.status == 'running':
                     log.info('Cancelling the Qubole Command Id: %s', cmd_id)
@@ -188,10 +188,10 @@ class QuboleHook(BaseHook, LoggingMixin):
         inplace_args = None
         tags = set([self.dag_id, self.task_id, context['run_id']])
 
-        for k,v in self.kwargs.items():
+        for k, v in self.kwargs.items():
             if k in COMMAND_ARGS[cmd_type]:
                 if k in HYPHEN_ARGS:
-                    args.append("--{0}={1}".format(k.replace('_', '-'),v))
+                    args.append("--{0}={1}".format(k.replace('_', '-'), v))
                 elif k in POSITIONAL_ARGS:
                     inplace_args = v
                 elif k == 'tags':
@@ -201,12 +201,12 @@ class QuboleHook(BaseHook, LoggingMixin):
                         for val in v:
                             tags.add(val)
                 else:
-                    args.append("--{0}={1}".format(k,v))
+                    args.append("--{0}={1}".format(k, v))
 
             if k == 'notify' and v is True:
                 args.append("--notify")
 
-        args.append("--tags={0}".format(','.join(filter(None,tags))))
+        args.append("--tags={0}".format(','.join(filter(None, tags))))
 
         if inplace_args is not None:
             args += inplace_args.split(' ')
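
Stripped to its essentials, the loop above renders the hook's kwargs as
CLI-style flags. A simplified standalone rendition, ignoring the
HYPHEN_ARGS/POSITIONAL_ARGS special cases (inputs are hypothetical):

    def build_args(kwargs, base_tags):
        args, tags = [], set(base_tags)
        for k, v in kwargs.items():
            if k == 'tags':
                # accept a single tag or an iterable of tags
                tags.update([v] if isinstance(v, str) else v)
            elif k == 'notify' and v is True:
                args.append('--notify')
            else:
                args.append('--{0}={1}'.format(k, v))
        args.append('--tags={0}'.format(','.join(filter(None, tags))))
        return args

    # build_args({'query': 'show tables', 'notify': True}, {'dag', 'task'})
    # -> ['--query=show tables', '--notify', '--tags=dag,task']  (tag order varies)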

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/redis_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/redis_hook.py b/airflow/contrib/hooks/redis_hook.py
index 8060d64..1de75db 100644
--- a/airflow/contrib/hooks/redis_hook.py
+++ b/airflow/contrib/hooks/redis_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/redshift_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/redshift_hook.py b/airflow/contrib/hooks/redshift_hook.py
index 914699f..fcce683 100644
--- a/airflow/contrib/hooks/redshift_hook.py
+++ b/airflow/contrib/hooks/redshift_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/salesforce_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/salesforce_hook.py b/airflow/contrib/hooks/salesforce_hook.py
index de645e4..ee18b35 100644
--- a/airflow/contrib/hooks/salesforce_hook.py
+++ b/airflow/contrib/hooks/salesforce_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/sftp_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/sftp_hook.py b/airflow/contrib/hooks/sftp_hook.py
index bdbf713..33c8b19 100644
--- a/airflow/contrib/hooks/sftp_hook.py
+++ b/airflow/contrib/hooks/sftp_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/slack_webhook_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/slack_webhook_hook.py b/airflow/contrib/hooks/slack_webhook_hook.py
index f434b0e..670d401 100644
--- a/airflow/contrib/hooks/slack_webhook_hook.py
+++ b/airflow/contrib/hooks/slack_webhook_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/snowflake_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/snowflake_hook.py b/airflow/contrib/hooks/snowflake_hook.py
index 87bbbce..5394d10 100644
--- a/airflow/contrib/hooks/snowflake_hook.py
+++ b/airflow/contrib/hooks/snowflake_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/spark_jdbc_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/spark_jdbc_hook.py b/airflow/contrib/hooks/spark_jdbc_hook.py
index b048f04..b55e4ef 100644
--- a/airflow/contrib/hooks/spark_jdbc_hook.py
+++ b/airflow/contrib/hooks/spark_jdbc_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/spark_jdbc_script.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/spark_jdbc_script.py b/airflow/contrib/hooks/spark_jdbc_script.py
index 55839c2..8e27f36 100644
--- a/airflow/contrib/hooks/spark_jdbc_script.py
+++ b/airflow/contrib/hooks/spark_jdbc_script.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/spark_sql_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/spark_sql_hook.py b/airflow/contrib/hooks/spark_sql_hook.py
index 82ede48..c1fd2ce 100644
--- a/airflow/contrib/hooks/spark_sql_hook.py
+++ b/airflow/contrib/hooks/spark_sql_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -21,7 +21,6 @@ import subprocess
 
 from airflow.hooks.base_hook import BaseHook
 from airflow.exceptions import AirflowException
-from airflow.utils.log.logging_mixin import LoggingMixin
 
 
 class SparkSqlHook(BaseHook):
@@ -34,9 +33,11 @@ class SparkSqlHook(BaseHook):
     :type conf: str (format: PROP=VALUE)
     :param conn_id: connection_id string
     :type conn_id: str
-    :param total_executor_cores: (Standalone & Mesos only) Total cores for all executors (Default: all the available cores on the worker)
+    :param total_executor_cores: (Standalone & Mesos only) Total cores for all executors
+        (Default: all the available cores on the worker)
     :type total_executor_cores: int
-    :param executor_cores: (Standalone & YARN only) Number of cores per executor (Default: 2)
+    :param executor_cores: (Standalone & YARN only) Number of cores per
+        executor (Default: 2)
     :type executor_cores: int
     :param executor_memory: Memory per executor (e.g. 1000M, 2G) (Default: 1G)
     :type executor_memory: str
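
Read together, the reflowed parameters line up as in this instantiation
sketch (values are hypothetical; conn_id is the hook's default):

    from airflow.contrib.hooks.spark_sql_hook import SparkSqlHook

    hook = SparkSqlHook(sql='SELECT COUNT(*) FROM logs',
                        conn_id='spark_sql_default',
                        total_executor_cores=4,  # Standalone & Mesos only
                        executor_cores=2,        # Standalone & YARN only
                        executor_memory='2G')
    hook.run_query()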

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/sqoop_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/sqoop_hook.py b/airflow/contrib/hooks/sqoop_hook.py
index 95c9668..74cddc2 100644
--- a/airflow/contrib/hooks/sqoop_hook.py
+++ b/airflow/contrib/hooks/sqoop_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -71,7 +71,11 @@ class SqoopHook(BaseHook, LoggingMixin):
         self.verbose = verbose
         self.num_mappers = num_mappers
         self.properties = properties or {}
-        self.log.info("Using connection to: {}:{}/{}".format(self.conn.host, 
self.conn.port, self.conn.schema))
+        self.log.info(
+            "Using connection to: {}:{}/{}".format(
+                self.conn.host, self.conn.port, self.conn.schema
+            )
+        )
 
     def get_conn(self):
         return self.conn
@@ -95,10 +99,11 @@ class SqoopHook(BaseHook, LoggingMixin):
         """
         masked_cmd = ' '.join(self.cmd_mask_password(cmd))
         self.log.info("Executing command: {}".format(masked_cmd))
-        self.sp = subprocess.Popen(cmd,
-                              stdout=subprocess.PIPE,
-                              stderr=subprocess.STDOUT,
-                              **kwargs)
+        self.sp = subprocess.Popen(
+            cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            **kwargs)
 
         for line in iter(self.sp.stdout):
             self.log.info(line.strip())
@@ -189,7 +194,8 @@ class SqoopHook(BaseHook, LoggingMixin):
         if extra_import_options:
             for key, value in extra_import_options.items():
                 cmd += ['--{}'.format(key)]
-                if value: cmd += [value]
+                if value:
+                    cmd += [value]
 
         return cmd
 
@@ -295,7 +301,8 @@ class SqoopHook(BaseHook, LoggingMixin):
         if extra_export_options:
             for key, value in extra_export_options.items():
                 cmd += ['--{}'.format(key)]
-                if value: cmd += [value]
+                if value:
+                    cmd += [value]
 
         # The required option
         cmd += ["--table", table]

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/ssh_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/ssh_hook.py b/airflow/contrib/hooks/ssh_hook.py
index 2183af2..f51f0fb 100755
--- a/airflow/contrib/hooks/ssh_hook.py
+++ b/airflow/contrib/hooks/ssh_hook.py
@@ -10,9 +10,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -54,7 +54,8 @@ class SSHHook(BaseHook, LoggingMixin):
     :type port: int
     :param timeout: timeout for the attempt to connect to the remote_host.
     :type timeout: int
-    :param keepalive_interval: send a keepalive packet to remote host every keepalive_interval seconds
+    :param keepalive_interval: send a keepalive packet to remote host every
+        keepalive_interval seconds
     :type keepalive_interval: int
     """
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/vertica_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/vertica_hook.py b/airflow/contrib/hooks/vertica_hook.py
index 69b172c..040e91a 100644
--- a/airflow/contrib/hooks/vertica_hook.py
+++ b/airflow/contrib/hooks/vertica_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/hooks/wasb_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/wasb_hook.py b/airflow/contrib/hooks/wasb_hook.py
index 30f5f7f..1d73abd 100644
--- a/airflow/contrib/hooks/wasb_hook.py
+++ b/airflow/contrib/hooks/wasb_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/kubernetes/kube_client.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/kubernetes/kube_client.py b/airflow/contrib/kubernetes/kube_client.py
index 1d3cc9d..35e8410 100644
--- a/airflow/contrib/kubernetes/kube_client.py
+++ b/airflow/contrib/kubernetes/kube_client.py
@@ -16,6 +16,7 @@
 # under the License.
 from airflow.configuration import conf
 
+
 def _load_kube_config(in_cluster):
     from kubernetes import config, client
     if in_cluster:
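
The hunk is cut off here; for orientation, the usual shape of such a loader
in the kubernetes Python client looks like the sketch below (an assumption
about the surrounding code, not the commit's verbatim body):

    def _load_kube_config(in_cluster):
        from kubernetes import config, client
        if in_cluster:
            config.load_incluster_config()  # service-account credentials
        else:
            config.load_kube_config()       # local ~/.kube/config
        return client.CoreV1Api()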

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/kubernetes/pod_generator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/kubernetes/pod_generator.py b/airflow/contrib/kubernetes/pod_generator.py
index 0f9dabd..6d8d83e 100644
--- a/airflow/contrib/kubernetes/pod_generator.py
+++ b/airflow/contrib/kubernetes/pod_generator.py
@@ -15,8 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-import os
-
 from airflow.contrib.kubernetes.pod import Pod
 import uuid
 from airflow.contrib.kubernetes.volume_mount import VolumeMount  # noqa

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/06b62c42/airflow/contrib/operators/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/__init__.py b/airflow/contrib/operators/__init__.py
index 436878b..3485aaa 100644
--- a/airflow/contrib/operators/__init__.py
+++ b/airflow/contrib/operators/__init__.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -24,7 +24,7 @@
 
 
 import sys
-
+import os as _os
 
 # ------------------------------------------------------------------------
 #
@@ -48,7 +48,6 @@ _operators = {
     'hive_to_dynamodb': ['HiveToDynamoDBTransferOperator']
 }
 
-import os as _os
 if not _os.environ.get('AIRFLOW_USE_NEW_IMPORTS', False):
     from airflow.utils.helpers import AirflowImporter
     airflow_importer = AirflowImporter(sys.modules[__name__], _operators)
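
For context, AirflowImporter implements a lazy-import pattern: attribute
access on the package triggers the import of whichever submodule provides
the requested operator. A minimal sketch of the idea (illustrative only,
not Airflow's actual implementation):

    import importlib

    class LazyImporter(object):
        def __init__(self, parent_module, name_map):
            self._parent = parent_module
            self._name_map = name_map  # e.g. {'ssh_operator': ['SSHOperator']}

        def __getattr__(self, name):
            for submodule, names in self._name_map.items():
                if name in names:
                    mod = importlib.import_module(
                        '{}.{}'.format(self._parent.__name__, submodule))
                    return getattr(mod, name)
            raise AttributeError(name)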

