Repository: incubator-airflow
Updated Branches:
  refs/heads/master 5d3242cbc -> 1f0a717b6


[AIRFLOW-2502] Change Single triple quotes to double for docstrings

- Changed triple single-quoted docstrings to
triple double quotes to be consistent with the
docstring convention in PEP 257
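
For context, PEP 257 recommends triple double quotes around docstrings. A
minimal before/after sketch mirroring the change applied to is_active() in
the auth backends (the Before/After class names are illustrative, not taken
from the diff):

class UserBefore:
    def is_active(self):
        '''Required by flask_login'''  # triple single quotes: valid, but not the PEP 257 convention
        return True

class UserAfter:
    def is_active(self):
        """Required by flask_login"""  # triple double quotes, as recommended by PEP 257
        return True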

Closes #3396 from kaxil/AIRFLOW-2502


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/1f0a717b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/1f0a717b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/1f0a717b

Branch: refs/heads/master
Commit: 1f0a717b65e0ea7e0127708b084baff0697f0946
Parents: 5d3242c
Author: Kaxil Naik <[email protected]>
Authored: Mon May 21 23:22:35 2018 +0200
Committer: Fokko Driesprong <[email protected]>
Committed: Mon May 21 23:22:35 2018 +0200

----------------------------------------------------------------------
 .../auth/backends/github_enterprise_auth.py     | 12 +++++------
 airflow/contrib/auth/backends/google_auth.py    | 12 +++++------
 airflow/contrib/auth/backends/kerberos_auth.py  | 12 +++++------
 airflow/contrib/auth/backends/ldap_auth.py      | 12 +++++------
 airflow/contrib/auth/backends/password_auth.py  | 12 +++++------
 airflow/contrib/hooks/fs_hook.py                |  4 ++--
 airflow/contrib/hooks/snowflake_hook.py         | 12 +++++------
 airflow/contrib/hooks/vertica_hook.py           |  4 ++--
 airflow/default_login.py                        | 22 ++++++++++----------
 airflow/example_dags/docker_copy_data.py        | 14 +++++++------
 airflow/hooks/postgres_hook.py                  |  8 +++----
 airflow/hooks/samba_hook.py                     |  8 +++----
 airflow/jobs.py                                 |  8 +++----
 airflow/macros/hive.py                          | 12 +++++------
 airflow/models.py                               |  8 +++----
 airflow/operators/check_operator.py             |  8 +++----
 airflow/utils/db.py                             |  4 ++--
 airflow/www/utils.py                            | 20 +++++++++---------
 airflow/www_rbac/decorators.py                  | 12 +++++------
 airflow/www_rbac/utils.py                       |  8 +++----
 tests/contrib/hooks/test_spark_sql_hook.py      |  6 +++---
 21 files changed, 110 insertions(+), 108 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/auth/backends/github_enterprise_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/github_enterprise_auth.py b/airflow/contrib/auth/backends/github_enterprise_auth.py
index a7e6b16..3bbcf35 100644
--- a/airflow/contrib/auth/backends/github_enterprise_auth.py
+++ b/airflow/contrib/auth/backends/github_enterprise_auth.py
@@ -48,27 +48,27 @@ class GHEUser(models.User):
         self.user = user
 
     def is_active(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_authenticated(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_anonymous(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return False
 
     def get_id(self):
-        '''Returns the current user id as required by flask_login'''
+        """Returns the current user id as required by flask_login"""
         return self.user.get_id()
 
     def data_profiling(self):
-        '''Provides access to data profiling tools'''
+        """Provides access to data profiling tools"""
         return True
 
     def is_superuser(self):
-        '''Access all the things'''
+        """Access all the things"""
         return True
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/auth/backends/google_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/google_auth.py b/airflow/contrib/auth/backends/google_auth.py
index d7f78a9..d1a3579 100644
--- a/airflow/contrib/auth/backends/google_auth.py
+++ b/airflow/contrib/auth/backends/google_auth.py
@@ -47,27 +47,27 @@ class GoogleUser(models.User):
         self.user = user
 
     def is_active(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_authenticated(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_anonymous(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return False
 
     def get_id(self):
-        '''Returns the current user id as required by flask_login'''
+        """Returns the current user id as required by flask_login"""
         return self.user.get_id()
 
     def data_profiling(self):
-        '''Provides access to data profiling tools'''
+        """Provides access to data profiling tools"""
         return True
 
     def is_superuser(self):
-        '''Access all the things'''
+        """Access all the things"""
         return True
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/auth/backends/kerberos_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/kerberos_auth.py b/airflow/contrib/auth/backends/kerberos_auth.py
index 95773cf..08be299 100644
--- a/airflow/contrib/auth/backends/kerberos_auth.py
+++ b/airflow/contrib/auth/backends/kerberos_auth.py
@@ -74,27 +74,27 @@ class KerberosUser(models.User, LoggingMixin):
         return
 
     def is_active(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_authenticated(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_anonymous(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return False
 
     def get_id(self):
-        '''Returns the current user id as required by flask_login'''
+        """Returns the current user id as required by flask_login"""
         return self.user.get_id()
 
     def data_profiling(self):
-        '''Provides access to data profiling tools'''
+        """Provides access to data profiling tools"""
         return True
 
     def is_superuser(self):
-        '''Access all the things'''
+        """Access all the things"""
         return True
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/auth/backends/ldap_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/ldap_auth.py b/airflow/contrib/auth/backends/ldap_auth.py
index 8490b25..177a6de 100644
--- a/airflow/contrib/auth/backends/ldap_auth.py
+++ b/airflow/contrib/auth/backends/ldap_auth.py
@@ -237,27 +237,27 @@ class LdapUser(models.User):
             raise AuthenticationError("Invalid username or password")
 
     def is_active(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_authenticated(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_anonymous(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return False
 
     def get_id(self):
-        '''Returns the current user id as required by flask_login'''
+        """Returns the current user id as required by flask_login"""
         return self.user.get_id()
 
     def data_profiling(self):
-        '''Provides access to data profiling tools'''
+        """Provides access to data profiling tools"""
         return self.data_profiler
 
     def is_superuser(self):
-        '''Access all the things'''
+        """Access all the things"""
         return self.superuser
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/auth/backends/password_auth.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/auth/backends/password_auth.py b/airflow/contrib/auth/backends/password_auth.py
index 1c5169d..9e16bb6 100644
--- a/airflow/contrib/auth/backends/password_auth.py
+++ b/airflow/contrib/auth/backends/password_auth.py
@@ -72,27 +72,27 @@ class PasswordUser(models.User):
         return check_password_hash(self._password, plaintext)
 
     def is_active(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_authenticated(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_anonymous(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return False
 
     def get_id(self):
-        '''Returns the current user id as required by flask_login'''
+        """Returns the current user id as required by flask_login"""
         return str(self.id)
 
     def data_profiling(self):
-        '''Provides access to data profiling tools'''
+        """Provides access to data profiling tools"""
         return True
 
     def is_superuser(self):
-        '''Access all the things'''
+        """Access all the things"""
         return True
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/hooks/fs_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/fs_hook.py b/airflow/contrib/hooks/fs_hook.py
index d74525f..6832f20 100644
--- a/airflow/contrib/hooks/fs_hook.py
+++ b/airflow/contrib/hooks/fs_hook.py
@@ -22,7 +22,7 @@ from airflow.hooks.base_hook import BaseHook
 
 
 class FSHook(BaseHook):
-    '''
+    """
     Allows for interaction with an file server.
 
     Connection should have a name and a path specified under extra:
@@ -32,7 +32,7 @@ class FSHook(BaseHook):
     Conn Type: File (path)
     Host, Shchema, Login, Password, Port: empty
     Extra: {"path": "/tmp"}
-    '''
+    """
 
     def __init__(self, conn_id='fs_default'):
         conn = self.get_connection(conn_id)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/hooks/snowflake_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/snowflake_hook.py b/airflow/contrib/hooks/snowflake_hook.py
index 5394d10..4027a9a 100644
--- a/airflow/contrib/hooks/snowflake_hook.py
+++ b/airflow/contrib/hooks/snowflake_hook.py
@@ -41,10 +41,10 @@ class SnowflakeHook(DbApiHook):
         self.database = kwargs.pop("database", None)
 
     def _get_conn_params(self):
-        '''
+        """
         one method to fetch connection params as a dict
         used in get_uri() and get_connection()
-        '''
+        """
         conn = self.get_connection(self.snowflake_conn_id)
         account = conn.extra_dejson.get('account', None)
         warehouse = conn.extra_dejson.get('warehouse', None)
@@ -61,9 +61,9 @@ class SnowflakeHook(DbApiHook):
         return conn_config
 
     def get_uri(self):
-        '''
+        """
         override DbApiHook get_uri method for get_sqlalchemy_engine()
-        '''
+        """
         conn_config = self._get_conn_params()
         uri = 'snowflake://{user}:{password}@{account}/{database}/'
         uri += '{schema}?warehouse={warehouse}'
@@ -79,12 +79,12 @@ class SnowflakeHook(DbApiHook):
         return conn
 
     def _get_aws_credentials(self):
-        '''
+        """
         returns aws_access_key_id, aws_secret_access_key
         from extra
 
         intended to be used by external import and export statements
-        '''
+        """
         if self.snowflake_conn_id:
             connection_object = self.get_connection(self.snowflake_conn_id)
             if 'aws_secret_access_key' in connection_object.extra_dejson:

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/contrib/hooks/vertica_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/vertica_hook.py b/airflow/contrib/hooks/vertica_hook.py
index 040e91a..f3411de 100644
--- a/airflow/contrib/hooks/vertica_hook.py
+++ b/airflow/contrib/hooks/vertica_hook.py
@@ -24,9 +24,9 @@ from airflow.hooks.dbapi_hook import DbApiHook
 
 
 class VerticaHook(DbApiHook):
-    '''
+    """
     Interact with Vertica.
-    '''
+    """
 
     conn_name_attr = 'vertica_conn_id'
     default_conn_name = 'vertica_default'

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/default_login.py
----------------------------------------------------------------------
diff --git a/airflow/default_login.py b/airflow/default_login.py
index e6b1c05..d44dbf3 100644
--- a/airflow/default_login.py
+++ b/airflow/default_login.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -17,12 +17,12 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-'''
+"""
 Override this file to handle your authenticating / login.
 
 Copy and alter this file and put in your PYTHONPATH as airflow_login.py,
 the new module will override this one.
-'''
+"""
 
 import flask_login
 from flask_login import login_required, current_user, logout_user
@@ -45,27 +45,27 @@ class DefaultUser(object):
         self.user = user
 
     def is_active(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_authenticated(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return True
 
     def is_anonymous(self):
-        '''Required by flask_login'''
+        """Required by flask_login"""
         return False
 
     def data_profiling(self):
-        '''Provides access to data profiling tools'''
+        """Provides access to data profiling tools"""
         return True
 
     def is_superuser(self):
-        '''Access all the things'''
+        """Access all the things"""
         return True
 
-#models.User = User  # hack!
-#del User
+# models.User = User  # hack!
+# del User
 
 
 @login_manager.user_loader

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/example_dags/docker_copy_data.py
----------------------------------------------------------------------
diff --git a/airflow/example_dags/docker_copy_data.py b/airflow/example_dags/docker_copy_data.py
index 87d1174..9f5ecb8 100644
--- a/airflow/example_dags/docker_copy_data.py
+++ b/airflow/example_dags/docker_copy_data.py
@@ -7,20 +7,22 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-'''
-This sample "listen to directory". move the new file and print it, using 
docker-containers.
-The following operators are being used: DockerOperator, BashOperator & 
ShortCircuitOperator.
+"""
+This sample "listen to directory". move the new file and print it,
+using docker-containers.
+The following operators are being used: DockerOperator, BashOperator &
+ShortCircuitOperator.
 TODO: Review the workflow, change it accordingly to to your environment & enable the code.
-'''
+"""
 
 # from __future__ import print_function
 #

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/hooks/postgres_hook.py
----------------------------------------------------------------------
diff --git a/airflow/hooks/postgres_hook.py b/airflow/hooks/postgres_hook.py
index 907e6b2..7e89d93 100644
--- a/airflow/hooks/postgres_hook.py
+++ b/airflow/hooks/postgres_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -60,10 +60,10 @@ class PostgresHook(DbApiHook):
         return self.conn
 
     def copy_expert(self, sql, filename, open=open):
-        '''
+        """
         Executes SQL using psycopg2 copy_expert method
         Necessary to execute COPY command without access to a superuser
-        '''
+        """
         f = open(filename, 'w')
         with closing(self.get_conn()) as conn:
             with closing(conn.cursor()) as cur:

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/hooks/samba_hook.py
----------------------------------------------------------------------
diff --git a/airflow/hooks/samba_hook.py b/airflow/hooks/samba_hook.py
index bfcecd3..0820500 100644
--- a/airflow/hooks/samba_hook.py
+++ b/airflow/hooks/samba_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -24,9 +24,9 @@ from airflow.hooks.base_hook import BaseHook
 
 
 class SambaHook(BaseHook):
-    '''
+    """
     Allows for interaction with an samba server.
-    '''
+    """
 
     def __init__(self, samba_conn_id):
         self.conn = self.get_connection(samba_conn_id)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/jobs.py
----------------------------------------------------------------------
diff --git a/airflow/jobs.py b/airflow/jobs.py
index 7f4f470..9d80a79 100644
--- a/airflow/jobs.py
+++ b/airflow/jobs.py
@@ -132,16 +132,16 @@ class BaseJob(Base, LoggingMixin):
         raise AirflowException("Job shut down externally.")
 
     def on_kill(self):
-        '''
+        """
         Will be called when an external kill command is received
-        '''
+        """
         pass
 
     def heartbeat_callback(self, session=None):
         pass
 
     def heartbeat(self):
-        '''
+        """
         Heartbeats update the job's entry in the database with a timestamp
         for the latest_heartbeat and allows for the job to be killed
         externally. This allows at the system level to monitor what is
@@ -158,7 +158,7 @@ class BaseJob(Base, LoggingMixin):
         will sleep 50 seconds to complete the 60 seconds and keep a steady
         heart rate. If you go over 60 seconds before calling it, it won't
         sleep at all.
-        '''
+        """
         with create_session() as session:
             job = session.query(BaseJob).filter_by(id=self.id).one()
             make_transient(job)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/macros/hive.py
----------------------------------------------------------------------
diff --git a/airflow/macros/hive.py b/airflow/macros/hive.py
index 39c13a8..bb60203 100644
--- a/airflow/macros/hive.py
+++ b/airflow/macros/hive.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -56,7 +56,7 @@ def max_partition(
 
 
 def _closest_date(target_dt, date_list, before_target=None):
-    '''
+    """
     This function finds the date in a list closest to the target date.
     An optional parameter can be given to get the closest before or after.
 
@@ -68,7 +68,7 @@ def _closest_date(target_dt, date_list, before_target=None):
     :type before_target: bool or None
     :returns: The closest date
     :rtype: datetime.date or None
-    '''
+    """
     fb = lambda d: target_dt - d if d <= target_dt else datetime.timedelta.max
     fa = lambda d: d - target_dt if d >= target_dt else datetime.timedelta.max
     fnone = lambda d: target_dt - d if d < target_dt else d - target_dt
@@ -83,7 +83,7 @@ def _closest_date(target_dt, date_list, before_target=None):
 def closest_ds_partition(
         table, ds, before=True, schema="default",
         metastore_conn_id='metastore_default'):
-    '''
+    """
     This function finds the date in a list closest to the target date.
     An optional parameter can be given to get the closest before or after.
 
@@ -99,7 +99,7 @@ def closest_ds_partition(
     >>> tbl = 'airflow.static_babynames_partitioned'
     >>> closest_ds_partition(tbl, '2015-01-02')
     '2015-01-01'
-    '''
+    """
     from airflow.hooks.hive_hooks import HiveMetastoreHook
     if '.' in table:
         schema, table = table.split('.')

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/models.py
----------------------------------------------------------------------
diff --git a/airflow/models.py b/airflow/models.py
index 4c1be8e..bcd12fb 100755
--- a/airflow/models.py
+++ b/airflow/models.py
@@ -4170,10 +4170,10 @@ class DAG(BaseDag, LoggingMixin):
         return qry.scalar()
 
     def test_cycle(self):
-        '''
+        """
         Check to see if there are any cycles in the DAG. Returns False if no cycle found,
         otherwise raises exception.
-        '''
+        """
 
         # default of int is 0 which corresponds to CYCLE_NEW
         visit_map = defaultdict(int)
@@ -4184,9 +4184,9 @@ class DAG(BaseDag, LoggingMixin):
         return False
 
     def _test_cycle_helper(self, visit_map, task_id):
-        '''
+        """
         Checks if a cycle exists from the input task using DFS traversal
-        '''
+        """
 
         # print('Inspecting %s' % task_id)
         if visit_map[task_id] == DagBag.CYCLE_DONE:

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/operators/check_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/check_operator.py b/airflow/operators/check_operator.py
index fc9dc16..6a2cbb0 100644
--- a/airflow/operators/check_operator.py
+++ b/airflow/operators/check_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -91,13 +91,13 @@ class CheckOperator(BaseOperator):
 
 
 def _convert_to_float_if_possible(s):
-    '''
+    """
     A small helper function to convert a string to a numeric value
     if appropriate
 
     :param s: the string to be converted
     :type s: str
-    '''
+    """
     try:
         ret = float(s)
     except (ValueError, TypeError):

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/utils/db.py
----------------------------------------------------------------------
diff --git a/airflow/utils/db.py b/airflow/utils/db.py
index 270939a..6604a13 100644
--- a/airflow/utils/db.py
+++ b/airflow/utils/db.py
@@ -343,9 +343,9 @@ def upgradedb():
 
 
 def resetdb(rbac):
-    '''
+    """
     Clear out the database
-    '''
+    """
     from airflow import models
 
     # alembic adds significant import time, so we import it lazily

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/www/utils.py
----------------------------------------------------------------------
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index 4a3ac2e..755a9e2 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -241,9 +241,9 @@ def epoch(dttm):
 
 
 def action_logging(f):
-    '''
+    """
     Decorator to log user actions
-    '''
+    """
     @functools.wraps(f)
     def wrapper(*args, **kwargs):
         if current_user and hasattr(current_user, 'username'):
@@ -272,9 +272,9 @@ def action_logging(f):
 
 
 def notify_owner(f):
-    '''
+    """
     Decorator to notify owner of actions taken on their DAGs by others
-    '''
+    """
     @functools.wraps(f)
     def wrapper(*args, **kwargs):
         """
@@ -329,9 +329,9 @@ def json_response(obj):
 
 
 def gzipped(f):
-    '''
+    """
     Decorator to make a view compressed
-    '''
+    """
     @functools.wraps(f)
     def view_func(*args, **kwargs):
         @after_this_request
@@ -366,9 +366,9 @@ def gzipped(f):
 
 
 def make_cache_key(*args, **kwargs):
-    '''
+    """
     Used by cache to get a unique key per URL
-    '''
+    """
     path = request.path
     args = str(hash(frozenset(request.args.items())))
     return (path + args).encode('ascii', 'ignore')

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/www_rbac/decorators.py
----------------------------------------------------------------------
diff --git a/airflow/www_rbac/decorators.py b/airflow/www_rbac/decorators.py
index 180b8a8..2dd1af4 100644
--- a/airflow/www_rbac/decorators.py
+++ b/airflow/www_rbac/decorators.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -26,9 +26,9 @@ from airflow import models, settings
 
 
 def action_logging(f):
-    '''
+    """
     Decorator to log user actions
-    '''
+    """
     @functools.wraps(f)
     def wrapper(*args, **kwargs):
         session = settings.Session()
@@ -58,9 +58,9 @@ def action_logging(f):
 
 
 def gzipped(f):
-    '''
+    """
     Decorator to make a view compressed
-    '''
+    """
     @functools.wraps(f)
     def view_func(*args, **kwargs):
         @after_this_request

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/airflow/www_rbac/utils.py
----------------------------------------------------------------------
diff --git a/airflow/www_rbac/utils.py b/airflow/www_rbac/utils.py
index dfba080..7bbdada 100644
--- a/airflow/www_rbac/utils.py
+++ b/airflow/www_rbac/utils.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -198,9 +198,9 @@ def json_response(obj):
 
 
 def make_cache_key(*args, **kwargs):
-    '''
+    """
     Used by cache to get a unique key per URL
-    '''
+    """
     path = request.path
     args = str(hash(frozenset(request.args.items())))
     return (path + args).encode('ascii', 'ignore')

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1f0a717b/tests/contrib/hooks/test_spark_sql_hook.py
----------------------------------------------------------------------
diff --git a/tests/contrib/hooks/test_spark_sql_hook.py b/tests/contrib/hooks/test_spark_sql_hook.py
index 11b5161..c95ee3f 100644
--- a/tests/contrib/hooks/test_spark_sql_hook.py
+++ b/tests/contrib/hooks/test_spark_sql_hook.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -32,7 +32,7 @@ from airflow.contrib.hooks.spark_sql_hook import SparkSqlHook
 
 
 def get_after(sentinel, iterable):
-    "Get the value after `sentinel` in an `iterable`"
+    """Get the value after `sentinel` in an `iterable`"""
     truncated = dropwhile(lambda el: el != sentinel, iterable)
     next(truncated)
     return next(truncated)
