Repository: incubator-airflow
Updated Branches:
  refs/heads/master 48fccefab -> cff8318b9


[AIRFLOW-2429] Fix operators folder flake8 error

Closes #3481 from feng-tao/flake8_p6


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/cff8318b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/cff8318b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/cff8318b

Branch: refs/heads/master
Commit: cff8318b9907416545c7bda517eb69ad3ccb6f4d
Parents: 48fccef
Author: Tao feng <tf...@lyft.com>
Authored: Tue Jun 12 22:47:31 2018 +0200
Committer: Fokko Driesprong <fokkodriespr...@godatadriven.com>
Committed: Tue Jun 12 22:47:31 2018 +0200

----------------------------------------------------------------------
 airflow/operators/__init__.py                   |  5 +-
 airflow/operators/check_operator.py             |  2 +-
 airflow/operators/docker_operator.py            | 44 ++++++-----
 airflow/operators/dummy_operator.py             |  4 +-
 airflow/operators/generic_transfer.py           |  5 +-
 airflow/operators/hive_operator.py              |  5 +-
 airflow/operators/hive_stats_operator.py        |  4 +-
 airflow/operators/hive_to_druid.py              | 18 +++--
 airflow/operators/hive_to_mysql.py              |  9 ++-
 airflow/operators/hive_to_samba_operator.py     |  5 +-
 airflow/operators/http_operator.py              |  4 +-
 airflow/operators/jdbc_operator.py              |  5 +-
 airflow/operators/latest_only_operator.py       |  5 +-
 airflow/operators/mssql_operator.py             |  5 +-
 airflow/operators/mssql_to_hive.py              |  7 +-
 airflow/operators/mysql_operator.py             |  5 +-
 airflow/operators/mysql_to_hive.py              |  4 +-
 airflow/operators/oracle_operator.py            |  5 +-
 airflow/operators/pig_operator.py               |  5 +-
 airflow/operators/postgres_operator.py          |  4 +-
 airflow/operators/presto_check_operator.py      |  7 +-
 airflow/operators/presto_to_mysql.py            |  5 +-
 airflow/operators/python_operator.py            | 77 ++++++++++++--------
 airflow/operators/redshift_to_s3_operator.py    |  5 +-
 airflow/operators/s3_file_transform_operator.py |  4 +-
 airflow/operators/s3_to_hive_operator.py        | 16 ++--
 airflow/operators/s3_to_redshift_operator.py    |  5 +-
 airflow/operators/sensors.py                    |  6 +-
 airflow/operators/slack_operator.py             | 19 +++--
 airflow/operators/sqlite_operator.py            |  5 +-
 30 files changed, 173 insertions(+), 126 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/__init__.py
----------------------------------------------------------------------
diff --git a/airflow/operators/__init__.py b/airflow/operators/__init__.py
index 003b855..efdfd3e 100644
--- a/airflow/operators/__init__.py
+++ b/airflow/operators/__init__.py
@@ -7,16 +7,15 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
 
 import sys
 import os

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/check_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/check_operator.py b/airflow/operators/check_operator.py
index 6a2cbb0..5a31737 100644
--- a/airflow/operators/check_operator.py
+++ b/airflow/operators/check_operator.py
@@ -217,7 +217,7 @@ class IntervalCheckOperator(BaseOperator):
         sqlt = ("SELECT {sqlexp} FROM {table}"
                 " WHERE {date_filter_column}=").format(**locals())
         self.sql1 = sqlt + "'{{ ds }}'"
-        self.sql2 = sqlt + "'{{ macros.ds_add(ds, "+str(self.days_back)+") }}'"
+        self.sql2 = sqlt + "'{{ macros.ds_add(ds, " + str(self.days_back) + ") }}'"
 
     def execute(self, context=None):
         hook = self.get_db_hook()

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/docker_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py
index 421ce49..69dc1eb 100644
--- a/airflow/operators/docker_operator.py
+++ b/airflow/operators/docker_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -32,9 +32,11 @@ class DockerOperator(BaseOperator):
     """
     Execute a command inside a docker container.
 
-    A temporary directory is created on the host and mounted into a container to allow storing files
-    that together exceed the default disk size of 10GB in a container. The path to the mounted
-    directory can be accessed via the environment variable ``AIRFLOW_TMP_DIR``.
+    A temporary directory is created on the host and
+    mounted into a container to allow storing files
+    that together exceed the default disk size of 10GB in a container.
+    The path to the mounted directory can be accessed
+    via the environment variable ``AIRFLOW_TMP_DIR``.
 
     If a login to a private registry is required prior to pulling the image, a
     Docker connection needs to be configured in Airflow and the connection ID
@@ -58,37 +60,42 @@ class DockerOperator(BaseOperator):
     :type environment: dict
     :param force_pull: Pull the docker image on every run. Default is false.
     :type force_pull: bool
-    :param mem_limit: Maximum amount of memory the container can use. Either a float value, which
-        represents the limit in bytes, or a string like ``128m`` or ``1g``.
+    :param mem_limit: Maximum amount of memory the container can use.
+        Either a float value, which represents the limit in bytes,
+        or a string like ``128m`` or ``1g``.
     :type mem_limit: float or str
     :param network_mode: Network mode for the container.
     :type network_mode: str
-    :param tls_ca_cert: Path to a PEM-encoded certificate authority to secure the docker connection.
+    :param tls_ca_cert: Path to a PEM-encoded certificate authority
+        to secure the docker connection.
     :type tls_ca_cert: str
-    :param tls_client_cert: Path to the PEM-encoded certificate used to authenticate docker client.
+    :param tls_client_cert: Path to the PEM-encoded certificate
+        used to authenticate docker client.
     :type tls_client_cert: str
     :param tls_client_key: Path to the PEM-encoded key used to authenticate docker client.
     :type tls_client_key: str
-    :param tls_hostname: Hostname to match against the docker server certificate or False to
-        disable the check.
+    :param tls_hostname: Hostname to match against
+        the docker server certificate or False to disable the check.
     :type tls_hostname: str or bool
     :param tls_ssl_version: Version of SSL to use when communicating with docker daemon.
     :type tls_ssl_version: str
-    :param tmp_dir: Mount point inside the container to a temporary directory created on the host by
-        the operator. The path is also made available via the environment variable
+    :param tmp_dir: Mount point inside the container to
+        a temporary directory created on the host by the operator.
+        The path is also made available via the environment variable
         ``AIRFLOW_TMP_DIR`` inside the container.
     :type tmp_dir: str
     :param user: Default user inside the docker container.
     :type user: int or str
     :param volumes: List of volumes to mount into the container, e.g.
         ``['/host/path:/container/path', '/host/path2:/container/path2:ro']``.
-    :param working_dir: Working directory to set on the container (equivalent to the -w switch
-        the docker client)
+    :param working_dir: Working directory to
+        set on the container (equivalent to the -w switch the docker client)
     :type working_dir: str
     :param xcom_push: Does the stdout will be pushed to the next step using XCom.
-           The default is False.
+        The default is False.
     :type xcom_push: bool
-    :param xcom_all: Push all the stdout or just the last line. The default is False (last line).
+    :param xcom_all: Push all the stdout or just the last line.
+        The default is False (last line).
     :type xcom_all: bool
     :param docker_conn_id: ID of the Airflow connection to use
     :type docker_conn_id: str
@@ -216,7 +223,8 @@ class DockerOperator(BaseOperator):
                 raise AirflowException('docker container failed')
 
             if self.xcom_push_flag:
-                return self.cli.logs(container=self.container['Id']) if self.xcom_all else str(line)
+                return self.cli.logs(container=self.container['Id']) \
+                    if self.xcom_all else str(line)
 
     def get_command(self):
         if self.command is not None and self.command.strip().find('[') == 0:

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/dummy_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/dummy_operator.py b/airflow/operators/dummy_operator.py
index 5a5c616..025a242 100644
--- a/airflow/operators/dummy_operator.py
+++ b/airflow/operators/dummy_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/generic_transfer.py
----------------------------------------------------------------------
diff --git a/airflow/operators/generic_transfer.py b/airflow/operators/generic_transfer.py
index 8e0684d..7b1a64e 100644
--- a/airflow/operators/generic_transfer.py
+++ b/airflow/operators/generic_transfer.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
 from airflow.hooks.base_hook import BaseHook

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/hive_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/hive_operator.py b/airflow/operators/hive_operator.py
index c3b248a..bd72703 100644
--- a/airflow/operators/hive_operator.py
+++ b/airflow/operators/hive_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from __future__ import unicode_literals
 
 import re

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/hive_stats_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/hive_stats_operator.py b/airflow/operators/hive_stats_operator.py
index 721149c..fe83284 100644
--- a/airflow/operators/hive_stats_operator.py
+++ b/airflow/operators/hive_stats_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/hive_to_druid.py
----------------------------------------------------------------------
diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py
index c188ef1..9b4a881 100644
--- a/airflow/operators/hive_to_druid.py
+++ b/airflow/operators/hive_to_druid.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.hive_hooks import HiveCliHook, HiveMetastoreHook
 from airflow.hooks.druid_hook import DruidHook
 from airflow.models import BaseOperator
@@ -100,7 +101,9 @@ class HiveToDruidTransfer(BaseOperator):
         self.log.info("Extracting data from Hive")
         hive_table = 'druid.' + context['task_instance_key_str'].replace('.', '_')
         sql = self.sql.strip().strip(';')
-        tblproperties = ''.join([", '{}' = '{}'".format(k, v) for k, v in self.hive_tblproperties.items()])
+        tblproperties = ''.join([", '{}' = '{}'"
+                                .format(k, v)
+                                 for k, v in self.hive_tblproperties.items()])
         hql = """\
         SET mapred.output.compress=false;
         SET hive.exec.compress.output=false;
@@ -159,7 +162,8 @@ class HiveToDruidTransfer(BaseOperator):
         :type columns: list
         """
 
-        # backward compatibilty for num_shards, but target_partition_size is the default setting
+        # backward compatibilty for num_shards,
+        # but target_partition_size is the default setting
         # and overwrites the num_shards
         num_shards = self.num_shards
         target_partition_size = self.target_partition_size
@@ -171,7 +175,8 @@ class HiveToDruidTransfer(BaseOperator):
 
         metric_names = [m['fieldName'] for m in self.metric_spec if m['type'] != 'count']
 
-        # Take all the columns, which are not the time dimension or a metric, as the dimension columns
+        # Take all the columns, which are not the time dimension
+        # or a metric, as the dimension columns
         dimensions = [c for c in columns if c not in metric_names and c != self.ts_dim]
 
         ingest_query_dict = {
@@ -227,6 +232,7 @@ class HiveToDruidTransfer(BaseOperator):
         }
 
         if self.hadoop_dependency_coordinates:
-            ingest_query_dict['hadoopDependencyCoordinates'] = self.hadoop_dependency_coordinates
+            ingest_query_dict['hadoopDependencyCoordinates'] \
+                = self.hadoop_dependency_coordinates
 
         return ingest_query_dict

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/hive_to_mysql.py
----------------------------------------------------------------------
diff --git a/airflow/operators/hive_to_mysql.py b/airflow/operators/hive_to_mysql.py
index 730bfb7..4dc25a6 100644
--- a/airflow/operators/hive_to_mysql.py
+++ b/airflow/operators/hive_to_mysql.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.hive_hooks import HiveServer2Hook
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.models import BaseOperator
@@ -56,7 +57,7 @@ class HiveToMySqlTransfer(BaseOperator):
     """
 
     template_fields = ('sql', 'mysql_table', 'mysql_preoperator',
-        'mysql_postoperator')
+                       'mysql_postoperator')
     template_ext = ('.sql',)
     ui_color = '#a0e08c'
 
@@ -87,7 +88,7 @@ class HiveToMySqlTransfer(BaseOperator):
         if self.bulk_load:
             tmpfile = NamedTemporaryFile()
             hive.to_csv(self.sql, tmpfile.name, delimiter='\t',
-                lineterminator='\n', output_header=False)
+                        lineterminator='\n', output_header=False)
         else:
             results = hive.get_records(self.sql)
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/hive_to_samba_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/hive_to_samba_operator.py b/airflow/operators/hive_to_samba_operator.py
index ef9d810..f6978ac 100644
--- a/airflow/operators/hive_to_samba_operator.py
+++ b/airflow/operators/hive_to_samba_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 import tempfile
 
 from airflow.hooks.hive_hooks import HiveServer2Hook

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/http_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/http_operator.py b/airflow/operators/http_operator.py
index 0507431..2cfc9c0 100644
--- a/airflow/operators/http_operator.py
+++ b/airflow/operators/http_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/jdbc_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/jdbc_operator.py b/airflow/operators/jdbc_operator.py
index 67fd84d..9e7f24d 100644
--- a/airflow/operators/jdbc_operator.py
+++ b/airflow/operators/jdbc_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.jdbc_hook import JdbcHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/latest_only_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/latest_only_operator.py b/airflow/operators/latest_only_operator.py
index 9362f85..1139774 100644
--- a/airflow/operators/latest_only_operator.py
+++ b/airflow/operators/latest_only_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -17,7 +17,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-
 from airflow.models import BaseOperator, SkipMixin
 from airflow.utils import timezone
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/mssql_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/mssql_operator.py b/airflow/operators/mssql_operator.py
index 9dd7cf6..1309be9 100644
--- a/airflow/operators/mssql_operator.py
+++ b/airflow/operators/mssql_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.mssql_hook import MsSqlHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/mssql_to_hive.py
----------------------------------------------------------------------
diff --git a/airflow/operators/mssql_to_hive.py b/airflow/operators/mssql_to_hive.py
index e459fd2..4dff9eb 100644
--- a/airflow/operators/mssql_to_hive.py
+++ b/airflow/operators/mssql_to_hive.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -121,7 +121,8 @@ class MsSqlToHiveTransfer(BaseOperator):
             for field in cursor.description:
                 col_count += 1
                 col_position = "Column{position}".format(position=col_count)
-                field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1])
+                field_dict[col_position if field[0] == '' else field[0]] \
+                    = self.type_map(field[1])
             csv_writer.writerows(cursor)
             f.flush()
             cursor.close()

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/mysql_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/mysql_operator.py b/airflow/operators/mysql_operator.py
index f7c94bf..2b940c7 100644
--- a/airflow/operators/mysql_operator.py
+++ b/airflow/operators/mysql_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/mysql_to_hive.py
----------------------------------------------------------------------
diff --git a/airflow/operators/mysql_to_hive.py b/airflow/operators/mysql_to_hive.py
index 34b5337..94d6608 100644
--- a/airflow/operators/mysql_to_hive.py
+++ b/airflow/operators/mysql_to_hive.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/oracle_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/oracle_operator.py b/airflow/operators/oracle_operator.py
index c08908b..84820c0 100644
--- a/airflow/operators/oracle_operator.py
+++ b/airflow/operators/oracle_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.oracle_hook import OracleHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/pig_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/pig_operator.py b/airflow/operators/pig_operator.py
index f7babcf..d22f19f 100644
--- a/airflow/operators/pig_operator.py
+++ b/airflow/operators/pig_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 import re
 
 from airflow.hooks.pig_hook import PigCliHook

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/postgres_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/postgres_operator.py b/airflow/operators/postgres_operator.py
index 5935f83..5ff6e9e 100644
--- a/airflow/operators/postgres_operator.py
+++ b/airflow/operators/postgres_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/presto_check_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/presto_check_operator.py b/airflow/operators/presto_check_operator.py
index 5caf87e..608aebf 100644
--- a/airflow/operators/presto_check_operator.py
+++ b/airflow/operators/presto_check_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,7 +18,8 @@
 # under the License.
 
 from airflow.hooks.presto_hook import PrestoHook
-from airflow.operators.check_operator import CheckOperator, ValueCheckOperator, IntervalCheckOperator
+from airflow.operators.check_operator import CheckOperator, \
+    ValueCheckOperator, IntervalCheckOperator
 from airflow.utils.decorators import apply_defaults
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/presto_to_mysql.py
----------------------------------------------------------------------
diff --git a/airflow/operators/presto_to_mysql.py b/airflow/operators/presto_to_mysql.py
index 0e2e7f7..8c621b5 100644
--- a/airflow/operators/presto_to_mysql.py
+++ b/airflow/operators/presto_to_mysql.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.presto_hook import PrestoHook
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.models import BaseOperator

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/python_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py
index 497bb76..88f3b1a 100644
--- a/airflow/operators/python_operator.py
+++ b/airflow/operators/python_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -164,14 +164,16 @@ class ShortCircuitOperator(PythonOperator, SkipMixin):
 
         self.log.info("Done.")
 
+
 class PythonVirtualenvOperator(PythonOperator):
     """
     Allows one to run a function in a virtualenv that is created and destroyed
     automatically (with certain caveats).
 
-    The function must be defined using def, and not be part of a class. All 
imports
-    must happen inside the function and no variables outside of the scope may 
be referenced.
-    A global scope variable named virtualenv_string_args will be available 
(populated by
+    The function must be defined using def, and not be
+    part of a class. All imports must happen inside the function
+    and no variables outside of the scope may be referenced. A global scope
+    variable named virtualenv_string_args will be available (populated by
     string_args). In addition, one can pass stuff through op_args and 
op_kwargs, and one
     can use a return value.
 
@@ -186,10 +188,12 @@ class PythonVirtualenvOperator(PythonOperator):
     :param python_version: The Python version to run the virtualenv with. Note 
that
         both 2 and 2.7 are acceptable forms.
     :type python_version: str
-    :param use_dill: Whether to use dill to serialize the args and result 
(pickle is default).
-        This allow more complex types but requires you to include dill in your 
requirements.
+    :param use_dill: Whether to use dill to serialize
+        the args and result (pickle is default). This allow more complex types
+        but requires you to include dill in your requirements.
     :type use_dill: bool
-    :param system_site_packages: Whether to include system_site_packages in 
your virtualenv.
+    :param system_site_packages: Whether to include
+        system_site_packages in your virtualenv.
         See virtualenv documentation for more information.
     :type system_site_packages: bool
     :param op_args: A list of positional arguments to pass to python_callable.
@@ -209,8 +213,11 @@ class PythonVirtualenvOperator(PythonOperator):
         processing templated fields, for examples ``['.sql', '.hql']``
     :type templates_exts: list(str)
     """
-    def __init__(self, python_callable, requirements=None, 
python_version=None, use_dill=False,
-                 system_site_packages=True, op_args=None, op_kwargs=None, 
string_args=None,
+    def __init__(self, python_callable,
+                 requirements=None,
+                 python_version=None, use_dill=False,
+                 system_site_packages=True,
+                 op_args=None, op_kwargs=None, string_args=None,
                  templates_dict=None, templates_exts=None, *args, **kwargs):
         super(PythonVirtualenvOperator, self).__init__(
             python_callable=python_callable,
@@ -227,22 +234,25 @@ class PythonVirtualenvOperator(PythonOperator):
         self.use_dill = use_dill
         self.system_site_packages = system_site_packages
         # check that dill is present if needed
-        dill_in_requirements = map(lambda x: x.lower().startswith('dill'), 
self.requirements)
+        dill_in_requirements = map(lambda x: x.lower().startswith('dill'),
+                                   self.requirements)
         if (not system_site_packages) and use_dill and not 
any(dill_in_requirements):
             raise AirflowException('If using dill, dill must be in the 
environment ' +
                                    'either via system_site_packages or 
requirements')
         # check that a function is passed, and that it is not a lambda
-        if (not isinstance(self.python_callable, types.FunctionType)
-                or self.python_callable.__name__ == (lambda x: 0).__name__):
+        if (not isinstance(self.python_callable,
+                           types.FunctionType) or 
(self.python_callable.__name__ ==
+                                                   (lambda x: 0).__name__)):
             raise AirflowException('{} only supports functions for 
python_callable arg',
                                    self.__class__.__name__)
         # check that args are passed iff python major version matches
-        if (python_version is not None
-                and str(python_version)[0] != str(sys.version_info[0])
-                and self._pass_op_args()):
+        if (python_version is not None and
+                str(python_version)[0] != str(sys.version_info[0]) and
+                self._pass_op_args()):
             raise AirflowException("Passing op_args or op_kwargs is not 
supported across "
                                    "different Python major versions "
-                                   "for PythonVirtualenvOperator. Please use 
string_args.")
+                                   "for PythonVirtualenvOperator. "
+                                   "Please use string_args.")
 
     def execute_callable(self):
         with TemporaryDirectory(prefix='venv') as tmp_dir:
@@ -314,8 +324,9 @@ class PythonVirtualenvOperator(PythonOperator):
                 else:
                     return pickle.load(f)
             except ValueError:
-                self.log.error("Error deserializing result. Note that result 
deserialization "
-                              "is not supported across major Python versions.")
+                self.log.error("Error deserializing result. "
+                               "Note that result deserialization "
+                               "is not supported across major Python 
versions.")
                 raise
 
     def _write_script(self, script_filename):
@@ -340,9 +351,11 @@ class PythonVirtualenvOperator(PythonOperator):
             cmd = ['{}/bin/pip'.format(tmp_dir), 'install']
             return cmd + self.requirements
 
-    def _generate_python_cmd(self, tmp_dir, script_filename, input_filename, 
output_filename, string_args_filename):
+    def _generate_python_cmd(self, tmp_dir, script_filename,
+                             input_filename, output_filename, 
string_args_filename):
         # direct path alleviates need to activate
-        return ['{}/bin/python'.format(tmp_dir), script_filename, 
input_filename, output_filename, string_args_filename]
+        return ['{}/bin/python'.format(tmp_dir), script_filename,
+                input_filename, output_filename, string_args_filename]
 
     def _generate_python_code(self):
         if self.use_dill:
@@ -352,11 +365,13 @@ class PythonVirtualenvOperator(PythonOperator):
         fn = self.python_callable
         # dont try to read pickle if we didnt pass anything
         if self._pass_op_args():
-            load_args_line = 'with open(sys.argv[1], "rb") as f: arg_dict = 
{}.load(f)'.format(pickling_library)
+            load_args_line = 'with open(sys.argv[1], "rb") as f: arg_dict = 
{}.load(f)'\
+                .format(pickling_library)
         else:
             load_args_line = 'arg_dict = {"args": [], "kwargs": {}}'
 
-        # no indents in original code so we can accept any type of indents in 
the original function
+        # no indents in original code so we can accept
+        # any type of indents in the original function
         # we deserialize args, call function, serialize result if necessary
         return dedent("""\
         import {pickling_library}
@@ -364,15 +379,15 @@ class PythonVirtualenvOperator(PythonOperator):
         {load_args_code}
         args = arg_dict["args"]
         kwargs = arg_dict["kwargs"]
-        with open(sys.argv[3], 'r') as f: virtualenv_string_args = 
list(map(lambda x: x.strip(), list(f)))
+        with open(sys.argv[3], 'r') as f:
+            virtualenv_string_args = list(map(lambda x: x.strip(), list(f)))
         {python_callable_lines}
         res = {python_callable_name}(*args, **kwargs)
-        with open(sys.argv[2], 'wb') as f: res is not None and 
{pickling_library}.dump(res, f)
-        """).format(
-                load_args_code=load_args_line,
-                python_callable_lines=dedent(inspect.getsource(fn)),
-                python_callable_name=fn.__name__,
-                pickling_library=pickling_library)
+        with open(sys.argv[2], 'wb') as f:
+            res is not None and {pickling_library}.dump(res, f)
+        """).format(load_args_code=load_args_line,
+                    python_callable_lines=dedent(inspect.getsource(fn)),
+                    python_callable_name=fn.__name__,
+                    pickling_library=pickling_library)
 
         self.log.info("Done.")
-

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/redshift_to_s3_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/redshift_to_s3_operator.py 
b/airflow/operators/redshift_to_s3_operator.py
index 6c2998a..9c1b621 100644
--- a/airflow/operators/redshift_to_s3_operator.py
+++ b/airflow/operators/redshift_to_s3_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.postgres_hook import PostgresHook
 from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/s3_file_transform_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/s3_file_transform_operator.py 
b/airflow/operators/s3_file_transform_operator.py
index f86153d..84a6eda 100644
--- a/airflow/operators/s3_file_transform_operator.py
+++ b/airflow/operators/s3_file_transform_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/s3_to_hive_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/s3_to_hive_operator.py 
b/airflow/operators/s3_to_hive_operator.py
index 6105823..09eb836 100644
--- a/airflow/operators/s3_to_hive_operator.py
+++ b/airflow/operators/s3_to_hive_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -246,17 +246,17 @@ class S3ToHiveTransfer(BaseOperator):
         field_names = self.field_dict.keys()
         if len(field_names) != len(header_list):
             self.log.warning("Headers count mismatch"
-                              "File headers:\n {header_list}\n"
-                              "Field names: \n {field_names}\n"
-                              "".format(**locals()))
+                             "File headers:\n {header_list}\n"
+                             "Field names: \n {field_names}\n"
+                             .format(**locals()))
             return False
         test_field_match = [h1.lower() == h2.lower()
                             for h1, h2 in zip(header_list, field_names)]
         if not all(test_field_match):
             self.log.warning("Headers do not match field names"
-                              "File headers:\n {header_list}\n"
-                              "Field names: \n {field_names}\n"
-                              "".format(**locals()))
+                             "File headers:\n {header_list}\n"
+                             "Field names: \n {field_names}\n"
+                             .format(**locals()))
             return False
         else:
             return True

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/s3_to_redshift_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/s3_to_redshift_operator.py 
b/airflow/operators/s3_to_redshift_operator.py
index 1394da1..0d7921e 100644
--- a/airflow/operators/s3_to_redshift_operator.py
+++ b/airflow/operators/s3_to_redshift_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.postgres_hook import PostgresHook
 from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/sensors.py
----------------------------------------------------------------------
diff --git a/airflow/operators/sensors.py b/airflow/operators/sensors.py
index 62c7b56..61fb4d0 100644
--- a/airflow/operators/sensors.py
+++ b/airflow/operators/sensors.py
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-c
+# -*- coding: utf-8 -*-
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/slack_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/slack_operator.py 
b/airflow/operators/slack_operator.py
index 3fa8913..c5a6945 100644
--- a/airflow/operators/slack_operator.py
+++ b/airflow/operators/slack_operator.py
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -53,7 +53,8 @@ class SlackAPIOperator(BaseOperator):
         if token is None and slack_conn_id is None:
             raise AirflowException('No valid Slack token nor slack_conn_id 
supplied.')
         if token is not None and slack_conn_id is not None:
-            raise AirflowException('Cannot determine Slack credential when 
both token and slack_conn_id are supplied.')
+            raise AirflowException('Cannot determine Slack credential '
+                                   'when both token and slack_conn_id are 
supplied.')
 
         self.token = token
         self.slack_conn_id = slack_conn_id
@@ -63,11 +64,14 @@ class SlackAPIOperator(BaseOperator):
 
     def construct_api_call_params(self):
         """
-        Used by the execute function. Allows templating on the source fields 
of the api_call_params dict before construction
+        Used by the execute function. Allows templating on the source fields
+        of the api_call_params dict before construction
 
         Override in child classes.
-        Each SlackAPIOperator child class is responsible for having a 
construct_api_call_params function
-        which sets self.api_call_params with a dict of API call parameters 
(https://api.slack.com/methods)
+        Each SlackAPIOperator child class is responsible for
+        having a construct_api_call_params function
+        which sets self.api_call_params with a dict of
+        API call parameters (https://api.slack.com/methods)
         """
 
         pass
@@ -111,7 +115,8 @@ class SlackAPIPostOperator(SlackAPIOperator):
                  text='No message has been set.\n'
                       'Here is a cat video instead\n'
                       'https://www.youtube.com/watch?v=J---aiyznGQ',
-                 
icon_url='https://raw.githubusercontent.com/airbnb/airflow/master/airflow/www/static/pin_100.png',
+                 icon_url='https://raw.githubusercontent.com'
+                          
'/airbnb/airflow/master/airflow/www/static/pin_100.png',
                  attachments=None,
                  *args, **kwargs):
         self.method = 'chat.postMessage'

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/cff8318b/airflow/operators/sqlite_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/sqlite_operator.py 
b/airflow/operators/sqlite_operator.py
index 91497f5..5b7213e 100644
--- a/airflow/operators/sqlite_operator.py
+++ b/airflow/operators/sqlite_operator.py
@@ -7,15 +7,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from airflow.hooks.sqlite_hook import SqliteHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults

Reply via email to