This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 8e23d4f  chore: Upgrade pylint to 2.5.3 and fix most new rules (#10101)
8e23d4f is described below

commit 8e23d4f369f35724b34b14def8a5a8bafb1d2ecb
Author: Will Barrett <w...@preset.io>
AuthorDate: Thu Jun 18 14:03:42 2020 -0700

    chore: Upgrade pylint to 2.5.3 and fix most new rules (#10101)
    
    * Bump pylint version to 2.5.3
    
    * Add a global disable for the most common new pylint error
    
    * Fix a bunch of files containing very few errors
    
    * More pylint tweakage, low-hanging fruit
    
    * More easy stuff...
    
    * Fix more erroring files
    
    * Fix the last couple of errors, clean pylint!
    
    * Black
    
    * Fix mypy issue in connectors/druid/models.py
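
The bulk of the diff below applies two mechanical rewrites that pylint 2.x
asks for. A condensed before/after sketch of both (function names here are
illustrative, not taken from the codebase):

    import logging

    logger = logging.getLogger(__name__)

    # logging-fstring-interpolation: an f-string handed to a logging call is
    # formatted even when the log level is disabled, so the lazy %-style
    # form is preferred.
    def register_before(name: str) -> None:
        logger.info(f"Registering blueprint: '{name}'")  # flagged

    def register_after(name: str) -> None:
        logger.info("Registering blueprint: %s", name)  # deferred formatting

    # no-else-return: an elif/else that follows a return is redundant, so
    # the branches are flattened into sequential ifs.
    def kind_before(tt: str) -> str:
        if tt == "DATE":
            return "date"
        elif tt == "TIMESTAMP":  # flagged
            return "timestamp"
        return "other"

    def kind_after(tt: str) -> str:
        if tt == "DATE":
            return "date"
        if tt == "TIMESTAMP":
            return "timestamp"
        return "other"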
---
 .pylintrc                                          |  4 +--
 requirements-dev.txt                               |  2 +-
 superset/app.py                                    |  3 +--
 superset/charts/api.py                             | 20 +++++++++------
 superset/commands/base.py                          |  2 --
 superset/common/query_object.py                    | 29 ++++++++++++++--------
 superset/config.py                                 |  2 +-
 superset/connectors/base/models.py                 |  4 +--
 superset/connectors/druid/models.py                | 15 ++++-------
 superset/dao/exceptions.py                         |  2 --
 superset/dashboards/api.py                         | 16 ++++++++----
 superset/dashboards/dao.py                         |  2 +-
 superset/datasets/api.py                           | 16 +++++++++---
 superset/datasets/dao.py                           |  4 +--
 superset/db_engine_specs/base.py                   | 10 +++-----
 superset/db_engine_specs/drill.py                  |  2 +-
 superset/db_engine_specs/hive.py                   | 19 ++++++++------
 superset/db_engine_specs/impala.py                 |  2 +-
 superset/db_engine_specs/pinot.py                  |  4 ++-
 superset/db_engine_specs/presto.py                 | 24 ++++++++++--------
 superset/db_engine_specs/sqlite.py                 |  5 ++--
 superset/models/annotations.py                     |  2 +-
 superset/models/core.py                            | 11 ++++----
 superset/models/dashboard.py                       |  2 +-
 superset/models/helpers.py                         |  1 +
 superset/models/schedules.py                       |  2 +-
 superset/models/sql_types/presto_sql_types.py      |  5 ++++
 superset/sql_lab.py                                | 20 ++++++++-------
 superset/sql_validators/presto_db.py               |  6 ++---
 superset/tasks/cache.py                            |  4 +--
 superset/tasks/schedules.py                        |  2 +-
 superset/tasks/slack_util.py                       |  2 +-
 .../utils/dashboard_filter_scopes_converter.py     |  2 +-
 superset/utils/log.py                              |  2 +-
 superset/utils/screenshots.py                      | 18 +++++++-------
 superset/views/annotations.py                      |  5 ++--
 superset/views/base.py                             |  5 ++--
 superset/views/dashboard/views.py                  |  7 +++---
 superset/views/database/decorators.py              |  8 +++---
 superset/views/utils.py                            | 10 +++-----
 40 files changed, 166 insertions(+), 135 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 42b8783..e887176 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -81,7 +81,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use"--disable=all --enable=classes
 # --disable=W"
-disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,b
 [...]
+disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,b
 [...]
 
 
 [REPORTS]
@@ -254,7 +254,7 @@ notes=FIXME,XXX
 [SIMILARITIES]
 
 # Minimum lines number of a similarity.
-min-similarity-lines=4
+min-similarity-lines=5
 
 # Ignore comments when computing similarities.
 ignore-comments=yes
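
For context, the commit leans on all three scopes of pylint suppression: the
repository-wide disable= list edited above, statement-level pragmas added
throughout the files below, and a whole-file opt-out (# pylint: skip-file,
applied to superset/connectors/druid/models.py). A minimal sketch of the
statement-level form, using a hypothetical function:

    def fetch_rows():  # pylint: disable=missing-function-docstring
        # The pragma silences one check for this statement only; checks in
        # the .pylintrc disable= list are off everywhere.
        return []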
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 207b6a9..5325ea6 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -28,7 +28,7 @@ psycopg2-binary==2.8.5
 pycodestyle==2.5.0
 pydruid==0.6.1
 pyhive==0.6.2
-pylint==1.9.2
+pylint==2.5.3
 redis==3.5.1
 requests==2.23.0
 statsd==3.3.0
diff --git a/superset/app.py b/superset/app.py
index 3e83fd5..7dc7aec 100644
--- a/superset/app.py
+++ b/superset/app.py
@@ -95,7 +95,6 @@ class SupersetAppInitializer:
         """
         Called after any other init tasks
         """
-        pass
 
     def configure_celery(self) -> None:
         celery_app.config_from_object(self.config["CELERY_CONFIG"])
@@ -593,7 +592,7 @@ class SupersetAppInitializer:
     def register_blueprints(self) -> None:
         for bp in self.config["BLUEPRINTS"]:
             try:
-                logger.info(f"Registering blueprint: '{bp.name}'")
+                logger.info("Registering blueprint: %s", bp.name)
                 self.flask_app.register_blueprint(bp)
             except Exception:  # pylint: disable=broad-except
                 logger.exception("blueprint registration failed")
diff --git a/superset/charts/api.py b/superset/charts/api.py
index 148a337..d309ef1 100644
--- a/superset/charts/api.py
+++ b/superset/charts/api.py
@@ -219,7 +219,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
         except ChartInvalidError as ex:
             return self.response_422(message=ex.normalized_messages())
         except ChartCreateFailedError as ex:
-            logger.error(f"Error creating model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["PUT"])
@@ -287,7 +289,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
         except ChartInvalidError as ex:
             return self.response_422(message=ex.normalized_messages())
         except ChartUpdateFailedError as ex:
-            logger.error(f"Error updating model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["DELETE"])
@@ -334,7 +338,9 @@ class ChartRestApi(BaseSupersetModelRestApi):
         except ChartForbiddenError:
             return self.response_403()
         except ChartDeleteFailedError as ex:
-            logger.error(f"Error deleting model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/", methods=["DELETE"])
@@ -386,9 +392,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
             return self.response(
                 200,
                 message=ngettext(
-                    f"Deleted %(num)d chart",
-                    f"Deleted %(num)d charts",
-                    num=len(item_ids),
+                    "Deleted %(num)d chart", "Deleted %(num)d charts", 
num=len(item_ids)
                 ),
             )
         except ChartNotFoundError:
@@ -464,13 +468,15 @@ class ChartRestApi(BaseSupersetModelRestApi):
                 headers=generate_download_headers("csv"),
                 mimetype="application/csv",
             )
-        elif result_format == ChartDataResultFormat.JSON:
+
+        if result_format == ChartDataResultFormat.JSON:
             response_data = simplejson.dumps(
                 {"result": payload}, default=json_int_dttm_ser, ignore_nan=True
             )
             resp = make_response(response_data, 200)
             resp.headers["Content-Type"] = "application/json; charset=utf-8"
             return resp
+
         raise self.response_400(message=f"Unsupported result_format: 
{result_format}")
 
     @expose("/<pk>/thumbnail/<digest>/", methods=["GET"])
diff --git a/superset/commands/base.py b/superset/commands/base.py
index 998a756..0bcf3a7 100644
--- a/superset/commands/base.py
+++ b/superset/commands/base.py
@@ -29,7 +29,6 @@ class BaseCommand(ABC):
         Run executes the command. Can raise command exceptions
         :raises: CommandException
         """
-        pass
 
     @abstractmethod
     def validate(self) -> None:
@@ -38,4 +37,3 @@ class BaseCommand(ABC):
         Will raise exception if validation fails
         :raises: CommandException
         """
-        pass
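
The pass removals in this file (and in several below) come from pylint's
unnecessary-pass check: a docstring is already a complete function body, so a
trailing pass is dead weight. For example:

    from abc import ABC, abstractmethod

    class BaseCommand(ABC):
        @abstractmethod
        def run(self) -> None:
            """Run executes the command. The docstring alone is a valid
            body, so no trailing pass is needed."""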
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index d09c75b..8de2165 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -141,8 +141,8 @@ class QueryObject:
         if is_sip_38 and groupby:
             self.columns += groupby
             logger.warning(
-                f"The field `groupby` is deprecated. Viz plugins should "
-                f"pass all selectables via the `columns` field"
+                "The field `groupby` is deprecated. Viz plugins should "
+                "pass all selectables via the `columns` field"
             )
 
         self.orderby = orderby or []
@@ -151,15 +151,18 @@ class QueryObject:
         for field in DEPRECATED_FIELDS:
             if field.old_name in kwargs:
                 logger.warning(
-                    f"The field `{field.old_name}` is deprecated, please use "
-                    f"`{field.new_name}` instead."
+                    "The field `%s` is deprecated, please use `%s` instead.",
+                    field.old_name,
+                    field.new_name,
                 )
                 value = kwargs[field.old_name]
                 if value:
                     if hasattr(self, field.new_name):
                         logger.warning(
-                            f"The field `{field.new_name}` is already 
populated, "
-                            f"replacing value with contents from 
`{field.old_name}`."
+                            "The field `%s` is already populated, "
+                            "replacing value with contents from `%s`.",
+                            field.new_name,
+                            field.old_name,
                         )
                     setattr(self, field.new_name, value)
 
@@ -167,16 +170,20 @@ class QueryObject:
         for field in DEPRECATED_EXTRAS_FIELDS:
             if field.old_name in kwargs:
                 logger.warning(
-                    f"The field `{field.old_name}` is deprecated and should be 
"
-                    f"passed to `extras` via the `{field.new_name}` property."
+                    "The field `%s` is deprecated and should "
+                    "be passed to `extras` via the `%s` property.",
+                    field.old_name,
+                    field.new_name,
                 )
                 value = kwargs[field.old_name]
                 if value:
                     if hasattr(self.extras, field.new_name):
                         logger.warning(
-                            f"The field `{field.new_name}` is already 
populated in "
-                            f"`extras`, replacing value with contents "
-                            f"from `{field.old_name}`."
+                            "The field `%s` is already populated in "
+                            "`extras`, replacing value with contents "
+                            "from `%s`.",
+                            field.new_name,
+                            field.old_name,
                         )
                     self.extras[field.new_name] = value
 
diff --git a/superset/config.py b/superset/config.py
index b862a9c..ad4c58f 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -898,7 +898,7 @@ if CONFIG_PATH_ENV_VAR in os.environ:
         print(f"Loaded your LOCAL configuration at [{cfg_path}]")
     except Exception:
         logger.exception(
-            f"Failed to import config for {CONFIG_PATH_ENV_VAR}={cfg_path}"
+            "Failed to import config for %s=%s", CONFIG_PATH_ENV_VAR, cfg_path
         )
         raise
 elif importlib.util.find_spec("superset_config"):
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index 5ba28e8..f2dad88 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -348,7 +348,7 @@ class BaseDatasource(
                     value = utils.cast_to_num(value)
                 if value == NULL_STRING:
                     return None
-                elif value == "<empty string>":
+                if value == "<empty string>":
                     return ""
             return value
 
@@ -516,7 +516,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
     export_fields: List[Any] = []
 
     def __repr__(self) -> str:
-        return self.column_name
+        return str(self.column_name)
 
     num_types = (
         "DOUBLE",
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index ccfff6e..9237a1f 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -14,8 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=C,R,W
-# pylint: disable=invalid-unary-operand-type
+# pylint: skip-file
 import json
 import logging
 import re
@@ -81,12 +80,7 @@ except ImportError:
     pass
 
 try:
-    from superset.utils.core import (
-        DimSelector,
-        DTTM_ALIAS,
-        FilterOperator,
-        flasher,
-    )
+    from superset.utils.core import DimSelector, DTTM_ALIAS, FilterOperator, flasher
 except ImportError:
     pass
 
@@ -845,7 +839,8 @@ class DruidDatasource(Model, BaseDatasource):
         else:
             granularity["type"] = "duration"
             granularity["duration"] = (
-                utils.parse_human_timedelta(period_name).total_seconds() * 1000  # type: ignore
+                utils.parse_human_timedelta(period_name).total_seconds()  # type: ignore
+                * 1000
             )
         return granularity
 
@@ -950,7 +945,7 @@ class DruidDatasource(Model, BaseDatasource):
 
     @staticmethod
     def metrics_and_post_aggs(
-        metrics: List[Metric], metrics_dict: Dict[str, DruidMetric],
+        metrics: List[Metric], metrics_dict: Dict[str, DruidMetric]
     ) -> Tuple["OrderedDict[str, Any]", "OrderedDict[str, Any]"]:
         # Separate metrics into those that are aggregations
         # and those that are post aggregations
diff --git a/superset/dao/exceptions.py b/superset/dao/exceptions.py
index 5a6bbbe..822b239 100644
--- a/superset/dao/exceptions.py
+++ b/superset/dao/exceptions.py
@@ -22,8 +22,6 @@ class DAOException(SupersetException):
     Base DAO exception class
     """
 
-    pass
-
 
 class DAOCreateFailedError(DAOException):
     """
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 21d4e79..53154ae 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -206,7 +206,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
         except DashboardInvalidError as ex:
             return self.response_422(message=ex.normalized_messages())
         except DashboardCreateFailedError as ex:
-            logger.error(f"Error creating model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["PUT"])
@@ -274,7 +276,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
         except DashboardInvalidError as ex:
             return self.response_422(message=ex.normalized_messages())
         except DashboardUpdateFailedError as ex:
-            logger.error(f"Error updating model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["DELETE"])
@@ -321,7 +325,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
         except DashboardForbiddenError:
             return self.response_403()
         except DashboardDeleteFailedError as ex:
-            logger.error(f"Error deleting model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/", methods=["DELETE"])
@@ -373,8 +379,8 @@ class DashboardRestApi(BaseSupersetModelRestApi):
             return self.response(
                 200,
                 message=ngettext(
-                    f"Deleted %(num)d dashboard",
-                    f"Deleted %(num)d dashboards",
+                    "Deleted %(num)d dashboard",
+                    "Deleted %(num)d dashboards",
                     num=len(item_ids),
                 ),
             )
diff --git a/superset/dashboards/dao.py b/superset/dashboards/dao.py
index 6e12cd5..28b685f 100644
--- a/superset/dashboards/dao.py
+++ b/superset/dashboards/dao.py
@@ -49,7 +49,7 @@ class DashboardDAO(BaseDAO):
 
     @staticmethod
     def update_charts_owners(model: Dashboard, commit: bool = True) -> Dashboard:
-        owners = [owner for owner in model.owners]
+        owners = list(model.owners)
         for slc in model.slices:
             slc.owners = list(set(owners) | set(slc.owners))
         if commit:
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index bafcebf..b1f4ef2 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -186,7 +186,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         except DatasetInvalidError as ex:
             return self.response_422(message=ex.normalized_messages())
         except DatasetCreateFailedError as ex:
-            logger.error(f"Error creating model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["PUT"])
@@ -254,7 +256,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         except DatasetInvalidError as ex:
             return self.response_422(message=ex.normalized_messages())
         except DatasetUpdateFailedError as ex:
-            logger.error(f"Error updating model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["DELETE"])
@@ -301,7 +305,9 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         except DatasetForbiddenError:
             return self.response_403()
         except DatasetDeleteFailedError as ex:
-            logger.error(f"Error deleting model {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+            )
             return self.response_422(message=str(ex))
 
     @expose("/export/", methods=["GET"])
@@ -401,5 +407,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
         except DatasetForbiddenError:
             return self.response_403()
         except DatasetRefreshFailedError as ex:
-            logger.error(f"Error refreshing dataset {self.__class__.__name__}: 
{ex}")
+            logger.error(
+                "Error refreshing dataset %s: %s", self.__class__.__name__, 
str(ex)
+            )
             return self.response_422(message=str(ex))
diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py
index ef20a69..0a253fa 100644
--- a/superset/datasets/dao.py
+++ b/superset/datasets/dao.py
@@ -46,7 +46,7 @@ class DatasetDAO(BaseDAO):
         try:
             return db.session.query(Database).filter_by(id=database_id).one_or_none()
         except SQLAlchemyError as ex:  # pragma: no cover
-            logger.error(f"Could not get database by id: {ex}")
+            logger.error("Could not get database by id: %s", str(ex))
             return None
 
     @staticmethod
@@ -55,7 +55,7 @@ class DatasetDAO(BaseDAO):
             database.get_table(table_name, schema=schema)
             return True
         except SQLAlchemyError as ex:  # pragma: no cover
-            logger.error(f"Got an error {ex} validating table: {table_name}")
+            logger.error("Got an error %s validating table: %s", str(ex), 
table_name)
             return False
 
     @staticmethod
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 86bbd11..c86ee9d 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -332,7 +332,6 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         set to is_dttm=True. Note that this only gets called when new
         columns are detected/created"""
         # TODO: Fix circular import caused by importing TableColumn
-        pass
 
     @classmethod
     def epoch_to_dttm(cls) -> str:
@@ -401,9 +400,11 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
                 .limit(limit)
             )
             return database.compile_sqla_query(qry)
-        elif LimitMethod.FORCE_LIMIT:
+
+        if LimitMethod.FORCE_LIMIT:
             parsed_query = sql_parse.ParsedQuery(sql)
             sql = parsed_query.set_or_update_query_limit(limit)
+
         return sql
 
     @classmethod
@@ -465,7 +466,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         Create table from contents of a csv. Note: this method does not create
         metadata for the table.
         """
-        df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs,)
+        df = cls.csv_to_df(filepath_or_buffer=filename, **csv_to_df_kwargs)
         engine = cls.get_engine(database)
         if table.schema:
             # only add schema when it is preset and non empty
@@ -529,7 +530,6 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         for handling the cursor and updating progress information in the
         query object"""
         # TODO: Fix circular import error caused by importing sql_lab.Query
-        pass
 
     @classmethod
     def extract_error_message(cls, ex: Exception) -> str:
@@ -573,14 +573,12 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
         Some database drivers like presto accept '{catalog}/{schema}' in
         the database component of the URL, that can be handled here.
         """
-        pass
 
     @classmethod
     def patch(cls) -> None:
         """
         TODO: Improve docstring and refactor implementation in Hive
         """
-        pass
 
     @classmethod
     def get_schema_names(cls, inspector: Inspector) -> List[str]:
diff --git a/superset/db_engine_specs/drill.py b/superset/db_engine_specs/drill.py
index 8da7cd8..e43fe1d 100644
--- a/superset/db_engine_specs/drill.py
+++ b/superset/db_engine_specs/drill.py
@@ -56,7 +56,7 @@ class DrillEngineSpec(BaseEngineSpec):
         tt = target_type.upper()
         if tt == "DATE":
             return f"TO_DATE('{dttm.date().isoformat()}', 'yyyy-MM-dd')"
-        elif tt == "TIMESTAMP":
+        if tt == "TIMESTAMP":
             return f"""TO_TIMESTAMP('{dttm.isoformat(sep=" ", 
timespec="seconds")}', 'yyyy-MM-dd HH:mm:ss')"""  # pylint: 
disable=line-too-long
         return None
 
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index f99b180..4cfcb4c 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -200,7 +200,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         tt = target_type.upper()
         if tt == "DATE":
             return f"CAST('{dttm.date().isoformat()}' AS DATE)"
-        elif tt == "TIMESTAMP":
+        if tt == "TIMESTAMP":
             return f"""CAST('{dttm.isoformat(sep=" ", 
timespec="microseconds")}' AS TIMESTAMP)"""  # pylint: disable=line-too-long
         return None
 
@@ -284,7 +284,9 @@ class HiveEngineSpec(PrestoEngineSpec):
             if log:
                 log_lines = log.splitlines()
                 progress = cls.progress(log_lines)
-                logger.info(f"Query {query_id}: Progress total: {progress}")
+                logger.info(
+                    "Query %s: Progress total: %s", str(query_id), 
str(progress)
+                )
                 needs_commit = False
                 if progress > query.progress:
                     query.progress = progress
@@ -294,21 +296,25 @@ class HiveEngineSpec(PrestoEngineSpec):
                     if tracking_url:
                         job_id = tracking_url.split("/")[-2]
                         logger.info(
-                            f"Query {query_id}: Found the tracking url: 
{tracking_url}"
+                            "Query %s: Found the tracking url: %s",
+                            str(query_id),
+                            tracking_url,
                         )
                         tracking_url = tracking_url_trans(tracking_url)
                         logger.info(
-                            f"Query {query_id}: Transformation applied: 
{tracking_url}"
+                            "Query %s: Transformation applied: %s",
+                            str(query_id),
+                            tracking_url,
                         )
                         query.tracking_url = tracking_url
-                        logger.info(f"Query {query_id}: Job id: {job_id}")
+                        logger.info("Query %s: Job id: %s", str(query_id), 
str(job_id))
                         needs_commit = True
                 if job_id and len(log_lines) > last_log_line:
                     # Wait for job id before logging things out
                     # this allows for prefixing all log lines and becoming
                     # searchable in something like Kibana
                     for l in log_lines[last_log_line:]:
-                        logger.info(f"Query {query_id}: [{job_id}] {l}")
+                        logger.info("Query %s: [%s] %s", str(query_id), 
str(job_id), l)
                     last_log_line = len(log_lines)
                 if needs_commit:
                     session.commit()
@@ -414,7 +420,6 @@ class HiveEngineSpec(PrestoEngineSpec):
         """
         # Do nothing in the URL object since instead this should modify
         # the configuraiton dictionary. See get_configuration_for_impersonation
-        pass
 
     @classmethod
     def get_configuration_for_impersonation(
diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py
index fd2dce5..59eaf01 100644
--- a/superset/db_engine_specs/impala.py
+++ b/superset/db_engine_specs/impala.py
@@ -47,7 +47,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
         tt = target_type.upper()
         if tt == "DATE":
             return f"CAST('{dttm.date().isoformat()}' AS DATE)"
-        elif tt == "TIMESTAMP":
+        if tt == "TIMESTAMP":
             return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS 
TIMESTAMP)"""
         return None
 
diff --git a/superset/db_engine_specs/pinot.py b/superset/db_engine_specs/pinot.py
index 9243fef..853d756 100644
--- a/superset/db_engine_specs/pinot.py
+++ b/superset/db_engine_specs/pinot.py
@@ -84,7 +84,9 @@ class PinotEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
             if not granularity:
                 raise NotImplementedError("No pinot grain spec for " + 
str(time_grain))
         else:
-            return TimestampExpression(f"{{col}}", col)
+            return TimestampExpression(
+                f"{{col}}", col  # pylint: 
disable=f-string-without-interpolation
+            )
         # In pinot the output is a string since there is no timestamp column like pg
         time_expr = f"DATETIMECONVERT({{col}}, '{tf}', '{tf}', 
'{granularity}')"
         return TimestampExpression(time_expr, col)
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index e8c9603..ee474d5 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -79,7 +79,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]:
     children_type = group["children"]
     if type_ == "ARRAY":
         return [{"name": column["name"], "type": children_type}]
-    elif type_ == "ROW":
+
+    if type_ == "ROW":
         nameless_columns = 0
         columns = []
         for child in utils.split(children_type, ","):
@@ -93,8 +94,8 @@ def get_children(column: Dict[str, str]) -> List[Dict[str, str]]:
                 nameless_columns += 1
             columns.append({"name": f"{column['name']}.{name.lower()}", 
"type": type_})
         return columns
-    else:
-        raise Exception(f"Unknown type {type_}!")
+
+    raise Exception(f"Unknown type {type_}!")
 
 
 class PrestoEngineSpec(BaseEngineSpec):
@@ -278,7 +279,7 @@ class PrestoEngineSpec(BaseEngineSpec):
                     if not (inner_type.endswith("array") or 
inner_type.endswith("row")):
                         stack.pop()
                 # We have an array of row objects (i.e. array(row(...)))
-                elif inner_type == "array" or inner_type == "row":
+                elif inner_type in ("array", "row"):
                     # Push a dummy object to represent the structural data type
                     stack.append(("", inner_type))
                 # We have an array of a basic data types(i.e. array(varchar)).
@@ -339,8 +340,9 @@ class PrestoEngineSpec(BaseEngineSpec):
                     )
                     result[structural_column_index]["default"] = None
                     continue
-                else:  # otherwise column is a basic data type
-                    column_type = presto_type_map[column.Type]()
+
+                # otherwise column is a basic data type
+                column_type = presto_type_map[column.Type]()
             except KeyError:
                 logger.info(
                     "Did not recognize type {} of column {}".format(  # 
pylint: disable=logging-format-interpolation
@@ -727,7 +729,7 @@ class PrestoEngineSpec(BaseEngineSpec):
     def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None:
         """Updates progress information"""
         query_id = query.id
-        logger.info(f"Query {query_id}: Polling the cursor for progress")
+        logger.info("Query %i: Polling the cursor for progress", query_id)
         polled = cursor.poll()
         # poll returns dict -- JSON status information or ``None``
         # if the query is done
@@ -761,7 +763,7 @@ class PrestoEngineSpec(BaseEngineSpec):
                         query.progress = progress
                     session.commit()
             time.sleep(1)
-            logger.info(f"Query {query_id}: Polling the cursor for progress")
+            logger.info("Query %i: Polling the cursor for progress", query_id)
             polled = cursor.poll()
 
     @classmethod
@@ -903,12 +905,14 @@ class PrestoEngineSpec(BaseEngineSpec):
             raise SupersetTemplateException(
                 "The table should have one partitioned field"
             )
-        elif not show_first and len(indexes[0]["column_names"]) > 1:
+
+        if not show_first and len(indexes[0]["column_names"]) > 1:
             raise SupersetTemplateException(
                 "The table should have a single partitioned field "
                 "to use this function. You may want to use "
                 "`presto.latest_sub_partition`"
             )
+
         column_names = indexes[0]["column_names"]
         part_fields = [(column_name, True) for column_name in column_names]
         sql = cls._partition_query(table_name, database, 1, part_fields)
@@ -947,7 +951,7 @@ class PrestoEngineSpec(BaseEngineSpec):
         indexes = database.get_indexes(table_name, schema)
         part_fields = indexes[0]["column_names"]
         for k in kwargs.keys():  # pylint: disable=consider-iterating-dictionary
-            if k not in k in part_fields:
+            if k not in k in part_fields:  # pylint: disable=comparison-with-itself
                 msg = "Field [{k}] is not part of the portioning key"
                 raise SupersetTemplateException(msg)
         if len(kwargs.keys()) != len(part_fields) - 1:
diff --git a/superset/db_engine_specs/sqlite.py b/superset/db_engine_specs/sqlite.py
index d824c32..863e5e6 100644
--- a/superset/db_engine_specs/sqlite.py
+++ b/superset/db_engine_specs/sqlite.py
@@ -64,15 +64,14 @@ class SqliteEngineSpec(BaseEngineSpec):
                 cache=database.table_cache_enabled,
                 cache_timeout=database.table_cache_timeout,
             )
-        elif datasource_type == "view":
+        if datasource_type == "view":
             return database.get_all_view_names_in_schema(
                 schema=schema,
                 force=True,
                 cache=database.table_cache_enabled,
                 cache_timeout=database.table_cache_timeout,
             )
-        else:
-            raise Exception(f"Unsupported datasource_type: {datasource_type}")
+        raise Exception(f"Unsupported datasource_type: {datasource_type}")
 
     @classmethod
     def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
diff --git a/superset/models/annotations.py b/superset/models/annotations.py
index ec8d3c0..b2672fb 100644
--- a/superset/models/annotations.py
+++ b/superset/models/annotations.py
@@ -34,7 +34,7 @@ class AnnotationLayer(Model, AuditMixinNullable):
     descr = Column(Text)
 
     def __repr__(self) -> str:
-        return self.name
+        return str(self.name)
 
 
 class Annotation(Model, AuditMixinNullable):
diff --git a/superset/models/core.py b/superset/models/core.py
index ed90e4a..3fe92a3 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -170,7 +170,9 @@ class Database(
         except Exception as ex:  # pylint: disable=broad-except
             # function_names property is used in bulk APIs and should not hard crash
             # more info in: https://github.com/apache/incubator-superset/issues/9678
-            logger.error(f"Failed to fetch database function names with error: 
{ex}")
+            logger.error(
+                "Failed to fetch database function names with error: %s", 
str(ex)
+            )
         return []
 
     @property
@@ -207,7 +209,7 @@ class Database(
     @property
     def backend(self) -> str:
         sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
-        return sqlalchemy_url.get_backend_name()
+        return sqlalchemy_url.get_backend_name()  # pylint: disable=no-member
 
     @property
     def metadata_cache_timeout(self) -> Dict[str, Any]:
@@ -452,8 +454,7 @@ class Database(
         return self.db_engine_spec.get_all_datasource_names(self, "table")
 
     @cache_util.memoized_func(
-        key=lambda *args, **kwargs: "db:{}:schema:None:view_list",
-        attribute_in_key="id",
+        key=lambda *args, **kwargs: "db:{}:schema:None:view_list", attribute_in_key="id"
     )
     def get_all_view_names_in_database(
         self,
@@ -652,7 +653,7 @@ class Database(
     @utils.memoized
     def get_dialect(self) -> Dialect:
         sqla_url = url.make_url(self.sqlalchemy_uri_decrypted)
-        return sqla_url.get_dialect()()
+        return sqla_url.get_dialect()()  # pylint: disable=no-member
 
 
 sqla.event.listen(Database, "after_insert", security_manager.set_perm)
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index d10809e..b34906f 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -42,7 +42,7 @@ from sqlalchemy.orm.mapper import Mapper
 
 from superset import app, ConnectorRegistry, db, is_feature_enabled, security_manager
 from superset.models.helpers import AuditMixinNullable, ImportMixin
-from superset.models.slice import Slice as Slice
+from superset.models.slice import Slice
 from superset.models.tags import DashboardUpdater
 from superset.models.user_attributes import UserAttribute
 from superset.tasks.thumbnails import cache_dashboard_thumbnail
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 4ffe5e9..d003f55 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -363,6 +363,7 @@ class AuditMixinNullable(AuditMixin):
             nullable=True,
         )
 
+    @property
     def changed_by_name(self) -> str:
         if self.created_by:
             return escape("{}".format(self.created_by))
diff --git a/superset/models/schedules.py b/superset/models/schedules.py
index f66a237..57ff52d 100644
--- a/superset/models/schedules.py
+++ b/superset/models/schedules.py
@@ -90,6 +90,6 @@ class SliceEmailSchedule(Model, AuditMixinNullable, ImportMixin, EmailSchedule):
 def get_scheduler_model(report_type: ScheduleType) -> Optional[Type[EmailSchedule]]:
     if report_type == ScheduleType.dashboard:
         return DashboardEmailSchedule
-    elif report_type == ScheduleType.slice:
+    if report_type == ScheduleType.slice:
         return SliceEmailSchedule
     return None
diff --git a/superset/models/sql_types/presto_sql_types.py b/superset/models/sql_types/presto_sql_types.py
index 47486cf..d6f6d39 100644
--- a/superset/models/sql_types/presto_sql_types.py
+++ b/superset/models/sql_types/presto_sql_types.py
@@ -29,6 +29,7 @@ class TinyInteger(Integer):
     A type for tiny ``int`` integers.
     """
 
+    @property
     def python_type(self) -> Type[int]:
         return int
 
@@ -42,6 +43,7 @@ class Interval(TypeEngine):
     A type for intervals.
     """
 
+    @property
     def python_type(self) -> Optional[Type[Any]]:
         return None
 
@@ -55,6 +57,7 @@ class Array(TypeEngine):
     A type for arrays.
     """
 
+    @property
     def python_type(self) -> Optional[Type[List[Any]]]:
         return list
 
@@ -68,6 +71,7 @@ class Map(TypeEngine):
     A type for maps.
     """
 
+    @property
     def python_type(self) -> Optional[Type[Dict[Any, Any]]]:
         return dict
 
@@ -81,6 +85,7 @@ class Row(TypeEngine):
     A type for rows.
     """
 
+    @property
     def python_type(self) -> Optional[Type[Any]]:
         return None
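
The @property additions above bring these overrides in line with SQLAlchemy,
which declares TypeEngine.python_type as a property; overriding a property
with a plain method is what pylint's invalid-overridden-method check reports.
For instance:

    from typing import Type

    from sqlalchemy.types import Integer

    class TinyInteger(Integer):
        """A type for tiny ``int`` integers."""

        @property
        def python_type(self) -> Type[int]:
            # Matches the base class, where python_type is a property.
            return int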
 
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index d9f5b38..4d3597c 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -96,9 +96,9 @@ def handle_query_error(
 
 def get_query_backoff_handler(details: Dict[Any, Any]) -> None:
     query_id = details["kwargs"]["query_id"]
-    logger.error(f"Query with id `{query_id}` could not be retrieved")
+    logger.error("Query with id `%s` could not be retrieved", str(query_id))
     stats_logger.incr("error_attempting_orm_query_{}".format(details["tries"] 
- 1))
-    logger.error(f"Query {query_id}: Sleeping for a sec before retrying...")
+    logger.error("Query %s: Sleeping for a sec before retrying...", 
str(query_id))
 
 
 def get_query_giveup_handler(_: Any) -> None:
@@ -287,7 +287,7 @@ def execute_sql_statement(
 def _serialize_payload(
     payload: Dict[Any, Any], use_msgpack: Optional[bool] = False
 ) -> Union[bytes, str]:
-    logger.debug(f"Serializing to msgpack: {use_msgpack}")
+    logger.debug("Serializing to msgpack: %r", use_msgpack)
     if use_msgpack:
         return msgpack.dumps(payload, default=json_iso_dttm_ser, use_bin_type=True)
 
@@ -360,9 +360,9 @@ def execute_sql_statements(  # pylint: disable=too-many-arguments, too-many-loca
     # Breaking down into multiple statements
     parsed_query = ParsedQuery(rendered_query)
     statements = parsed_query.get_statements()
-    logger.info(f"Query {query_id}: Executing {len(statements)} statement(s)")
+    logger.info("Query %s: Executing %i statement(s)", str(query_id), 
len(statements))
 
-    logger.info(f"Query {query_id}: Set query to 'running'")
+    logger.info("Query %s: Set query to 'running'", str(query_id))
     query.status = QueryStatus.RUNNING
     query.start_running_time = now_as_float()
     session.commit()
@@ -386,7 +386,7 @@ def execute_sql_statements(  # pylint: disable=too-many-arguments, too-many-loca
 
                 # Run statement
                 msg = f"Running statement {i+1} out of {statement_count}"
-                logger.info(f"Query {query_id}: {msg}")
+                logger.info("Query %s: %s", str(query_id), msg)
                 query.set_extra_json_key("progress", msg)
                 session.commit()
                 try:
@@ -437,7 +437,9 @@ def execute_sql_statements(  # pylint: disable=too-many-arguments, too-many-loca
 
     if store_results and results_backend:
         key = str(uuid.uuid4())
-        logger.info(f"Query {query_id}: Storing results in results backend, 
key: {key}")
+        logger.info(
+            "Query %s: Storing results in results backend, key: %s", 
str(query_id), key
+        )
         with stats_timing("sqllab.query.results_backend_write", stats_logger):
             with stats_timing(
                 "sqllab.query.results_backend_write_serialization", 
stats_logger
@@ -451,9 +453,9 @@ def execute_sql_statements(  # pylint: disable=too-many-arguments, too-many-loca
 
             compressed = zlib_compress(serialized_payload)
             logger.debug(
-                f"*** serialized payload size: {getsizeof(serialized_payload)}"
+                "*** serialized payload size: %i", 
getsizeof(serialized_payload)
             )
-            logger.debug(f"*** compressed payload size: 
{getsizeof(compressed)}")
+            logger.debug("*** compressed payload size: %i", 
getsizeof(compressed))
             results_backend.set(key, compressed, cache_timeout)
         query.results_key = key
 
diff --git a/superset/sql_validators/presto_db.py b/superset/sql_validators/presto_db.py
index 6c5bb30..ccafb98 100644
--- a/superset/sql_validators/presto_db.py
+++ b/superset/sql_validators/presto_db.py
@@ -138,7 +138,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
                 end_column=end_column,
             )
         except Exception as ex:
-            logger.exception(f"Unexpected error running validation query: 
{ex}")
+            logger.exception("Unexpected error running validation query: %s", 
str(ex))
             raise ex
 
     @classmethod
@@ -156,7 +156,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
         parsed_query = ParsedQuery(sql)
         statements = parsed_query.get_statements()
 
-        logger.info(f"Validating {len(statements)} statement(s)")
+        logger.info("Validating %i statement(s)", len(statements))
         engine = database.get_sqla_engine(
             schema=schema,
             nullpool=True,
@@ -174,6 +174,6 @@ class PrestoDBSQLValidator(BaseSQLValidator):
                     )
                     if annotation:
                         annotations.append(annotation)
-        logger.debug(f"Validation found {len(annotations)} error(s)")
+        logger.debug("Validation found %i error(s)", len(annotations))
 
         return annotations
diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py
index a5b0199..54b0dc1 100644
--- a/superset/tasks/cache.py
+++ b/superset/tasks/cache.py
@@ -275,7 +275,7 @@ def cache_warmup(
         logger.error(message)
         return message
 
-    logger.info(f"Loading {class_.__name__}")
+    logger.info("Loading %s", class_.__name__)
     try:
         strategy = class_(*args, **kwargs)
         logger.info("Success!")
@@ -287,7 +287,7 @@ def cache_warmup(
     results: Dict[str, List[str]] = {"success": [], "errors": []}
     for url in strategy.get_urls():
         try:
-            logger.info(f"Fetching {url}")
+            logger.info("Fetching %s", url)
             request.urlopen(url)
             results["success"].append(url)
         except URLError:
diff --git a/superset/tasks/schedules.py b/superset/tasks/schedules.py
index 3930beb..9948e94 100644
--- a/superset/tasks/schedules.py
+++ b/superset/tasks/schedules.py
@@ -488,7 +488,7 @@ def schedule_email_report(  # pylint: disable=unused-argument
     recipients = recipients or schedule.recipients
     slack_channel = slack_channel or schedule.slack_channel
     logger.info(
-        f"Starting report for slack: {slack_channel} and recipients: 
{recipients}."
+        "Starting report for slack: %s and recipients: %s.", slack_channel, 
recipients
     )
 
     if report_type == ScheduleType.dashboard:
diff --git a/superset/tasks/slack_util.py b/superset/tasks/slack_util.py
index 018308d..ef647eb 100644
--- a/superset/tasks/slack_util.py
+++ b/superset/tasks/slack_util.py
@@ -42,5 +42,5 @@ def deliver_slack_msg(
             channels=slack_channel, file=file, initial_comment=body, title=subject
         ),
     )
-    logger.info(f"Sent the report to the slack {slack_channel}")
+    logger.info("Sent the report to the slack %s", slack_channel)
     assert response["file"], str(response)  # the uploaded file
diff --git a/superset/utils/dashboard_filter_scopes_converter.py b/superset/utils/dashboard_filter_scopes_converter.py
index d95582d..db897fa 100644
--- a/superset/utils/dashboard_filter_scopes_converter.py
+++ b/superset/utils/dashboard_filter_scopes_converter.py
@@ -49,7 +49,7 @@ def convert_filter_scopes(
                 "immune": current_filter_immune,
             }
         else:
-            logging.info(f"slice [{filter_id}] has invalid field: 
{filter_field}")
+            logging.info("slice [%i] has invalid field: %s", filter_id, 
filter_field)
 
     for filter_slice in filters:
         filter_fields: Dict[str, Dict[str, Any]] = {}
diff --git a/superset/utils/log.py b/superset/utils/log.py
index b31abce..382d780 100644
--- a/superset/utils/log.py
+++ b/superset/utils/log.py
@@ -134,7 +134,7 @@ def get_event_logger_from_cfg_value(cfg_value: Any) -> AbstractEventLogger:
             "of superset.utils.log.AbstractEventLogger."
         )
 
-    logging.info(f"Configured event logger of type {type(result)}")
+    logging.info("Configured event logger of type %s", type(result))
     return cast(AbstractEventLogger, result)
 
 
diff --git a/superset/utils/screenshots.py b/superset/utils/screenshots.py
index b2f222f..3db41b1 100644
--- a/superset/utils/screenshots.py
+++ b/superset/utils/screenshots.py
@@ -155,14 +155,14 @@ class AuthWebDriverProxy:
         driver.set_window_size(*self._window)
         driver.get(url)
         img: Optional[bytes] = None
-        logger.debug(f"Sleeping for {SELENIUM_HEADSTART} seconds")
+        logger.debug("Sleeping for %i seconds", SELENIUM_HEADSTART)
         time.sleep(SELENIUM_HEADSTART)
         try:
-            logger.debug(f"Wait for the presence of {element_name}")
+            logger.debug("Wait for the presence of %s", element_name)
             element = WebDriverWait(driver, 10).until(
                 EC.presence_of_element_located((By.CLASS_NAME, element_name))
             )
-            logger.debug(f"Wait for .loading to be done")
+            logger.debug("Wait for .loading to be done")
             WebDriverWait(driver, 60).until_not(
                 EC.presence_of_all_elements_located((By.CLASS_NAME, "loading"))
             )
@@ -226,7 +226,7 @@ class BaseScreenshot:
                 user=user, thumb_size=thumb_size, cache=cache
             )
         else:
-            logger.info(f"Loaded thumbnail from cache: {self.cache_key}")
+            logger.info("Loaded thumbnail from cache: %s", self.cache_key)
         if payload:
             return BytesIO(payload)
         return None
@@ -259,7 +259,7 @@ class BaseScreenshot:
             logger.info("Thumb already cached, skipping...")
             return None
         thumb_size = thumb_size or self.thumb_size
-        logger.info(f"Processing url for thumbnail: {cache_key}")
+        logger.info("Processing url for thumbnail: %s", cache_key)
 
         payload = None
 
@@ -277,7 +277,7 @@ class BaseScreenshot:
                 payload = None
 
         if payload and cache:
-            logger.info(f"Caching thumbnail: {cache_key} {cache}")
+            logger.info("Caching thumbnail: %s %s", cache_key, str(cache))
             cache.set(cache_key, payload)
         return payload
 
@@ -291,13 +291,13 @@ class BaseScreenshot:
     ) -> bytes:
         thumb_size = thumb_size or cls.thumb_size
         img = Image.open(BytesIO(img_bytes))
-        logger.debug(f"Selenium image size: {img.size}")
+        logger.debug("Selenium image size: %s", str(img.size))
         if crop and img.size[1] != cls.window_size[1]:
             desired_ratio = float(cls.window_size[1]) / cls.window_size[0]
             desired_width = int(img.size[0] * desired_ratio)
-            logger.debug(f"Cropping to: {img.size[0]}*{desired_width}")
+            logger.debug("Cropping to: %s*%s", str(img.size[0]), 
str(desired_width))
             img = img.crop((0, 0, img.size[0], desired_width))
-        logger.debug(f"Resizing to {thumb_size}")
+        logger.debug("Resizing to %s", str(thumb_size))
         img = img.resize(thumb_size, Image.ANTIALIAS)
         new_img = BytesIO()
         if output != "png":
diff --git a/superset/views/annotations.py b/superset/views/annotations.py
index a8c26c7..03473bd 100644
--- a/superset/views/annotations.py
+++ b/superset/views/annotations.py
@@ -23,8 +23,7 @@ from wtforms.validators import StopValidation
 
 from superset.constants import RouteMethod
 from superset.models.annotations import Annotation, AnnotationLayer
-
-from .base import SupersetModelView
+from superset.views.base import SupersetModelView
 
 
 class StartEndDttmValidator:  # pylint: disable=too-few-public-methods
@@ -35,7 +34,7 @@ class StartEndDttmValidator:  # pylint: disable=too-few-public-methods
     def __call__(self, form: Dict[str, Any], field: Any) -> None:
         if not form["start_dttm"].data and not form["end_dttm"].data:
             raise StopValidation(_("annotation start time or end time is 
required."))
-        elif (
+        if (
             form["end_dttm"].data
             and form["start_dttm"].data
             and form["end_dttm"].data < form["start_dttm"].data
diff --git a/superset/views/base.py b/superset/views/base.py
index b95be11..6747f44 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -212,7 +212,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
     try:
         table.get_sqla_table_object()
     except Exception as ex:
-        logger.exception(f"Got an error in pre_add for {table.name}")
+        logger.exception("Got an error in pre_add for %s", table.name)
         raise Exception(
             _(
                 "Table [%{table}s] could not be found, "
@@ -498,8 +498,7 @@ def check_ownership(obj: Any, raise_if_false: bool = True) -> bool:
         return True
     if raise_if_false:
         raise security_exception
-    else:
-        return False
+    return False
 
 
 def bind_field(
diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py
index 2d68cbc..a446bf3 100644
--- a/superset/views/dashboard/views.py
+++ b/superset/views/dashboard/views.py
@@ -29,15 +29,14 @@ from superset import app, db, event_logger
 from superset.constants import RouteMethod
 from superset.typing import FlaskResponse
 from superset.utils import core as utils
-
-from ..base import (
+from superset.views.base import (
     BaseSupersetView,
     check_ownership,
     DeleteMixin,
     generate_download_headers,
     SupersetModelView,
 )
-from .mixin import DashboardMixin
+from superset.views.dashboard.mixin import DashboardMixin
 
 
 class DashboardModelView(
@@ -95,7 +94,7 @@ class DashboardModelView(
             item.owners.append(g.user)
         utils.validate_json(item.json_metadata)
         utils.validate_json(item.position_json)
-        owners = [o for o in item.owners]
+        owners = list(item.owners)
         for slc in item.slices:
             slc.owners = list(set(owners) | set(slc.owners))
 
diff --git a/superset/views/database/decorators.py b/superset/views/database/decorators.py
index 071b661..2cea2f9 100644
--- a/superset/views/database/decorators.py
+++ b/superset/views/database/decorators.py
@@ -51,14 +51,16 @@ def check_datasource_access(f: Callable[..., Any]) -> Callable[..., Any]:
             )
             return self.response_404()
         if not self.appbuilder.sm.can_access_table(
-            database, Table(table_name_parsed, schema_name_parsed),
+            database, Table(table_name_parsed, schema_name_parsed)
         ):
             self.stats_logger.incr(
                 f"permisssion_denied_{self.__class__.__name__}.select_star"
             )
             logger.warning(
-                f"Permission denied for user {g.user} on table: 
{table_name_parsed} "
-                f"schema: {schema_name_parsed}"
+                "Permission denied for user %s on table: %s schema: %s",
+                g.user,
+                table_name_parsed,
+                schema_name_parsed,
             )
             return self.response_404()
         return f(self, database, table_name_parsed, schema_name_parsed)
diff --git a/superset/views/utils.py b/superset/views/utils.py
index 73c3a54..6248865 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -460,7 +460,7 @@ def check_slice_perms(_self: Any, slice_id: int) -> None:
 def _deserialize_results_payload(
     payload: Union[bytes, str], query: Query, use_msgpack: Optional[bool] = False
 ) -> Dict[str, Any]:
-    logger.debug(f"Deserializing from msgpack: {use_msgpack}")
+    logger.debug("Deserializing from msgpack: %r", use_msgpack)
     if use_msgpack:
         with stats_timing(
             "sqllab.query.results_backend_msgpack_deserialize", stats_logger
@@ -482,11 +482,9 @@ def _deserialize_results_payload(
         )
 
         return ds_payload
-    else:
-        with stats_timing(
-            "sqllab.query.results_backend_json_deserialize", stats_logger
-        ):
-            return json.loads(payload)
+
+    with stats_timing("sqllab.query.results_backend_json_deserialize", 
stats_logger):
+        return json.loads(payload)
 
 
 def get_cta_schema_name(
