This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 088ecdd0bf refactor: Enable G logging rules and comprehensive ruff improvements (#35081)
088ecdd0bf is described below

commit 088ecdd0bffaa426e4a7852c2edc25b4231709bb
Author: Maxime Beauchemin <maximebeauche...@gmail.com>
AuthorDate: Mon Sep 15 12:42:49 2025 -0700

    refactor: Enable G logging rules and comprehensive ruff improvements (#35081)
    
    Co-authored-by: Claude <nore...@anthropic.com>
---
 docker/pythonpath_dev/superset_config.py           |   2 +-
 pyproject.toml                                     |   2 +
 superset-extensions-cli/tests/test_cli_validate.py |   4 +-
 superset/app.py                                    |   4 +-
 superset/cli/examples.py                           |   2 +-
 superset/commands/database/uploaders/csv_reader.py |   6 +-
 superset/commands/theme/seed.py                    |   9 +-
 superset/commands/theme/set_system_theme.py        |   4 +-
 superset/daos/datasource.py                        |   2 +-
 superset/daos/theme.py                             |   9 +-
 superset/examples/bart_lines.py                    |   2 +-
 superset/examples/birth_names.py                   |   7 +-
 superset/examples/multiformat_time_series.py       |   4 +-
 superset/examples/paris.py                         |   2 +-
 superset/examples/random_time_series.py            |   2 +-
 superset/examples/sf_population_polygons.py        |   2 +-
 superset/extensions/discovery.py                   |  15 ++-
 superset/extensions/local_extensions_watcher.py    |  15 +--
 superset/extensions/utils.py                       |  25 ++--
 superset/migrations/shared/catalogs.py             |  20 +++-
 superset/migrations/shared/migrate_viz/base.py     |  10 +-
 superset/migrations/shared/security_converge.py    |  16 +--
 superset/migrations/shared/utils.py                | 133 +++++++++++++++++----
 ...2018-12-11_22-03_fb13d49b72f9_better_filters.py |   4 +-
 ..._14-13_3325d4caccc8_dashboard_scoped_filters.py |   6 +-
 ...978245563a02_migrate_iframe_to_dash_markdown.py |   2 +-
 ...5b9441_rename_big_viz_total_form_data_fields.py |  10 +-
 ...d1d2_move_pivot_table_v2_legacy_order_by_to_.py |  10 +-
 ...delete_obsolete_druid_nosql_slice_parameters.py |   4 +-
 ...6f8b1280_cleanup_erroneous_parent_filter_ids.py |   2 +-
 ...9_17-54_ee179a490af9_deckgl_path_width_units.py |   2 +-
 ...9123a_update_charts_with_old_time_comparison.py |  10 +-
 ...2_convert_metric_currencies_from_str_to_json.py |   4 +-
 superset/models/helpers.py                         |   2 +-
 superset/stats_logger.py                           |  16 +--
 superset/tasks/cache.py                            |   6 +-
 superset/themes/api.py                             |  16 +--
 superset/utils/screenshot_utils.py                 |  19 +--
 superset/utils/webdriver.py                        |  12 +-
 superset/views/base.py                             |   9 +-
 tests/common/logger_utils.py                       |  20 +++-
 tests/integration_tests/core_tests.py              |   2 +-
 .../dashboards/superset_factory_util.py            |  16 +--
 tests/unit_tests/daos/test_theme_dao.py            |  16 ++-
 tests/unit_tests/migrations/shared/utils_test.py   |  10 +-
 tests/unit_tests/utils/test_screenshot_utils.py    |  17 +--
 tests/unit_tests/utils/webdriver_test.py           |  12 +-
 47 files changed, 340 insertions(+), 184 deletions(-)
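
Most of the diff below converts eager f-string logging calls to lazy %-style
arguments, which is what ruff's G (flake8-logging-format) rules enforce. A
minimal sketch of the pattern (hypothetical logger and value, not taken from
the diff):

    import logging

    logger = logging.getLogger(__name__)
    table_name = "birth_names"

    # Flagged by G004: the f-string is rendered even when DEBUG is disabled
    logger.debug(f"Creating table [{table_name}] reference")

    # G-compliant: formatting is deferred until the record is actually emitted
    logger.debug("Creating table [%s] reference", table_name)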

diff --git a/docker/pythonpath_dev/superset_config.py 
b/docker/pythonpath_dev/superset_config.py
index 6d80d254a6..d88d9899c2 100644
--- a/docker/pythonpath_dev/superset_config.py
+++ b/docker/pythonpath_dev/superset_config.py
@@ -138,7 +138,7 @@ try:
     from superset_config_docker import *  # noqa: F403
 
     logger.info(
-        f"Loaded your Docker configuration at 
[{superset_config_docker.__file__}]"
+        "Loaded your Docker configuration at [%s]", 
superset_config_docker.__file__
     )
 except ImportError:
     logger.info("Using default Docker config...")
diff --git a/pyproject.toml b/pyproject.toml
index 9d83616441..1451c6dadf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -313,6 +313,7 @@ select = [
     "E",
     "F",
     "F",
+    "G",
     "I",
     "N",
     "PT",
@@ -328,6 +329,7 @@ ignore = [
     "PT006",
     "T201",
     "N999",
+    "G201",
 ]
 extend-select = ["I"]
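
G201, added to the ignore list above, is the rule that prefers
logger.exception(...) over logger.error(..., exc_info=True); ignoring it keeps
both spellings allowed. A minimal sketch of the two forms the rule
distinguishes (hypothetical handler, not taken from the diff):

    import logging

    logger = logging.getLogger(__name__)

    try:
        1 / 0
    except ZeroDivisionError:
        # G201 would flag this call and suggest the one below
        logger.error("division failed", exc_info=True)
        # equivalent, and what G201 recommends
        logger.exception("division failed")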
 
diff --git a/superset-extensions-cli/tests/test_cli_validate.py 
b/superset-extensions-cli/tests/test_cli_validate.py
index d38e936f5a..e3f7e6a139 100644
--- a/superset-extensions-cli/tests/test_cli_validate.py
+++ b/superset-extensions-cli/tests/test_cli_validate.py
@@ -160,7 +160,9 @@ def 
test_validate_npm_handles_file_not_found_exception(mock_run, mock_which):
 def test_validate_npm_does_not_catch_other_subprocess_exceptions(
     mock_run, mock_which, exception_type
 ):
-    """Test validate_npm does not catch OSError and PermissionError (they 
propagate up)."""
+    """
+    Test validate_npm does not catch OSError and PermissionError (they 
propagate up).
+    """
     mock_which.return_value = "/usr/bin/npm"
     mock_run.side_effect = exception_type("Test error")
 
diff --git a/superset/app.py b/superset/app.py
index a0517b773c..54f1b79bae 100644
--- a/superset/app.py
+++ b/superset/app.py
@@ -100,8 +100,8 @@ class SupersetApp(Flask):
                 return super().send_static_file(filename)
             except NotFound:
                 logger.debug(
-                    "Webpack hot-update file not found (likely HMR "
-                    f"race condition): {filename}"
+                    "Webpack hot-update file not found (likely HMR race 
condition): %s",
+                    filename,
                 )
                 return Response("", status=204)  # No Content
         return super().send_static_file(filename)
diff --git a/superset/cli/examples.py b/superset/cli/examples.py
index 9cd81a5be7..4c2d93b9d2 100755
--- a/superset/cli/examples.py
+++ b/superset/cli/examples.py
@@ -35,7 +35,7 @@ def load_examples_run(
         logger.info("Loading examples metadata")
     else:
         examples_db = database_utils.get_example_database()
-        logger.info(f"Loading examples metadata and related data into 
{examples_db}")
+        logger.info("Loading examples metadata and related data into %s", 
examples_db)
 
     # pylint: disable=import-outside-toplevel
     import superset.examples.data_loading as examples
diff --git a/superset/commands/database/uploaders/csv_reader.py 
b/superset/commands/database/uploaders/csv_reader.py
index 146de7175c..6184bade41 100644
--- a/superset/commands/database/uploaders/csv_reader.py
+++ b/superset/commands/database/uploaders/csv_reader.py
@@ -123,7 +123,7 @@ class CSVReader(BaseDataReader):
         except Exception as ex:
             # Any other error, fall back to c engine
             logger.warning(
-                f"Error selecting CSV engine: {ex}, falling back to 'c' engine"
+                "Error selecting CSV engine: %s, falling back to 'c' engine", 
ex
             )
             return "c"
 
@@ -208,8 +208,8 @@ class CSVReader(BaseDataReader):
             invalid_value = df.loc[idx, column]
             line_number = idx + kwargs.get("header", 0) + 2
             error_details.append(
-                f"  • Line {line_number}: '{invalid_value}' cannot be 
converted to "
-                f"{dtype}"
+                "  • Line %s: '%s' cannot be converted to %s"
+                % (line_number, invalid_value, dtype)
             )
 
         return error_details, total_errors
diff --git a/superset/commands/theme/seed.py b/superset/commands/theme/seed.py
index 3aeeb0e414..a5600ca9ce 100644
--- a/superset/commands/theme/seed.py
+++ b/superset/commands/theme/seed.py
@@ -59,9 +59,8 @@ class SeedSystemThemesCommand(BaseCommand):
                     theme_config = json.loads(referenced_theme.json_data)
                     # Add a note about the theme being copied from UUID 
reference
                     theme_config["NOTE"] = (
-                        f"Copied at startup from theme UUID {original_uuid} "
-                        f"based on config reference"
-                    )
+                        "Copied at startup from theme UUID %s based on config 
reference"
+                    ) % original_uuid
                     logger.debug(
                         "Copied theme definition from UUID %s for system theme 
%s",
                         original_uuid,
@@ -93,7 +92,7 @@ class SeedSystemThemesCommand(BaseCommand):
 
         if existing_theme:
             existing_theme.json_data = json_data
-            logger.debug(f"Updated system theme: {theme_name}")
+            logger.debug("Updated system theme: %s", theme_name)
         else:
             new_theme = Theme(
                 theme_name=theme_name,
@@ -101,7 +100,7 @@ class SeedSystemThemesCommand(BaseCommand):
                 is_system=True,
             )
             db.session.add(new_theme)
-            logger.debug(f"Created system theme: {theme_name}")
+            logger.debug("Created system theme: %s", theme_name)
 
     def validate(self) -> None:
         """Validate that the command can be executed."""
diff --git a/superset/commands/theme/set_system_theme.py 
b/superset/commands/theme/set_system_theme.py
index 23001868d1..2dc630ac7e 100644
--- a/superset/commands/theme/set_system_theme.py
+++ b/superset/commands/theme/set_system_theme.py
@@ -51,7 +51,7 @@ class SetSystemDefaultThemeCommand(BaseCommand):
         self._theme.is_system_default = True
         db.session.add(self._theme)
 
-        logger.info(f"Set theme {self._theme_id} as system default")
+        logger.info("Set theme %s as system default", self._theme_id)
 
         return self._theme
 
@@ -82,7 +82,7 @@ class SetSystemDarkThemeCommand(BaseCommand):
         self._theme.is_system_dark = True
         db.session.add(self._theme)
 
-        logger.info(f"Set theme {self._theme_id} as system dark")
+        logger.info("Set theme %s as system dark", self._theme_id)
 
         return self._theme
 
diff --git a/superset/daos/datasource.py b/superset/daos/datasource.py
index c754fd8e1f..308785f625 100644
--- a/superset/daos/datasource.py
+++ b/superset/daos/datasource.py
@@ -61,7 +61,7 @@ class DatasourceDAO(BaseDAO[Datasource]):
                 filter = model.uuid == database_id_or_uuid
             except ValueError as err:
                 logger.warning(
-                    f"database_id_or_uuid {database_id_or_uuid} isn't valid 
uuid"
+                    "database_id_or_uuid %s isn't valid uuid", 
database_id_or_uuid
                 )
                 raise DatasourceValueIsIncorrect() from err
 
diff --git a/superset/daos/theme.py b/superset/daos/theme.py
index 024d698ff0..839b9f74f3 100644
--- a/superset/daos/theme.py
+++ b/superset/daos/theme.py
@@ -47,8 +47,9 @@ class ThemeDAO(BaseDAO[Theme]):
 
         if len(system_defaults) > 1:
             logger.warning(
-                f"Multiple system default themes found 
({len(system_defaults)}), "
-                "falling back to config theme"
+                "Multiple system default themes found (%s), "
+                "falling back to config theme",
+                len(system_defaults),
             )
 
         # Fallback to is_system=True theme with name 'THEME_DEFAULT'
@@ -75,8 +76,8 @@ class ThemeDAO(BaseDAO[Theme]):
 
         if len(system_darks) > 1:
             logger.warning(
-                f"Multiple system dark themes found ({len(system_darks)}), "
-                "falling back to config theme"
+                "Multiple system dark themes found (%s), falling back to 
config theme",
+                len(system_darks),
             )
 
         # Fallback to is_system=True theme with name 'THEME_DARK'
diff --git a/superset/examples/bart_lines.py b/superset/examples/bart_lines.py
index 4b741fdeed..9af12125db 100644
--- a/superset/examples/bart_lines.py
+++ b/superset/examples/bart_lines.py
@@ -59,7 +59,7 @@ def load_bart_lines(only_metadata: bool = False, force: bool 
= False) -> None:
                 index=False,
             )
 
-    logger.debug(f"Creating table {tbl_name} reference")
+    logger.debug("Creating table %s reference", tbl_name)
     table = get_table_connector_registry()
     tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
     if not tbl:
diff --git a/superset/examples/birth_names.py b/superset/examples/birth_names.py
index 79210493f7..0f4a9fdaa2 100644
--- a/superset/examples/birth_names.py
+++ b/superset/examples/birth_names.py
@@ -108,7 +108,7 @@ def load_birth_names(
     table = get_table_connector_registry()
     obj = db.session.query(table).filter_by(table_name=tbl_name, 
schema=schema).first()
     if not obj:
-        logger.debug(f"Creating table [{tbl_name}] reference")
+        logger.debug("Creating table [%s] reference", tbl_name)
         obj = table(table_name=tbl_name, schema=schema)
         db.session.add(obj)
 
@@ -138,13 +138,14 @@ def _add_table_metrics(datasource: SqlaTable) -> None:
         columns.append(
             TableColumn(
                 column_name="num_california",
-                expression=f"CASE WHEN {col_state} = 'CA' THEN {col_num} ELSE 
0 END",
+                expression="CASE WHEN %s = 'CA' THEN %s ELSE 0 END"
+                % (col_state, col_num),
             )
         )
 
     if not any(col.metric_name == "sum__num" for col in metrics):
         col = str(column("num").compile(db.engine))
-        metrics.append(SqlMetric(metric_name="sum__num", 
expression=f"SUM({col})"))
+        metrics.append(SqlMetric(metric_name="sum__num", expression="SUM(%s)" 
% col))
 
     for col in columns:
         if col.column_name == "ds":  # type: ignore
diff --git a/superset/examples/multiformat_time_series.py 
b/superset/examples/multiformat_time_series.py
index 3e6130903f..1938beb5f4 100644
--- a/superset/examples/multiformat_time_series.py
+++ b/superset/examples/multiformat_time_series.py
@@ -84,7 +84,7 @@ def load_multiformat_time_series(  # pylint: 
disable=too-many-locals
         logger.debug("Done loading table!")
         logger.debug("-" * 80)
 
-    logger.debug(f"Creating table [{tbl_name}] reference")
+    logger.debug("Creating table [%s] reference", tbl_name)
     table = get_table_connector_registry()
     obj = db.session.query(table).filter_by(table_name=tbl_name).first()
     if not obj:
@@ -125,7 +125,7 @@ def load_multiformat_time_series(  # pylint: 
disable=too-many-locals
         }
 
         slc = Slice(
-            slice_name=f"Calendar Heatmap multiformat {i}",
+            slice_name="Calendar Heatmap multiformat %s" % i,
             viz_type="cal_heatmap",
             datasource_type=DatasourceType.TABLE,
             datasource_id=tbl.id,
diff --git a/superset/examples/paris.py b/superset/examples/paris.py
index 1b5f40e792..9fadf0b019 100644
--- a/superset/examples/paris.py
+++ b/superset/examples/paris.py
@@ -55,7 +55,7 @@ def load_paris_iris_geojson(only_metadata: bool = False, 
force: bool = False) ->
                 index=False,
             )
 
-    logger.debug(f"Creating table {tbl_name} reference")
+    logger.debug("Creating table %s reference", tbl_name)
     table = get_table_connector_registry()
     tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
     if not tbl:
diff --git a/superset/examples/random_time_series.py 
b/superset/examples/random_time_series.py
index 395eb99c2c..c77ef0ed8c 100644
--- a/superset/examples/random_time_series.py
+++ b/superset/examples/random_time_series.py
@@ -68,7 +68,7 @@ def load_random_time_series_data(
         logger.debug("Done loading table!")
         logger.debug("-" * 80)
 
-    logger.debug(f"Creating table [{tbl_name}] reference")
+    logger.debug("Creating table [%s] reference", tbl_name)
     table = get_table_connector_registry()
     obj = db.session.query(table).filter_by(table_name=tbl_name).first()
     if not obj:
diff --git a/superset/examples/sf_population_polygons.py 
b/superset/examples/sf_population_polygons.py
index f1612e2c1d..de4c391e79 100644
--- a/superset/examples/sf_population_polygons.py
+++ b/superset/examples/sf_population_polygons.py
@@ -59,7 +59,7 @@ def load_sf_population_polygons(
                 index=False,
             )
 
-    logger.debug(f"Creating table {tbl_name} reference")
+    logger.debug("Creating table %s reference", tbl_name)
     table = get_table_connector_registry()
     tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
     if not tbl:
diff --git a/superset/extensions/discovery.py b/superset/extensions/discovery.py
index f1b0094864..7a4d58c927 100644
--- a/superset/extensions/discovery.py
+++ b/superset/extensions/discovery.py
@@ -40,7 +40,9 @@ def discover_and_load_extensions(
         LoadedExtension instances for each valid .supx file found
     """
     if not extensions_path or not os.path.exists(extensions_path):
-        logger.warning(f"Extensions path does not exist or is empty: 
{extensions_path}")
+        logger.warning(
+            "Extensions path does not exist or is empty: %s", extensions_path
+        )
         return
 
     extensions_dir = Path(extensions_path)
@@ -50,7 +52,8 @@ def discover_and_load_extensions(
         for supx_file in extensions_dir.glob("*.supx"):
             if not is_zipfile(supx_file):
                 logger.warning(
-                    f"File has .supx extension but is not a valid zip file: 
{supx_file}"
+                    "File has .supx extension but is not a valid zip file: %s",
+                    supx_file,
                 )
                 continue
 
@@ -59,11 +62,13 @@ def discover_and_load_extensions(
                     files = get_bundle_files_from_zip(zip_file)
                     extension = get_loaded_extension(files)
                     extension_id = extension.manifest["id"]
-                    logger.info(f"Loaded extension '{extension_id}' from 
{supx_file}")
+                    logger.info(
+                        "Loaded extension '%s' from %s", extension_id, 
supx_file
+                    )
                     yield extension
             except Exception as e:
-                logger.error(f"Failed to load extension from {supx_file}: {e}")
+                logger.error("Failed to load extension from %s: %s", 
supx_file, e)
                 continue
 
     except Exception as e:
-        logger.error(f"Error discovering extensions in {extensions_path}: {e}")
+        logger.error("Error discovering extensions in %s: %s", 
extensions_path, e)
diff --git a/superset/extensions/local_extensions_watcher.py 
b/superset/extensions/local_extensions_watcher.py
index ae060a69cb..4616e4ff13 100644
--- a/superset/extensions/local_extensions_watcher.py
+++ b/superset/extensions/local_extensions_watcher.py
@@ -40,11 +40,11 @@ class LocalExtensionFileHandler(FileSystemEventHandler):
         if event.is_directory:
             return
 
-        logger.info(f"File change detected in LOCAL_EXTENSIONS: 
{event.src_path}")
+        logger.info("File change detected in LOCAL_EXTENSIONS: %s", 
event.src_path)
 
         # Touch superset/__init__.py to trigger Flask's file watcher
         superset_init = Path("superset/__init__.py")
-        logger.info(f"Triggering restart by touching {superset_init}")
+        logger.info("Triggering restart by touching %s", superset_init)
         os.utime(superset_init, (time.time(), time.time()))
 
 
@@ -70,12 +70,12 @@ def setup_local_extensions_watcher(app: Flask) -> None:  # 
noqa: C901
 
         ext_path = Path(ext_path).resolve()
         if not ext_path.exists():
-            logger.warning(f"LOCAL_EXTENSIONS path does not exist: {ext_path}")
+            logger.warning("LOCAL_EXTENSIONS path does not exist: %s", 
ext_path)
             continue
 
         dist_path = ext_path / "dist"
         watch_dirs.append(str(dist_path))
-        logger.info(f"Watching LOCAL_EXTENSIONS dist directory: {dist_path}")
+        logger.info("Watching LOCAL_EXTENSIONS dist directory: %s", dist_path)
 
     if not watch_dirs:
         return
@@ -89,18 +89,19 @@ def setup_local_extensions_watcher(app: Flask) -> None:  # 
noqa: C901
             try:
                 observer.schedule(event_handler, watch_dir, recursive=True)
             except Exception as e:
-                logger.warning(f"Failed to watch directory {watch_dir}: {e}")
+                logger.warning("Failed to watch directory %s: %s", watch_dir, 
e)
                 continue
 
         observer.daemon = True
         observer.start()
 
         logger.info(
-            f"LOCAL_EXTENSIONS file watcher started for {len(watch_dirs)} 
directories"  # noqa: E501
+            "LOCAL_EXTENSIONS file watcher started for %s directories",  # 
noqa: E501
+            len(watch_dirs),
         )
 
     except Exception as e:
-        logger.error(f"Failed to start LOCAL_EXTENSIONS file watcher: {e}")
+        logger.error("Failed to start LOCAL_EXTENSIONS file watcher: %s", e)
 
 
 def start_local_extensions_watcher_thread(app: Flask) -> None:
diff --git a/superset/extensions/utils.py b/superset/extensions/utils.py
index 8f058e0b96..e288287d22 100644
--- a/superset/extensions/utils.py
+++ b/superset/extensions/utils.py
@@ -109,7 +109,7 @@ def get_bundle_files_from_path(base_path: str) -> 
Generator[BundleFile, None, No
     dist_path = os.path.join(base_path, "dist")
 
     if not os.path.isdir(dist_path):
-        raise Exception(f"Expected directory {dist_path} does not exist.")
+        raise Exception("Expected directory %s does not exist." % dist_path)
 
     for root, _, files in os.walk(dist_path):
         for file in files:
@@ -137,7 +137,7 @@ def get_loaded_extension(files: Iterable[BundleFile]) -> 
LoadedExtension:
                 if "name" not in manifest:
                     raise Exception("Missing 'name' in manifest")
             except Exception as e:
-                raise Exception(f"Invalid manifest.json: {e}") from e
+                raise Exception("Invalid manifest.json: %s" % e) from e
 
         elif (match := FRONTEND_REGEX.match(filename)) is not None:
             frontend[match.group(1)] = content
@@ -146,7 +146,7 @@ def get_loaded_extension(files: Iterable[BundleFile]) -> 
LoadedExtension:
             backend[match.group(1)] = content
 
         else:
-            raise Exception(f"Unexpected file in bundle: {filename}")
+            raise Exception("Unexpected file in bundle: %s" % filename)
 
     id_ = manifest["id"]
     name = manifest["name"]
@@ -176,7 +176,8 @@ def build_extension_data(extension: LoadedExtension) -> 
dict[str, Any]:
         remote_entry = frontend["remoteEntry"]
         extension_data.update(
             {
-                "remoteEntry": 
f"/api/v1/extensions/{manifest['id']}/{remote_entry}",  # noqa: E501
+                "remoteEntry": "/api/v1/extensions/%s/%s"
+                % (manifest["id"], remote_entry),  # noqa: E501
                 "exposedModules": module_federation.get("exposes", []),
                 "contributions": frontend.get("contributions", {}),
             }
@@ -194,8 +195,9 @@ def get_extensions() -> dict[str, LoadedExtension]:
         extension_id = extension.manifest["id"]
         extensions[extension_id] = extension
         logger.info(
-            f"Loading extension {extension.name} (ID: {extension_id}) "
-            "from local filesystem"
+            "Loading extension %s (ID: %s) from local filesystem",
+            extension.name,
+            extension_id,
         )
 
     # Load extensions from discovery path (.supx files)
@@ -207,13 +209,16 @@ def get_extensions() -> dict[str, LoadedExtension]:
             if extension_id not in extensions:  # Don't override 
LOCAL_EXTENSIONS
                 extensions[extension_id] = extension
                 logger.info(
-                    f"Loading extension {extension.name} (ID: {extension_id}) "
-                    "from discovery path"
+                    "Loading extension %s (ID: %s) from discovery path",
+                    extension.name,
+                    extension_id,
                 )
             else:
                 logger.info(
-                    f"Extension {extension.name} (ID: {extension_id}) already "
-                    "loaded from LOCAL_EXTENSIONS, skipping discovery version"
+                    "Extension %s (ID: %s) already loaded from 
LOCAL_EXTENSIONS, "
+                    "skipping discovery version",
+                    extension.name,
+                    extension_id,
                 )
 
     return extensions
diff --git a/superset/migrations/shared/catalogs.py 
b/superset/migrations/shared/catalogs.py
index 56a1b90a63..616322d35d 100644
--- a/superset/migrations/shared/catalogs.py
+++ b/superset/migrations/shared/catalogs.py
@@ -154,8 +154,12 @@ def print_processed_batch(
     elapsed_formatted = f"{int(elapsed_seconds // 
3600):02}:{int((elapsed_seconds % 3600) // 60):02}:{int(elapsed_seconds % 
60):02}"  # noqa: E501
     rows_processed = min(offset + batch_size, total_rows)
     logger.info(
-        f"{elapsed_formatted} - {rows_processed:,} of {total_rows:,} 
{model.__tablename__} rows processed "  # noqa: E501
-        f"({(rows_processed / total_rows) * 100:.2f}%)"
+        "%s - %s of %s %s rows processed (%s%%)",
+        elapsed_formatted,
+        f"{rows_processed:,}",
+        f"{total_rows:,}",
+        model.__tablename__,
+        f"{(rows_processed / total_rows) * 100:.2f}",
     )
 
 
@@ -180,7 +184,7 @@ def update_catalog_column(
     """
     start_time = datetime.now()
 
-    logger.info(f"Updating {database.database_name} models to catalog 
{catalog}")
+    logger.info("Updating %s models to catalog %s", database.database_name, 
catalog)
 
     for model, column in MODELS:
         # Get the total number of rows that match the condition
@@ -192,7 +196,9 @@ def update_catalog_column(
         )
 
         logger.info(
-            f"Total rows to be processed for {model.__tablename__}: 
{total_rows:,}"
+            "Total rows to be processed for %s: %s",
+            model.__tablename__,
+            f"{total_rows:,}",
         )
 
         batch_size = get_batch_size(session)
@@ -302,7 +308,7 @@ def delete_models_non_default_catalog(
     """
     start_time = datetime.now()
 
-    logger.info(f"Deleting models not in the default catalog: {catalog}")
+    logger.info("Deleting models not in the default catalog: %s", catalog)
 
     for model, column in MODELS:
         # Get the total number of rows that match the condition
@@ -314,7 +320,9 @@ def delete_models_non_default_catalog(
         )
 
         logger.info(
-            f"Total rows to be processed for {model.__tablename__}: 
{total_rows:,}"
+            "Total rows to be processed for %s: %s",
+            model.__tablename__,
+            f"{total_rows:,}",
         )
 
         batch_size = get_batch_size(session)
diff --git a/superset/migrations/shared/migrate_viz/base.py 
b/superset/migrations/shared/migrate_viz/base.py
index 4290b89ada..9dcd3d199b 100644
--- a/superset/migrations/shared/migrate_viz/base.py
+++ b/superset/migrations/shared/migrate_viz/base.py
@@ -180,7 +180,7 @@ class MigrateViz:
             slc.params = json.dumps({**clz.data, **backup})
 
         except Exception as e:
-            logger.warning(f"Failed to migrate slice {slc.id}: {e}")
+            logger.warning("Failed to migrate slice %s: %s", slc.id, e)
 
     @classmethod
     def downgrade_slice(cls, slc: Slice) -> None:
@@ -202,14 +202,14 @@ class MigrateViz:
                     slc.query_context = None
 
         except Exception as e:
-            logger.warning(f"Failed to downgrade slice {slc.id}: {e}")
+            logger.warning("Failed to downgrade slice %s: %s", slc.id, e)
 
     @classmethod
     def upgrade(cls, session: Session) -> None:
         slices = session.query(Slice).filter(Slice.viz_type == 
cls.source_viz_type)
         for slc in paginated_update(
             slices,
-            lambda current, total: logger.info(f"Upgraded {current}/{total} 
charts"),
+            lambda current, total: logger.info("Upgraded %s/%s charts", 
current, total),
         ):
             cls.upgrade_slice(slc)
 
@@ -223,7 +223,9 @@ class MigrateViz:
         )
         for slc in paginated_update(
             slices,
-            lambda current, total: logger.info(f"Downgraded {current}/{total} 
charts"),
+            lambda current, total: logger.info(
+                "Downgraded %s/%s charts", current, total
+            ),
         ):
             cls.downgrade_slice(slc)
 
diff --git a/superset/migrations/shared/security_converge.py 
b/superset/migrations/shared/security_converge.py
index dd546dd476..f5fbebab69 100644
--- a/superset/migrations/shared/security_converge.py
+++ b/superset/migrations/shared/security_converge.py
@@ -51,7 +51,7 @@ class Permission(Base):  # type: ignore
     name = Column(String(100), unique=True, nullable=False)
 
     def __repr__(self) -> str:
-        return f"{self.name}"
+        return str(self.name)
 
 
 class ViewMenu(Base):  # type: ignore
@@ -60,7 +60,7 @@ class ViewMenu(Base):  # type: ignore
     name = Column(String(250), unique=True, nullable=False)
 
     def __repr__(self) -> str:
-        return f"{self.name}"
+        return str(self.name)
 
     def __eq__(self, other: object) -> bool:
         return (isinstance(other, self.__class__)) and (self.name == 
other.name)
@@ -89,7 +89,7 @@ class Role(Base):  # type: ignore
     )
 
     def __repr__(self) -> str:
-        return f"{self.name}"
+        return str(self.name)
 
 
 class PermissionView(Base):  # type: ignore
@@ -192,7 +192,7 @@ def _delete_old_permissions(
     for old_pvm, new_pvms in pvm_map.items():  # noqa: B007
         old_permission_name = old_pvm.permission.name
         old_view_name = old_pvm.view_menu.name
-        logger.info(f"Going to delete pvm: {old_pvm}")
+        logger.info("Going to delete pvm: %s", old_pvm)
         session.delete(old_pvm)
         pvms_with_permission = (
             session.query(PermissionView)
@@ -200,7 +200,7 @@ def _delete_old_permissions(
             .filter(Permission.name == old_permission_name)
         ).first()
         if not pvms_with_permission:
-            logger.info(f"Going to delete permission: {old_pvm.permission}")
+            logger.info("Going to delete permission: %s", old_pvm.permission)
             session.delete(old_pvm.permission)
         pvms_with_view_menu = (
             session.query(PermissionView)
@@ -208,7 +208,7 @@ def _delete_old_permissions(
             .filter(ViewMenu.name == old_view_name)
         ).first()
         if not pvms_with_view_menu:
-            logger.info(f"Going to delete view_menu: {old_pvm.view_menu}")
+            logger.info("Going to delete view_menu: %s", old_pvm.view_menu)
             session.delete(old_pvm.view_menu)
 
 
@@ -237,11 +237,11 @@ def migrate_roles(  # noqa: C901
     for role in roles:
         for old_pvm, new_pvms in pvm_map.items():
             if old_pvm in role.permissions:
-                logger.info(f"Removing {old_pvm} from {role}")
+                logger.info("Removing %s from %s", old_pvm, role)
                 role.permissions.remove(old_pvm)
                 for new_pvm in new_pvms:
                     if new_pvm not in role.permissions:
-                        logger.info(f"Add {new_pvm} to {role}")
+                        logger.info("Add %s to %s", new_pvm, role)
                         role.permissions.append(new_pvm)
 
     # Delete old permissions
diff --git a/superset/migrations/shared/utils.py 
b/superset/migrations/shared/utils.py
index 09e636ec74..9adf7978bf 100644
--- a/superset/migrations/shared/utils.py
+++ b/superset/migrations/shared/utils.py
@@ -227,7 +227,13 @@ def drop_fks_for_table(
 
         for fk_name in foreign_key_names:
             logger.info(
-                f"Dropping foreign key {GREEN}{fk_name}{RESET} from table 
{GREEN}{table_name}{RESET}..."  # noqa: E501
+                "Dropping foreign key %s%s%s from table %s%s%s...",  # noqa: 
E501
+                GREEN,
+                fk_name,
+                RESET,
+                GREEN,
+                table_name,
+                RESET,
             )
             op.drop_constraint(fk_name, table_name, type_="foreignkey")
 
@@ -246,12 +252,12 @@ def create_table(table_name: str, *columns: SchemaItem, 
**kwargs: Any) -> None:
     just like when calling alembic's method create_table()
     """
     if has_table(table_name=table_name):
-        logger.info(f"Table {LRED}{table_name}{RESET} already exists. 
Skipping...")
+        logger.info("Table %s%s%s already exists. Skipping...", LRED, 
table_name, RESET)
         return
 
-    logger.info(f"Creating table {GREEN}{table_name}{RESET}...")
+    logger.info("Creating table %s%s%s...", GREEN, table_name, RESET)
     op.create_table(table_name, *columns, **kwargs)
-    logger.info(f"Table {GREEN}{table_name}{RESET} created.")
+    logger.info("Table %s%s%s created.", GREEN, table_name, RESET)
 
 
 def drop_table(table_name: str) -> None:
@@ -267,13 +273,13 @@ def drop_table(table_name: str) -> None:
     """  # noqa: E501
 
     if not has_table(table_name=table_name):
-        logger.info(f"Table {GREEN}{table_name}{RESET} doesn't exist. 
Skipping...")
+        logger.info("Table %s%s%s doesn't exist. Skipping...", GREEN, 
table_name, RESET)
         return
 
-    logger.info(f"Dropping table {GREEN}{table_name}{RESET}...")
+    logger.info("Dropping table %s%s%s...", GREEN, table_name, RESET)
     drop_fks_for_table(table_name)
     op.drop_table(table_name=table_name)
-    logger.info(f"Table {GREEN}{table_name}{RESET} dropped.")
+    logger.info("Table %s%s%s dropped.", GREEN, table_name, RESET)
 
 
 def batch_operation(
@@ -295,17 +301,31 @@ def batch_operation(
     """  # noqa: E501
     if count <= 0:
         logger.info(
-            f"No records to process in batch {LRED}(count <= 0){RESET} for 
callable {LRED}other_callable_example{RESET}. Skipping..."  # noqa: E501
+            "No records to process in batch %s(count <= 0)%s for callable 
%sother_callable_example%s. Skipping...",  # noqa: E501
+            LRED,
+            RESET,
+            LRED,
+            RESET,
         )
         return
     for offset in range(0, count, batch_size):
         percentage = (offset / count) * 100 if count else 0
-        logger.info(f"Progress: {offset:,}/{count:,} ({percentage:.2f}%)")
+        logger.info(
+            "Progress: %s/%s (%.2f%%)",
+            "{:,}".format(offset),
+            "{:,}".format(count),
+            percentage,
+        )
         callable(offset, min(offset + batch_size, count))
 
-    logger.info(f"Progress: {count:,}/{count:,} (100%)")
+    logger.info("Progress: %s/%s (100%%)", "{:,}".format(count), 
"{:,}".format(count))
     logger.info(
-        f"End: {GREEN}{callable.__name__}{RESET} batch operation 
{GREEN}successfully{RESET} executed."  # noqa: E501
+        "End: %s%s%s batch operation %ssuccessfully%s executed.",  # noqa: E501
+        GREEN,
+        callable.__name__,
+        RESET,
+        GREEN,
+        RESET,
     )
 
 
@@ -326,7 +346,13 @@ def add_columns(table_name: str, *columns: Column) -> None:
     for col in columns:
         if table_has_column(table_name=table_name, column_name=col.name):
             logger.info(
-                f"Column {LRED}{col.name}{RESET} already present on table 
{LRED}{table_name}{RESET}. Skipping..."  # noqa: E501
+                "Column %s%s%s already present on table %s%s%s. Skipping...",  
# noqa: E501
+                LRED,
+                col.name,
+                RESET,
+                LRED,
+                table_name,
+                RESET,
             )
         else:
             cols_to_add.append(col)
@@ -334,7 +360,13 @@ def add_columns(table_name: str, *columns: Column) -> None:
     with op.batch_alter_table(table_name) as batch_op:
         for col in cols_to_add:
             logger.info(
-                f"Adding column {GREEN}{col.name}{RESET} to table 
{GREEN}{table_name}{RESET}..."  # noqa: E501
+                "Adding column %s%s%s to table %s%s%s...",  # noqa: E501
+                GREEN,
+                col.name,
+                RESET,
+                GREEN,
+                table_name,
+                RESET,
             )
             batch_op.add_column(col)
 
@@ -356,7 +388,13 @@ def drop_columns(table_name: str, *columns: str) -> None:
     for col in columns:
         if not table_has_column(table_name=table_name, column_name=col):
             logger.info(
-                f"Column {LRED}{col}{RESET} is not present on table 
{LRED}{table_name}{RESET}. Skipping..."  # noqa: E501
+                "Column %s%s%s is not present on table %s%s%s. Skipping...",  
# noqa: E501
+                LRED,
+                col,
+                RESET,
+                LRED,
+                table_name,
+                RESET,
             )
         else:
             cols_to_drop.append(col)
@@ -364,7 +402,13 @@ def drop_columns(table_name: str, *columns: str) -> None:
     with op.batch_alter_table(table_name) as batch_op:
         for col in cols_to_drop:
             logger.info(
-                f"Dropping column {GREEN}{col}{RESET} from table 
{GREEN}{table_name}{RESET}..."  # noqa: E501
+                "Dropping column %s%s%s from table %s%s%s...",  # noqa: E501
+                GREEN,
+                col,
+                RESET,
+                GREEN,
+                table_name,
+                RESET,
             )
             batch_op.drop_column(col)
 
@@ -386,12 +430,24 @@ def create_index(
 
     if table_has_index(table=table_name, index=index_name):
         logger.info(
-            f"Table {LRED}{table_name}{RESET} already has index 
{LRED}{index_name}{RESET}. Skipping..."  # noqa: E501
+            "Table %s%s%s already has index %s%s%s. Skipping...",  # noqa: E501
+            LRED,
+            table_name,
+            RESET,
+            LRED,
+            index_name,
+            RESET,
         )
         return
 
     logger.info(
-        f"Creating index {GREEN}{index_name}{RESET} on table 
{GREEN}{table_name}{RESET}"
+        "Creating index %s%s%s on table %s%s%s",
+        GREEN,
+        index_name,
+        RESET,
+        GREEN,
+        table_name,
+        RESET,
     )
 
     op.create_index(
@@ -415,12 +471,24 @@ def drop_index(table_name: str, index_name: str) -> None:
 
     if not table_has_index(table=table_name, index=index_name):
         logger.info(
-            f"Table {LRED}{table_name}{RESET} doesn't have index 
{LRED}{index_name}{RESET}. Skipping..."  # noqa: E501
+            "Table %s%s%s doesn't have index %s%s%s. Skipping...",  # noqa: 
E501
+            LRED,
+            table_name,
+            RESET,
+            LRED,
+            index_name,
+            RESET,
         )
         return
 
     logger.info(
-        f"Dropping index {GREEN}{index_name}{RESET} from table 
{GREEN}{table_name}{RESET}..."  # noqa: E501
+        "Dropping index %s%s%s from table %s%s%s...",  # noqa: E501
+        GREEN,
+        index_name,
+        RESET,
+        GREEN,
+        table_name,
+        RESET,
     )
 
     op.drop_index(table_name=table_name, index_name=index_name)
@@ -448,7 +516,10 @@ def create_fks_for_table(
 
     if not has_table(table_name):
         logger.warning(
-            f"Table {LRED}{table_name}{RESET} does not exist. Skipping foreign 
key creation."  # noqa: E501
+            "Table %s%s%s does not exist. Skipping foreign key creation.",  # 
noqa: E501
+            LRED,
+            table_name,
+            RESET,
         )
         return
 
@@ -456,7 +527,13 @@ def create_fks_for_table(
         # SQLite requires batch mode since ALTER TABLE is limited
         with op.batch_alter_table(table_name) as batch_op:
             logger.info(
-                f"Creating foreign key {GREEN}{foreign_key_name}{RESET} on 
table {GREEN}{table_name}{RESET} (SQLite mode)..."  # noqa: E501
+                "Creating foreign key %s%s%s on table %s%s%s (SQLite 
mode)...",  # noqa: E501
+                GREEN,
+                foreign_key_name,
+                RESET,
+                GREEN,
+                table_name,
+                RESET,
             )
             batch_op.create_foreign_key(
                 foreign_key_name,
@@ -468,7 +545,13 @@ def create_fks_for_table(
     else:
         # Standard FK creation for other databases
         logger.info(
-            f"Creating foreign key {GREEN}{foreign_key_name}{RESET} on table 
{GREEN}{table_name}{RESET}..."  # noqa: E501
+            "Creating foreign key %s%s%s on table %s%s%s...",  # noqa: E501
+            GREEN,
+            foreign_key_name,
+            RESET,
+            GREEN,
+            table_name,
+            RESET,
         )
         op.create_foreign_key(
             foreign_key_name,
@@ -541,7 +624,11 @@ USING safe_to_jsonb({column});
             json.loads(value)
         except json.JSONDecodeError:
             logger.warning(
-                f"Invalid JSON value in column {column} for {pk}={row_pk}: 
{value}"
+                "Invalid JSON value in column %s for %s=%s: %s",
+                column,
+                pk,
+                row_pk,
+                value,
             )
             continue
         stmt_update = update(t).where(t.c[pk] == row_pk).values({tmp_column: 
value})
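
Several of the converted call sites above pre-format numbers with f-strings
before handing them to the logger, because printf-style placeholders have no
counterpart to Python's "," thousands-separator format spec. A minimal sketch
of that pattern (hypothetical values):

    import logging

    logger = logging.getLogger(__name__)

    count, offset = 1_234_567, 250_000
    # Numeric formatting happens up front; the message itself stays lazy.
    logger.info(
        "Progress: %s/%s (%.2f%%)",
        f"{offset:,}",
        f"{count:,}",
        (offset / count) * 100,
    )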
diff --git 
a/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py 
b/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py
index ad67e835b3..19be6b717d 100644
--- 
a/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py
+++ 
b/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py
@@ -51,7 +51,7 @@ class Slice(Base):
 
 def upgrade_slice(slc):
     params = json.loads(slc.params)
-    logger.info(f"Upgrading {slc.slice_name}")
+    logger.info("Upgrading %s", slc.slice_name)
     cols = params.get("groupby")
     metric = params.get("metric")
     if cols:
@@ -96,7 +96,7 @@ def downgrade():
     for slc in filter_box_slices.all():
         try:
             params = json.loads(slc.params)
-            logger.info(f"Downgrading {slc.slice_name}")
+            logger.info("Downgrading %s", slc.slice_name)
             flts = params.get("filter_configs")
             if not flts:
                 continue
diff --git 
a/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
 
b/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
index ed277b2b66..8a8e680006 100644
--- 
a/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
+++ 
b/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
@@ -89,7 +89,9 @@ def upgrade():
                 filter_scopes = convert_filter_scopes(json_metadata, filters)
                 json_metadata["filter_scopes"] = filter_scopes
                 logger.info(
-                    f"Adding filter_scopes for dashboard {dashboard.id}: 
{json.dumps(filter_scopes)}"  # noqa: E501
+                    "Adding filter_scopes for dashboard %s: %s",  # noqa: E501
+                    dashboard.id,
+                    json.dumps(filter_scopes),
                 )
 
             json_metadata.pop("filter_immune_slices", None)
@@ -102,7 +104,7 @@ def upgrade():
             else:
                 dashboard.json_metadata = None
         except Exception as ex:
-            logger.exception(f"dashboard {dashboard.id} has error: {ex}")
+            logger.exception("dashboard %s has error: %s", dashboard.id, ex)
 
     session.commit()
     session.close()
diff --git 
a/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
 
b/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
index 6e1aaa6ecb..a9353fee9d 100644
--- 
a/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
+++ 
b/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
@@ -188,7 +188,7 @@ def upgrade():
         )
 
     except Exception as ex:
-        logging.exception(f"dashboard {dashboard.id} has error: {ex}")
+        logging.exception("dashboard %s has error: %s", dashboard.id, ex)
 
     session.commit()
     session.close()
diff --git 
a/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
 
b/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
index b73bd05c42..fefda2fb1b 100644
--- 
a/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
+++ 
b/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
@@ -65,8 +65,9 @@ def upgrade():
             slc.params = json.dumps(params, sort_keys=True)
         except Exception:
             logger.exception(
-                f"An error occurred: parsing params for slice {slc.id} failed."
-                f"You need to fix it before upgrading your DB."
+                "An error occurred: parsing params for slice %s failed."
+                "You need to fix it before upgrading your DB.",
+                slc.id,
             )
             raise
 
@@ -91,8 +92,9 @@ def downgrade():
             slc.params = json.dumps(params, sort_keys=True)
         except Exception:
             logger.exception(
-                f"An error occurred: parsing params for slice {slc.id} failed. 
"
-                "You need to fix it before downgrading your DB."
+                "An error occurred: parsing params for slice %s failed. "
+                "You need to fix it before downgrading your DB.",
+                slc.id,
             )
             raise
 
diff --git 
a/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
 
b/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
index 2ce46b9068..e902cb9a19 100644
--- 
a/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
+++ 
b/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
@@ -63,8 +63,9 @@ def upgrade():
             slc.params = json.dumps(params, sort_keys=True)
         except Exception:
             logger.exception(
-                f"An error occurred: parsing params for slice {slc.id} failed."
-                f"You need to fix it before upgrading your DB."
+                "An error occurred: parsing params for slice %s failed."
+                "You need to fix it before upgrading your DB.",
+                slc.id,
             )
             raise
 
@@ -86,8 +87,9 @@ def downgrade():
             slc.params = json.dumps(params, sort_keys=True)
         except Exception:
             logger.exception(
-                f"An error occurred: parsing params for slice {slc.id} failed. 
"
-                "You need to fix it before downgrading your DB."
+                "An error occurred: parsing params for slice %s failed. "
+                "You need to fix it before downgrading your DB.",
+                slc.id,
             )
             raise
 
diff --git 
a/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
 
b/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
index cc1c31d42b..1dfab9c6b2 100644
--- 
a/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
+++ 
b/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
@@ -65,7 +65,7 @@ def upgrade():  # noqa: C901
                 if updated:
                     slc.params = json.dumps(params)
             except Exception:
-                logging.exception(f"Unable to parse params for slice {slc.id}")
+                logging.exception("Unable to parse params for slice %s", 
slc.id)
 
         if slc.query_context:
             updated = False
@@ -93,7 +93,7 @@ def upgrade():  # noqa: C901
                 if updated:
                     slc.query_context = json.dumps(query_context)
             except Exception:
-                logging.exception(f"Unable to parse query context for slice 
{slc.id}")
+                logging.exception("Unable to parse query context for slice 
%s", slc.id)
 
     session.commit()
     session.close()
diff --git 
a/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
 
b/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
index f728e20679..4bfe9831f5 100644
--- 
a/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
+++ 
b/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
@@ -70,7 +70,7 @@ def upgrade():
                     dashboard.json_metadata = json.dumps(json_metadata)
             except Exception:
                 logging.exception(
-                    f"Unable to parse JSON metadata for dashboard 
{dashboard.id}"
+                    "Unable to parse JSON metadata for dashboard %s", 
dashboard.id
                 )
 
     session.commit()
diff --git 
a/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
 
b/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
index 25042dd682..5672b238c0 100644
--- 
a/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
+++ 
b/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
@@ -62,7 +62,7 @@ def upgrade():
                 params["line_width_unit"] = "meters"
                 slc.params = json.dumps(params)
         except Exception:
-            logging.exception(f"Unable to parse params for slice {slc.id}")
+            logging.exception("Unable to parse params for slice %s", slc.id)
     session.commit()
     session.close()
 
diff --git 
a/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
 
b/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
index bc8ecc4f02..8c725679ba 100644
--- 
a/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
+++ 
b/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
@@ -113,8 +113,9 @@ def upgrade():
         except Exception as ex:
             session.rollback()
             logger.exception(
-                f"An error occurred: Upgrading params for slice {slc.id} 
failed."
-                f"You need to fix it before upgrading your DB."
+                "An error occurred: Upgrading params for slice %s failed."
+                "You need to fix it before upgrading your DB.",
+                slc.id,
             )
             raise Exception(f"An error occurred while upgrading slice: {ex}") 
from ex
 
@@ -213,8 +214,9 @@ def downgrade():
         except Exception as ex:
             session.rollback()
             logger.exception(
-                f"An error occurred: Downgrading params for slice {slc.id} 
failed."
-                f"You need to fix it before downgrading your DB."
+                "An error occurred: Downgrading params for slice %s failed."
+                "You need to fix it before downgrading your DB.",
+                slc.id,
             )
             raise Exception(f"An error occurred while downgrading slice: 
{ex}") from ex
 
diff --git 
a/superset/migrations/versions/2025-06-06_00-39_363a9b1e8992_convert_metric_currencies_from_str_to_json.py
 
b/superset/migrations/versions/2025-06-06_00-39_363a9b1e8992_convert_metric_currencies_from_str_to_json.py
index 1619ccd5a2..db7e942530 100644
--- 
a/superset/migrations/versions/2025-06-06_00-39_363a9b1e8992_convert_metric_currencies_from_str_to_json.py
+++ 
b/superset/migrations/versions/2025-06-06_00-39_363a9b1e8992_convert_metric_currencies_from_str_to_json.py
@@ -55,7 +55,7 @@ def upgrade():
     currency_configs = 
session.query(SqlMetric).filter(SqlMetric.currency.isnot(None))
     for metric in paginated_update(
         currency_configs,
-        lambda current, total: logger.info((f"Upgrading {current}/{total} 
metrics")),
+        lambda current, total: logger.info("Upgrading %s/%s metrics", current, 
total),
     ):
         while True:
             if isinstance(metric.currency, str):
@@ -63,7 +63,7 @@ def upgrade():
                     metric.currency = json.loads(metric.currency)
                 except Exception as e:
                     logger.error(
-                        f"Error loading metric {metric.metric_name} as json: 
{e}"
+                        "Error loading metric %s as json: %s", 
metric.metric_name, e
                     )
                     metric.currency = {}
                     break
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 7d6c846da4..6e6ef22bde 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -572,7 +572,7 @@ class AuditMixinNullable(AuditMixin):
             humanize.i18n.deactivate()
             return result
         except Exception as e:
-            logger.warning(f"Locale '{locale}' is not supported in humanize: 
{e}")
+            logger.warning("Locale '%s' is not supported in humanize: %s", 
locale, e)
             return humanize.naturaltime(time_diff)
 
     @property
diff --git a/superset/stats_logger.py b/superset/stats_logger.py
index 5b6b86c263..d8ae2731f7 100644
--- a/superset/stats_logger.py
+++ b/superset/stats_logger.py
@@ -51,23 +51,23 @@ class BaseStatsLogger:
 
 class DummyStatsLogger(BaseStatsLogger):
     def incr(self, key: str) -> None:
-        logger.debug(Fore.CYAN + "[stats_logger] (incr) " + key + 
Style.RESET_ALL)
+        logger.debug("%s[stats_logger] (incr) %s%s", Fore.CYAN, key, 
Style.RESET_ALL)
 
     def decr(self, key: str) -> None:
-        logger.debug(Fore.CYAN + "[stats_logger] (decr) " + key + 
Style.RESET_ALL)
+        logger.debug("%s[stats_logger] (decr) %s%s", Fore.CYAN, key, 
Style.RESET_ALL)
 
     def timing(self, key: str, value: float) -> None:
         logger.debug(
-            Fore.CYAN + f"[stats_logger] (timing) {key} | {value} " + 
Style.RESET_ALL
+            "%s[stats_logger] (timing) %s | %s %s",
+            Fore.CYAN,
+            key,
+            value,
+            Style.RESET_ALL,
         )
 
     def gauge(self, key: str, value: float) -> None:
         logger.debug(
-            Fore.CYAN
-            + "[stats_logger] (gauge) "
-            + f"{key}"
-            + f"{value}"
-            + Style.RESET_ALL
+            "%s[stats_logger] (gauge) %s%s%s", Fore.CYAN, key, value, 
Style.RESET_ALL
         )
 
 
diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py
index 1c8012ff83..0f28c07070 100644
--- a/superset/tasks/cache.py
+++ b/superset/tasks/cache.py
@@ -294,7 +294,7 @@ def cache_warmup(
         if class_.name == strategy_name:  # type: ignore
             break
     else:
-        message = f"No strategy {strategy_name} found!"
+        message = "No strategy %s found!" % strategy_name
         logger.error(message, exc_info=True)
         return message
 
@@ -316,7 +316,7 @@ def cache_warmup(
                 user = security_manager.get_user_by_username(username)
                 cookies = MachineAuthProvider.get_auth_cookies(user)
                 headers = {
-                    "Cookie": f"session={cookies.get('session', '')}",
+                    "Cookie": "session=%s" % cookies.get("session", ""),
                     "Content-Type": "application/json",
                 }
                 logger.info("Scheduling %s", payload)
@@ -326,6 +326,6 @@ def cache_warmup(
                 logger.exception("Error scheduling fetch_url for payload: %s", 
payload)
                 results["errors"].append(payload)
         else:
-            logger.warn("Executor not found for %s", payload)
+            logger.warning("Executor not found for %s", payload)
 
     return results
diff --git a/superset/themes/api.py b/superset/themes/api.py
index b98d3df430..8314e4e15b 100644
--- a/superset/themes/api.py
+++ b/superset/themes/api.py
@@ -207,7 +207,7 @@ class ThemeRestApi(BaseSupersetModelRestApi):
         except SystemThemeInUseError as ex:
             return self.response_422(message=str(ex))
         except ThemeDeleteFailedError as ex:
-            logger.exception(f"Theme delete failed for ID: {pk}")
+            logger.exception("Theme delete failed for ID: %s", pk)
             return self.response_422(message=str(ex))
 
     @expose("/", methods=("DELETE",))
@@ -268,7 +268,7 @@ class ThemeRestApi(BaseSupersetModelRestApi):
         except SystemThemeInUseError as ex:
             return self.response_422(message=str(ex))
         except ThemeDeleteFailedError as ex:
-            logger.exception(f"Theme delete failed for IDs: {item_ids}")
+            logger.exception("Theme delete failed for IDs: %s", item_ids)
             return self.response_422(message=str(ex))
 
     @expose("/<int:pk>", methods=("PUT",))
@@ -326,7 +326,7 @@ class ThemeRestApi(BaseSupersetModelRestApi):
                 return self.response_400(message="Request body is required")
 
             # Log the incoming request for debugging
-            logger.debug(f"PUT request data for theme {pk}: {request.json}")
+            logger.debug("PUT request data for theme %s: %s", pk, request.json)
 
             # Filter out read-only fields that shouldn't be in the schema
             filtered_data = {
@@ -337,7 +337,9 @@ class ThemeRestApi(BaseSupersetModelRestApi):
 
             item = self.edit_model_schema.load(filtered_data)
         except ValidationError as error:
-            logger.exception(f"Validation error in PUT /theme/{pk}: {error.messages}")
+            logger.exception(
+                "Validation error in PUT /theme/%s: %s", pk, error.messages
+            )
             return self.response_400(message=error.messages)
 
         try:
@@ -348,7 +350,7 @@ class ThemeRestApi(BaseSupersetModelRestApi):
         except SystemThemeProtectedError:
             return self.response_403()
         except Exception as ex:
-            logger.exception(f"Unexpected error in PUT /theme/{pk}")
+            logger.exception("Unexpected error in PUT /theme/%s", pk)
             return self.response_422(message=str(ex))
 
     @expose("/", methods=("POST",))
@@ -396,10 +398,10 @@ class ThemeRestApi(BaseSupersetModelRestApi):
             if not request.json:
                 return self.response_400(message="Request body is required")
 
-            logger.debug(f"POST request data for new theme: {request.json}")
+            logger.debug("POST request data for new theme: %s", request.json)
             item = self.add_model_schema.load(request.json)
         except ValidationError as error:
-            logger.exception(f"Validation error in POST /theme: {error.messages}")
+            logger.exception("Validation error in POST /theme: %s", error.messages)
             return self.response_400(message=error.messages)
 
         try:
diff --git a/superset/utils/screenshot_utils.py b/superset/utils/screenshot_utils.py
index e84eb12cfa..7c996e606f 100644
--- a/superset/utils/screenshot_utils.py
+++ b/superset/utils/screenshot_utils.py
@@ -71,7 +71,7 @@ def combine_screenshot_tiles(screenshot_tiles: list[bytes]) -> bytes:
         return output.getvalue()
 
     except Exception as e:
-        logger.exception(f"Failed to combine screenshot tiles: {e}")
+        logger.exception("Failed to combine screenshot tiles: %s", e)
         # Return the first tile as fallback
         return screenshot_tiles[0]
 
@@ -113,13 +113,16 @@ def take_tiled_screenshot(
         dashboard_width = element_info["width"]
 
         logger.info(
-            f"Dashboard: {dashboard_width}x{dashboard_height}px at "
-            f"({dashboard_left}, {dashboard_top})"
+            "Dashboard: %sx%spx at (%s, %s)",
+            dashboard_width,
+            dashboard_height,
+            dashboard_left,
+            dashboard_top,
         )
 
         # Calculate number of tiles needed
        num_tiles = max(1, (dashboard_height + viewport_height - 1) // viewport_height)
-        logger.info(f"Taking {num_tiles} screenshot tiles")
+        logger.info("Taking %s screenshot tiles", num_tiles)
 
         screenshot_tiles = []
 
@@ -129,7 +132,9 @@ def take_tiled_screenshot(
 
             # Scroll the window to the desired position
             page.evaluate(f"window.scrollTo(0, {scroll_y})")
-            logger.debug(f"Scrolled window to {scroll_y} for tile {i + 1}/{num_tiles}")
+            logger.debug(
+                "Scrolled window to %s for tile %s/%s", scroll_y, i + 1, num_tiles
+            )
 
             # Wait for scroll to settle and content to load
             page.wait_for_timeout(2000)  # 2 second wait per tile
@@ -163,7 +168,7 @@ def take_tiled_screenshot(
             tile_screenshot = page.screenshot(type="png", clip=clip)
             screenshot_tiles.append(tile_screenshot)
 
-            logger.debug(f"Captured tile {i + 1}/{num_tiles} with clip {clip}")
+            logger.debug("Captured tile %s/%s with clip %s", i + 1, num_tiles, clip)
 
         # Combine all tiles
         logger.info("Combining screenshot tiles...")
@@ -175,5 +180,5 @@ def take_tiled_screenshot(
         return combined_screenshot
 
     except Exception as e:
-        logger.exception(f"Tiled screenshot failed: {e}")
+        logger.exception("Tiled screenshot failed: %s", e)
         return None
diff --git a/superset/utils/webdriver.py b/superset/utils/webdriver.py
index c43b2c1d31..953ee2f224 100644
--- a/superset/utils/webdriver.py
+++ b/superset/utils/webdriver.py
@@ -271,10 +271,10 @@ class WebDriverPlaywright(WebDriverProxy):
 
                     if use_tiled:
                         logger.info(
-                            (
-                                f"Large dashboard detected: {chart_count} charts, "
-                                f"{dashboard_height}px height. Using tiled screenshots."
-                            )
+                            "Large dashboard detected: %s charts, %spx height. "
+                            "Using tiled screenshots.",
+                            chart_count,
+                            dashboard_height,
                         )
                         img = take_tiled_screenshot(
                             page, element_name, viewport_height=viewport_height
@@ -352,7 +352,9 @@ class WebDriverSelenium(WebDriverProxy):
                     except (ValueError, TypeError):
                         config[key] = None
                         logger.warning(
-                            f"Invalid timeout value for {key}: {value}, setting to None"
+                            "Invalid timeout value for %s: %s, setting to None",
+                            key,
+                            value,
                         )
         return config
 
diff --git a/superset/views/base.py b/superset/views/base.py
index 5744e73198..1e6e12d2cc 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -330,7 +330,8 @@ def get_theme_bootstrap_data() -> dict[str, Any]:
                 default_theme = json.loads(default_theme_model.json_data)
             except json.JSONDecodeError:
                 logger.error(
-                    f"Invalid JSON in system default theme {default_theme_model.id}"
+                    "Invalid JSON in system default theme %s",
+                    default_theme_model.id,
                 )
                 # Fallback to config
                 default_theme = get_config_value("THEME_DEFAULT")
@@ -343,7 +344,9 @@ def get_theme_bootstrap_data() -> dict[str, Any]:
             try:
                 dark_theme = json.loads(dark_theme_model.json_data)
             except json.JSONDecodeError:
-                logger.error(f"Invalid JSON in system dark theme {dark_theme_model.id}")
+                logger.error(
+                    "Invalid JSON in system dark theme %s", dark_theme_model.id
+                )
                 # Fallback to config
                 dark_theme = get_config_value("THEME_DARK")
         else:
@@ -404,7 +407,7 @@ def get_default_spinner_svg() -> str | None:
         with open(svg_path, "r", encoding="utf-8") as f:
             return f.read().strip()
     except (FileNotFoundError, OSError, UnicodeDecodeError) as e:
-        logger.warning(f"Could not load default spinner SVG: {e}")
+        logger.warning("Could not load default spinner SVG: %s", e)
         return None
 
 
diff --git a/tests/common/logger_utils.py b/tests/common/logger_utils.py
index 5a77faf365..28e6113711 100644
--- a/tests/common/logger_utils.py
+++ b/tests/common/logger_utils.py
@@ -116,7 +116,7 @@ def _make_decorator(  # noqa: C901
             def _log_enter_to_function(*args, **kwargs) -> None:
                 if _is_log_info():
                     decorated_logger.info(
-                        f"{prefix_enter_msg}'{full_func_name}'{suffix_enter_msg}"
+                        "%s'%s'%s", prefix_enter_msg, full_func_name, suffix_enter_msg
                     )
                 elif _is_debug_enable():
                     _log_debug(*args, **kwargs)
@@ -142,19 +142,27 @@ def _make_decorator(  # noqa: C901
                _CLS_PARAM in used_parameters and used_parameters.pop(_CLS_PARAM)
                 if used_parameters:
                     decorated_logger.debug(
-                        f"{prefix_enter_msg}'{full_func_name}'{with_arguments_msg_part}"
-                        f"{used_parameters}{suffix_enter_msg}"
+                        "%s'%s'%s%s%s",
+                        prefix_enter_msg,
+                        full_func_name,
+                        with_arguments_msg_part,
+                        used_parameters,
+                        suffix_enter_msg,
                     )
                 else:
                     decorated_logger.debug(
-                        f"{prefix_enter_msg}'{full_func_name}'{suffix_enter_msg}"
+                        "%s'%s'%s", prefix_enter_msg, full_func_name, suffix_enter_msg
                     )
 
             def _log_exit_of_function(return_value: Any) -> None:
                 if _is_debug_enable() and has_return_value:
                     decorated_logger.debug(
-                        f"{prefix_out_msg}'{full_func_name}'{return_value_msg_part}"
-                        f"'{return_value}'{suffix_out_msg}"
+                        "%s'%s'%s'%s'%s",
+                        prefix_out_msg,
+                        full_func_name,
+                        return_value_msg_part,
+                        return_value,
+                        suffix_out_msg,
                     )
 
             return _wrapper_func
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index ce3670f18e..72e384a09d 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -242,7 +242,7 @@ class TestCore(SupersetTestCase):
                 (slc.slice_name, "explore", slc.slice_url),
             ]
         for name, method, url in urls:
-            logger.info(f"[{name}]/[{method}]: {url}")
+            logger.info("[%s]/[%s]: %s", name, method, url)
             print(f"[{name}]/[{method}]: {url}")
             resp = self.client.get(url)
             assert resp.status_code == 200
diff --git a/tests/integration_tests/dashboards/superset_factory_util.py b/tests/integration_tests/dashboards/superset_factory_util.py
index c21b666eec..c4ea0deaeb 100644
--- a/tests/integration_tests/dashboards/superset_factory_util.py
+++ b/tests/integration_tests/dashboards/superset_factory_util.py
@@ -199,7 +199,7 @@ def delete_all_inserted_dashboards():
             try:
                 delete_dashboard(dashboard, False)
             except Exception:
-                logger.error(f"failed to delete {dashboard.id}", exc_info=True)
+                logger.error("failed to delete %s", dashboard.id, exc_info=True)
                 raise
         if len(inserted_dashboards_ids) > 0:
             db.session.commit()
@@ -210,7 +210,7 @@ def delete_all_inserted_dashboards():
 
 
 def delete_dashboard(dashboard: Dashboard, do_commit: bool = False) -> None:
-    logger.info(f"deleting dashboard{dashboard.id}")
+    logger.info("deleting dashboard%s", dashboard.id)
     delete_dashboard_roles_associations(dashboard)
     delete_dashboard_users_associations(dashboard)
     delete_dashboard_slices_associations(dashboard)
@@ -246,7 +246,7 @@ def delete_all_inserted_slices():
             try:
                 delete_slice(slice, False)
             except Exception:
-                logger.error(f"failed to delete {slice.id}", exc_info=True)
+                logger.error("failed to delete %s", slice.id, exc_info=True)
                 raise
         if len(inserted_slices_ids) > 0:
             db.session.commit()
@@ -257,7 +257,7 @@ def delete_all_inserted_slices():
 
 
 def delete_slice(slice_: Slice, do_commit: bool = False) -> None:
-    logger.info(f"deleting slice{slice_.id}")
+    logger.info("deleting slice%s", slice_.id)
     delete_slice_users_associations(slice_)
     db.session.delete(slice_)
     if do_commit:
@@ -279,7 +279,7 @@ def delete_all_inserted_tables():
             try:
                 delete_sqltable(table, False)
             except Exception:
-                logger.error(f"failed to delete {table.id}", exc_info=True)
+                logger.error("failed to delete %s", table.id, exc_info=True)
                 raise
         if len(inserted_sqltables_ids) > 0:
             db.session.commit()
@@ -290,7 +290,7 @@ def delete_all_inserted_tables():
 
 
 def delete_sqltable(table: SqlaTable, do_commit: bool = False) -> None:
-    logger.info(f"deleting table{table.id}")
+    logger.info("deleting table%s", table.id)
     delete_table_users_associations(table)
     db.session.delete(table)
     if do_commit:
@@ -314,7 +314,7 @@ def delete_all_inserted_dbs():
             try:
                 delete_database(database, False)
             except Exception:
-                logger.error(f"failed to delete {database.id}", exc_info=True)
+                logger.error("failed to delete %s", database.id, exc_info=True)
                 raise
         if len(inserted_databases_ids) > 0:
             db.session.commit()
@@ -325,7 +325,7 @@ def delete_all_inserted_dbs():
 
 
 def delete_database(database: Database, do_commit: bool = False) -> None:
-    logger.info(f"deleting database{database.id}")
+    logger.info("deleting database%s", database.id)
     db.session.delete(database)
     if do_commit:
         db.session.commit()
diff --git a/tests/unit_tests/daos/test_theme_dao.py b/tests/unit_tests/daos/test_theme_dao.py
index 546dee7991..54b4666a56 100644
--- a/tests/unit_tests/daos/test_theme_dao.py
+++ b/tests/unit_tests/daos/test_theme_dao.py
@@ -77,12 +77,14 @@ class TestThemeDAO:
         # Call the method
         result = ThemeDAO.find_system_default()
 
-        # Verify warning was logged
+        # Verify warning was logged with lazy logging format
         mock_logger.warning.assert_called_once()
+        call_args = mock_logger.warning.call_args
         assert (
-            "Multiple system default themes found (2)"
-            in mock_logger.warning.call_args[0][0]
+            call_args[0][0]
+            == "Multiple system default themes found (%s), falling back to config theme"
         )
+        assert call_args[0][1] == 2
 
         # Verify the result is the fallback theme
         assert result == mock_fallback
@@ -169,12 +171,14 @@ class TestThemeDAO:
         # Call the method
         result = ThemeDAO.find_system_dark()
 
-        # Verify warning was logged
+        # Verify warning was logged with lazy logging format
         mock_logger.warning.assert_called_once()
+        call_args = mock_logger.warning.call_args
         assert (
-            "Multiple system dark themes found (2)"
-            in mock_logger.warning.call_args[0][0]
+            call_args[0][0]
+            == "Multiple system dark themes found (%s), falling back to config theme"
         )
+        assert call_args[0][1] == 2
 
         # Verify the result is the fallback theme
         assert result == mock_fallback
diff --git a/tests/unit_tests/migrations/shared/utils_test.py b/tests/unit_tests/migrations/shared/utils_test.py
index 66580d1a4a..fbfdca95d5 100644
--- a/tests/unit_tests/migrations/shared/utils_test.py
+++ b/tests/unit_tests/migrations/shared/utils_test.py
@@ -23,8 +23,13 @@ class DummyLogger:
     def __init__(self):
         self.messages = []
 
-    def info(self, message):
-        self.messages.append(message)
+    def info(self, message, *args):
+        # Handle lazy logging format with multiple arguments
+        if args:
+            formatted_message = message % args
+        else:
+            formatted_message = message
+        self.messages.append(formatted_message)
 
 
 class DummyOp:
@@ -125,7 +130,6 @@ def test_create_unique_index_creates_index(monkeypatch):
     assert call_kwargs.get("unique") is True
     assert call_kwargs.get("columns") == columns
     # And a log message mentioning "Creating index" should be generated.
-    print(dummy_logger.messages)
     assert any("Creating index" in msg for msg in dummy_logger.messages)
 
 
diff --git a/tests/unit_tests/utils/test_screenshot_utils.py b/tests/unit_tests/utils/test_screenshot_utils.py
index 19ca889bea..67d4d638e9 100644
--- a/tests/unit_tests/utils/test_screenshot_utils.py
+++ b/tests/unit_tests/utils/test_screenshot_utils.py
@@ -263,10 +263,12 @@ class TestTakeTiledScreenshot:
            with patch("superset.utils.screenshot_utils.combine_screenshot_tiles"):
                take_tiled_screenshot(mock_page, "dashboard", viewport_height=2000)
 
-                # Should log dashboard dimensions
-                mock_logger.info.assert_any_call("Dashboard: 800x5000px at (50, 100)")
-                # Should log number of tiles
-                mock_logger.info.assert_any_call("Taking 3 screenshot tiles")
+                # Should log dashboard dimensions with lazy logging format
+                mock_logger.info.assert_any_call(
+                    "Dashboard: %sx%spx at (%s, %s)", 800, 5000, 50, 100
+                )
+                # Should log number of tiles with lazy logging format
+                mock_logger.info.assert_any_call("Taking %s screenshot tiles", 3)
 
     def test_exception_handling_returns_none(self):
         """Test that exceptions are handled and None is returned."""
@@ -277,9 +279,10 @@ class TestTakeTiledScreenshot:
            result = take_tiled_screenshot(mock_page, "dashboard", viewport_height=2000)
 
             assert result is None
-            mock_logger.exception.assert_called_once_with(
-                "Tiled screenshot failed: Unexpected error"
-            )
+            # The exception object is passed, not the string
+            call_args = mock_logger.exception.call_args
+            assert call_args[0][0] == "Tiled screenshot failed: %s"
+            assert str(call_args[0][1]) == "Unexpected error"
 
     def test_wait_timeouts_between_tiles(self, mock_page):
         """Test that there are appropriate waits between tiles."""
diff --git a/tests/unit_tests/utils/webdriver_test.py b/tests/unit_tests/utils/webdriver_test.py
index 12b457bdb5..3946aa4aca 100644
--- a/tests/unit_tests/utils/webdriver_test.py
+++ b/tests/unit_tests/utils/webdriver_test.py
@@ -152,16 +152,20 @@ class TestWebDriverSelenium:
         assert call_kwargs["connect_timeout"] is None
         assert call_kwargs["Page_Load_Timeout"] is None
 
-        # Check that warnings were logged
+        # Check that warnings were logged with lazy logging format
         assert mock_logger.warning.call_count == 3
         mock_logger.warning.assert_any_call(
-            "Invalid timeout value for timeout: invalid, setting to None"
+            "Invalid timeout value for %s: %s, setting to None", "timeout", "invalid"
         )
         mock_logger.warning.assert_any_call(
-            "Invalid timeout value for connect_timeout: not_a_number, setting to None"
+            "Invalid timeout value for %s: %s, setting to None",
+            "connect_timeout",
+            "not_a_number",
         )
         mock_logger.warning.assert_any_call(
-            "Invalid timeout value for Page_Load_Timeout: abc123, setting to None"
+            "Invalid timeout value for %s: %s, setting to None",
+            "Page_Load_Timeout",
+            "abc123",
         )
 
     @patch("superset.utils.webdriver.app")

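For readers unfamiliar with the "G rules" named in the commit title: they are ruff's flake8-logging-format checks, which flag eager string formatting (f-strings, %-interpolation, concatenation) inside logging calls. The hunks above apply the corresponding fix by passing arguments to the logger and letting it interpolate lazily. A minimal sketch of the before/after shape (illustrative only; the message and values below are made up, not taken from the commit):

    import logging

    logger = logging.getLogger(__name__)
    row_count = 42  # hypothetical value for illustration

    # Flagged by G004: the f-string is rendered even when DEBUG is disabled.
    logger.debug(f"Processed {row_count} rows")

    # Lazy form preferred by the G rules: %s is interpolated only if the
    # record is actually emitted, and the message template stays constant
    # for log aggregation.
    logger.debug("Processed %s rows", row_count)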