This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git
The following commit(s) were added to refs/heads/main by this push:
new d444d20 Prevent template padding attacks and use a consistent logging style
d444d20 is described below
commit d444d20be776bc9ae5f99631630744f53a5b36e2
Author: Sean B. Palmer <[email protected]>
AuthorDate: Tue Nov 25 15:43:02 2025 +0000
Prevent template padding attacks and use a consistent logging style
---
atr/admin/__init__.py | 10 ++++-----
atr/archives.py | 4 ++--
atr/blueprints/get.py | 9 +-------
atr/blueprints/post.py | 9 +-------
atr/log.py | 49 +++++++++++++++++++++++-------------------
atr/manager.py | 4 ++--
atr/post/keys.py | 2 +-
atr/server.py | 9 ++++----
atr/storage/writers/release.py | 8 +++----
atr/svn/pubsub.py | 11 +++++-----
atr/tasks/checks/paths.py | 8 +++----
atr/tasks/svn.py | 2 +-
atr/util.py | 6 +++---
13 files changed, 61 insertions(+), 70 deletions(-)
diff --git a/atr/admin/__init__.py b/atr/admin/__init__.py
index b8c497b..28a9365 100644
--- a/atr/admin/__init__.py
+++ b/atr/admin/__init__.py
@@ -144,7 +144,7 @@ async def browse_as_post(session: web.Committer,
browse_form: BrowseAsUserForm)
ldap_projects_data = await apache.get_ldap_projects_data()
committee_data = await apache.get_active_committee_data()
ldap_data = ldap_params.results_list[0]
- log.info("Current ASFQuart session data: %s", current_session)
+ log.info(f"Current ASFQuart session data: {current_session}")
new_session_data = _session_data(
ldap_data,
new_uid,
@@ -154,7 +154,7 @@ async def browse_as_post(session: web.Committer,
browse_form: BrowseAsUserForm)
bind_dn,
bind_password,
)
- log.info("New Quart cookie (not ASFQuart session) data: %s", new_session_data)
+ log.info(f"New Quart cookie (not ASFQuart session) data: {new_session_data}")
asfquart.session.write(new_session_data)
await quart.flash(
@@ -538,7 +538,7 @@ async def keys_update_post(session: web.Committer) -> str |
web.WerkzeugResponse
}, 200
except Exception as e:
detail = _format_exception_location(e)
- log.exception("Failed to start key update process: %s", detail)
+ log.exception(f"Failed to start key update process: {detail}")
return {
"message": f"Failed to update keys: {detail}",
"category": "error",
@@ -583,7 +583,7 @@ async def ldap_post(session: web.Committer, lookup_form:
LdapLookupForm) -> str:
)
await asyncio.to_thread(ldap.search, ldap_params)
end = time.perf_counter_ns()
- log.info("LDAP search took %d ms", (end - start) / 1000000)
+ log.info(f"LDAP search took {(end - start) / 1000000} ms")
rendered_form = form.render(
model_cls=LdapLookupForm,
@@ -663,7 +663,7 @@ async def performance(session: web.Committer) -> str:
}
)
except (ValueError, IndexError):
- log.error("Error parsing line: %s", line)
+ log.error(f"Error parsing line: {line}")
continue
# Calculate summary statistics for each route
diff --git a/atr/archives.py b/atr/archives.py
index 664f13b..0f96b0e 100644
--- a/atr/archives.py
+++ b/atr/archives.py
@@ -229,7 +229,7 @@ def _archive_extract_safe_process_symlink(member:
tarfile.TarInfo, extract_dir:
return
os.symlink(link_target, target_path)
except (OSError, NotImplementedError) as e:
- log.warning("Failed to create symlink %s -> %s: %s", target_path, link_target, e)
+ log.warning(f"Failed to create symlink {target_path} -> {link_target}: {e}")
def _safe_path(base_dir: str, *paths: str) -> str | None:
@@ -292,7 +292,7 @@ def _zip_archive_extract_member(
if member.isdir():
target_path = os.path.join(extract_dir, member.name)
if not os.path.abspath(target_path).startswith(os.path.abspath(extract_dir)):
- log.warning("Skipping potentially unsafe path: %s", member.name)
+ log.warning(f"Skipping potentially unsafe path: {member.name}")
return 0, extracted_paths
os.makedirs(target_path, exist_ok=True)
return total_extracted, extracted_paths
diff --git a/atr/blueprints/get.py b/atr/blueprints/get.py
index c4bddc3..eb05ac9 100644
--- a/atr/blueprints/get.py
+++ b/atr/blueprints/get.py
@@ -56,14 +56,7 @@ def committer(path: str) ->
Callable[[web.CommitterRouteFunction[Any]], web.Rout
# TODO: Make this configurable in config.py
log.performance(
- "%s %s %s %s %s %s %s",
- "GET",
- path,
- func.__name__,
- "=",
- 0,
- 0,
- total_ms,
+ f"GET {path} {func.__name__} = 0 0 {total_ms}",
)
return response
diff --git a/atr/blueprints/post.py b/atr/blueprints/post.py
index 5498b3e..9acbd26 100644
--- a/atr/blueprints/post.py
+++ b/atr/blueprints/post.py
@@ -59,14 +59,7 @@ def committer(path: str) ->
Callable[[web.CommitterRouteFunction[Any]], web.Rout
# TODO: Make this configurable in config.py
log.performance(
- "%s %s %s %s %s %s %s",
- "POST",
- path,
- func.__name__,
- "=",
- 0,
- 0,
- total_ms,
+ f"POST {path} {func.__name__} = 0 0 {total_ms}",
)
return response
diff --git a/atr/log.py b/atr/log.py
index 5f9e9ab..13a68d7 100644
--- a/atr/log.py
+++ b/atr/log.py
@@ -19,7 +19,7 @@ import inspect
import logging
import logging.handlers
import queue
-from typing import Any, Final
+from typing import Final
PERFORMANCE: logging.Logger | None = None
@@ -57,43 +57,42 @@ def caller_name(depth: int = 1) -> str:
return name
-def critical(msg: str, *args: Any, **kwargs: Any) -> None:
- _event(logging.CRITICAL, msg, *args, **kwargs)
+def critical(msg: str) -> None:
+ _event(logging.CRITICAL, msg)
-def debug(msg: str, *args: Any, **kwargs: Any) -> None:
- _event(logging.DEBUG, msg, *args, **kwargs)
+def debug(msg: str) -> None:
+ _event(logging.DEBUG, msg)
-def error(msg: str, *args: Any, **kwargs: Any) -> None:
- _event(logging.ERROR, msg, *args, **kwargs)
+def error(msg: str) -> None:
+ _event(logging.ERROR, msg)
-def exception(msg: str, *args: Any, **kwargs: Any) -> None:
- kwargs.setdefault("exc_info", True)
- _event(logging.ERROR, msg, *args, **kwargs)
+def exception(msg: str) -> None:
+ _event(logging.ERROR, msg, exc_info=True)
-def info(msg: str, *args: Any, **kwargs: Any) -> None:
- _event(logging.INFO, msg, *args, **kwargs)
+def info(msg: str) -> None:
+ _event(logging.INFO, msg)
def interface_name(depth: int = 1) -> str:
return caller_name(depth=depth)
-def log(level: int, msg: str, *args: Any, **kwargs: Any) -> None:
+def log(level: int, msg: str) -> None:
# Custom log level
- _event(level, msg, *args, **kwargs)
+ _event(level, msg)
def python_repr(object_name: str) -> str:
return f"<{object_name}>"
-def performance(msg: str, *args: Any, **kwargs: Any) -> None:
+def performance(msg: str) -> None:
if PERFORMANCE is not None:
- PERFORMANCE.info(msg, *args, **kwargs)
+ PERFORMANCE.info(msg)
def performance_init() -> None:
@@ -101,7 +100,7 @@ def performance_init() -> None:
PERFORMANCE = _performance_logger()
-def secret(msg: str, data: bytes, *args: Any, **kwargs: Any) -> None:
+def secret(msg: str, data: bytes) -> None:
import base64
import nacl.encoding as encoding
@@ -120,22 +119,28 @@ def secret(msg: str, data: bytes, *args: Any, **kwargs:
Any) -> None:
)
ciphertext = public.SealedBox(recipient_pk).encrypt(data)
encoded_ciphertext = base64.b64encode(ciphertext).decode("ascii")
- _event(logging.INFO, f"{msg} {encoded_ciphertext}", *args, **kwargs)
+ _event(logging.INFO, f"{msg} {encoded_ciphertext}")
-def warning(msg: str, *args: Any, **kwargs: Any) -> None:
- _event(logging.WARNING, msg, *args, **kwargs)
+def warning(msg: str) -> None:
+ _event(logging.WARNING, msg)
def _caller_logger(depth: int = 1) -> logging.Logger:
return logging.getLogger(caller_name(depth))
-def _event(level: int, msg: str, *args: Any, stacklevel: int = 3, **kwargs: Any) -> None:
+def _event(level: int, msg: str, stacklevel: int = 3, exc_info: bool = False) -> None:
logger = _caller_logger(depth=3)
# Stack level 1 is *here*, 2 is the caller, 3 is the caller of the caller
# I.e. _event (1), log.* (2), actual caller (3)
- logger.log(level, msg, *args, stacklevel=stacklevel, **kwargs)
+ # TODO: We plan to use t-strings instead of the present f-strings for all logging calls
+ # To do so, however, we first need to migrate to Python 3.14
+ # https://github.com/apache/tooling-trusted-releases/issues/339
+ # https://github.com/apache/tooling-trusted-releases/issues/346
+ # The stacklevel and exc_info keyword arguments are not available as parameters
+ # Therefore this should be safe even with an untrusted msg template
+ logger.log(level, msg, stacklevel=stacklevel, exc_info=exc_info)
def _performance_logger() -> logging.Logger:
diff --git a/atr/manager.py b/atr/manager.py
index 74023db..984531b 100644
--- a/atr/manager.py
+++ b/atr/manager.py
@@ -65,7 +65,7 @@ class WorkerManager:
return
self.running = True
- log.info("Starting worker manager in %s", os.getcwd())
+ log.info(f"Starting worker manager in {os.getcwd()}")
# Start initial workers
for _ in range(self.min_workers):
@@ -189,7 +189,7 @@ class WorkerManager:
except asyncio.CancelledError:
break
except Exception as e:
- log.error(f"Error in worker monitor: {e}", exc_info=e)
+ log.exception(f"Error in worker monitor: {e}")
# TODO: How long should we wait before trying again?
await asyncio.sleep(1.0)
diff --git a/atr/post/keys.py b/atr/post/keys.py
index 4a0d916..0876cf6 100644
--- a/atr/post/keys.py
+++ b/atr/post/keys.py
@@ -69,7 +69,7 @@ async def add(session: web.Committer, add_openpgp_key_form:
shared.keys.AddOpenP
await quart.flash(f"OpenPGP key {fingerprint_upper} added successfully.", "success")
except web.FlashError as e:
- log.warning("FlashError adding OpenPGP key: %s", e)
+ log.warning(f"FlashError adding OpenPGP key: {e}")
await quart.flash(str(e), "error")
except Exception as e:
log.exception("Error adding OpenPGP key:")
diff --git a/atr/server.py b/atr/server.py
index 34a721f..2d68df8 100644
--- a/atr/server.py
+++ b/atr/server.py
@@ -190,11 +190,12 @@ def app_setup_lifecycle(app: base.QuartApp) -> None:
log.info("PubSub SVN listener task created")
else:
log.info(
- "PubSub SVN listener not started: pubsub_url=%s pubsub_user=%s pubsub_password=%s",
- bool(valid_pubsub_url),
- bool(pubsub_user),
+ "PubSub SVN listener not started: "
+ f"pubsub_url={bool(valid_pubsub_url)} "
+ f"pubsub_user={bool(pubsub_user)} "
# Essential to use bool(...) here to avoid logging the password
- bool(pubsub_password),
+ # TODO: We plan to add secret scanning when we migrate to t-strings
+ f"pubsub_password={bool(pubsub_password)}",
)
ssh_server = await ssh.server_start()
diff --git a/atr/storage/writers/release.py b/atr/storage/writers/release.py
index 8db08f0..66ca901 100644
--- a/atr/storage/writers/release.py
+++ b/atr/storage/writers/release.py
@@ -497,13 +497,13 @@ class CommitteeParticipant(FoundationCommitter):
# Delete from the filesystem
try:
if await aiofiles.os.path.isdir(release_dir):
- log.info("Deleting filesystem directory: %s", release_dir)
+ log.info(f"Deleting filesystem directory: {release_dir}")
await aioshutil.rmtree(release_dir)
- log.info("Successfully deleted directory: %s", release_dir)
+ log.info(f"Successfully deleted directory: {release_dir}")
else:
- log.warning("Filesystem directory not found, skipping deletion: %s", release_dir)
+ log.warning(f"Filesystem directory not found, skipping deletion: {release_dir}")
except Exception as e:
- log.exception("Error deleting filesystem directory %s:", release_dir)
+ log.exception(f"Error deleting filesystem directory {release_dir}:")
return (
f"Database records for '{project_name} {version}' deleted,"
f" but failed to delete filesystem directory: {e!s}"
diff --git a/atr/svn/pubsub.py b/atr/svn/pubsub.py
index 9c53658..340e0fa 100644
--- a/atr/svn/pubsub.py
+++ b/atr/svn/pubsub.py
@@ -67,8 +67,7 @@ class SVNListener:
if not self.url.startswith(("http://", "https://")):
log.error(
- "Invalid PubSub URL: %r. Expected full URL like 'https://pubsub.apache.org:2069'",
- self.url,
+ f"Invalid PubSub URL: {self.url!r}. Expected full URL like 'https://pubsub.apache.org:2069'",
)
log.warning("SVNListener disabled due to invalid URL")
return
@@ -89,13 +88,13 @@ class SVNListener:
if not pubsub_path.startswith(_WATCHED_PREFIXES):
# Ignore commits outside dist/dev or dist/release
continue
- log.debug("PubSub payload: %s", payload)
+ log.debug(f"PubSub payload: {payload}")
await self._process_payload(payload)
except asyncio.CancelledError:
log.info("SVNListener cancelled, shutting down gracefully")
raise
except Exception as exc:
- log.error("SVNListener error: %s", exc, exc_info=True)
+ log.exception(f"SVNListener error: {exc}")
finally:
log.info("SVNListener.start() finished")
@@ -119,6 +118,6 @@ class SVNListener:
local_path = self.working_copy_root / repo_path[len(prefix) :].lstrip("/")
try:
await svn.update(local_path)
- log.info("svn updated %s", local_path)
+ log.info(f"svn updated {local_path}")
except Exception as exc:
- log.warning("failed svn update %s: %s", local_path, exc)
+ log.warning(f"failed svn update {local_path}: {exc}")
diff --git a/atr/tasks/checks/paths.py b/atr/tasks/checks/paths.py
index a79a53d..8f82e9c 100644
--- a/atr/tasks/checks/paths.py
+++ b/atr/tasks/checks/paths.py
@@ -79,7 +79,7 @@ async def check(args: checks.FunctionArguments) ->
results.Results | None:
return
if not await aiofiles.os.path.isdir(base_path):
- log.error("Base release directory does not exist or is not a directory: %s", base_path)
+ log.error(f"Base release directory does not exist or is not a directory: {base_path}")
return
is_podling = args.extra_args.get("is_podling", False)
@@ -204,13 +204,13 @@ async def _check_path_process_single(
allowed_top_level = _ALLOWED_TOP_LEVEL
if ext_artifact:
- log.info("Checking artifact rules for %s", full_path)
+ log.info(f"Checking artifact rules for {full_path}")
await _check_artifact_rules(base_path, relative_path, relative_paths, errors, is_podling)
elif ext_metadata:
- log.info("Checking metadata rules for %s", full_path)
+ log.info(f"Checking metadata rules for {full_path}")
await _check_metadata_rules(base_path, relative_path, relative_paths, ext_metadata, errors, warnings)
else:
- log.info("Checking general rules for %s", full_path)
+ log.info(f"Checking general rules for {full_path}")
if (relative_path.parent == pathlib.Path(".")) and (relative_path.name not in allowed_top_level):
warnings.append(f"Unknown top level file: {relative_path.name}")
diff --git a/atr/tasks/svn.py b/atr/tasks/svn.py
index c193e06..87f532a 100644
--- a/atr/tasks/svn.py
+++ b/atr/tasks/svn.py
@@ -160,7 +160,7 @@ async def _import_files_core_run_svn_export(svn_command:
list[str], temp_export_
log.warning(f"SVN stderr: {stderr_str}")
except TimeoutError:
- log.error("SVN export command timed out after %d seconds", timeout_seconds)
+ log.error(f"SVN export command timed out after {timeout_seconds} seconds")
raise SvnImportError("SVN export command timed out")
except FileNotFoundError:
log.error("svn command not found. Is it installed and in PATH?")
diff --git a/atr/util.py b/atr/util.py
index 32c1d4f..61b87a0 100644
--- a/atr/util.py
+++ b/atr/util.py
@@ -848,7 +848,7 @@ async def task_archive_url(task_mid: str, recipient: str |
None = None) -> str |
return None
return "https://lists.apache.org/thread/" + mid
except Exception:
- log.exception("Failed to get archive URL for task %s", task_mid)
+ log.exception(f"Failed to get archive URL for task {task_mid}")
return None
@@ -884,13 +884,13 @@ async def thread_messages(
async for url, status, content in get_urls_as_completed(email_urls):
if status != 200 or not content:
- log.warning("Failed to fetch email data from %s: %s", url, status)
+ log.warning(f"Failed to fetch email data from {url}: {status}")
continue
try:
msg_json = json.loads(content.decode())
messages.append(msg_json)
except Exception as exc:
- log.warning("Failed to parse email JSON from %s: %s", url, exc)
+ log.warning(f"Failed to parse email JSON from {url}: {exc}")
messages.sort(key=lambda m: m.get("epoch", 0))
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]