This is an automated email from the ASF dual-hosted git repository.

arm pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/main by this push:
     new 24e53a1  #549 and #471 - implement structured logging when running not 
in debug mode
24e53a1 is described below

commit 24e53a127be766930011d0654d206765a2c941e2
Author: Alastair McFarlane <[email protected]>
AuthorDate: Thu Jan 22 11:56:51 2026 +0000

    #549 and #471 - implement structured logging when running not in debug mode
---
 .pre-commit-config.yaml |   6 +-
 atr/log.py              |  24 +++++-
 atr/server.py           | 194 +++++++++++++++++++++++++++++++++---------------
 atr/worker.py           |  45 +++++++++--
 pyproject.toml          |   1 +
 uv.lock                 |  52 ++++++++-----
 6 files changed, 232 insertions(+), 90 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4a71a40..ae5c551 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -84,7 +84,7 @@ repos:
 #        - --profile=jinja
 #        - --reformat
 - repo: https://github.com/thibaudcolas/pre-commit-stylelint
-  rev: v16.26.1
+  rev: v17.0.0
   hooks:
     - id: stylelint
       additional_dependencies: ['[email protected]', 
'[email protected]']
@@ -92,7 +92,7 @@ repos:
       types_or: ['css']
       args: ['--fix', '--allow-empty-input']
 - repo: https://github.com/woodruffw/zizmor-pre-commit
-  rev: v1.21.0
+  rev: v1.22.0
   hooks:
     - id: zizmor
       args: [--min-severity, low]
@@ -101,7 +101,7 @@ repos:
   hooks:
     - id: pip-audit
 - repo: https://github.com/oxc-project/mirrors-oxlint
-  rev: v1.39.0
+  rev: v1.41.0
   hooks:
     - id: oxlint
       name: lint JS files with Oxlint
diff --git a/atr/log.py b/atr/log.py
index 8dca4ac..bc7b835 100644
--- a/atr/log.py
+++ b/atr/log.py
@@ -23,6 +23,8 @@ import queue
 import threading
 from typing import Final
 
+import structlog
+
 PERFORMANCE: logging.Logger | None = None
 
 _global_recent_logs: collections.deque[str] | None = None
@@ -41,6 +43,19 @@ class BufferingHandler(logging.Handler):
             self.handleError(record)
 
 
+class StructlogQueueHandler(logging.handlers.QueueHandler):
+    """QueueHandler that preserves structlog's record.msg dict."""
+
+    def prepare(self, record: logging.LogRecord) -> logging.LogRecord:
+        # Don't call format() - it would convert the dict msg to a string
+        return record
+
+
+def add_context(**kwargs):
+    """Add context to the request log"""
+    structlog.contextvars.bind_contextvars(**kwargs)
+
+
 def caller_name(depth: int = 1) -> str:
     frame = inspect.currentframe()
     for _ in range(depth + 1):
@@ -74,6 +89,11 @@ def caller_name(depth: int = 1) -> str:
     return name
 
 
+def clear_context():
+    """Clear context from the request log"""
+    structlog.contextvars.clear_contextvars()
+
+
 def critical(msg: str) -> None:
     _event(logging.CRITICAL, msg)
 
@@ -160,7 +180,7 @@ def warning(msg: str) -> None:
 
 
 def _caller_logger(depth: int = 1) -> logging.Logger:
-    return logging.getLogger(caller_name(depth))
+    return structlog.getLogger(caller_name(depth))
 
 
 def _event(level: int, msg: str, stacklevel: int = 3, exc_info: bool = False) 
-> None:
@@ -191,7 +211,7 @@ def _performance_logger() -> logging.Logger:
     performance_queue = queue.Queue(-1)
     performance_listener = logging.handlers.QueueListener(performance_queue, 
performance_handler)
     performance_listener.start()
-    performance.addHandler(logging.handlers.QueueHandler(performance_queue))
+    performance.addHandler(StructlogQueueHandler(performance_queue))
     performance.setLevel(logging.INFO)
     # If we don't set propagate to False then it logs to the term as well
     performance.propagate = False
diff --git a/atr/server.py b/atr/server.py
index 380a7f2..ad24301 100644
--- a/atr/server.py
+++ b/atr/server.py
@@ -28,6 +28,7 @@ import queue
 import stat
 import sys
 import urllib.parse
+import uuid
 from collections.abc import Iterable
 from typing import Any, Final
 
@@ -39,7 +40,6 @@ import blockbuster
 import quart
 import quart_schema
 import quart_wtf
-import rich.logging as rich_logging
 import werkzeug.routing as routing
 
 import atr
@@ -249,32 +249,7 @@ def _app_setup_lifecycle(app: base.QuartApp, app_config: 
type[config.AppConfig])
 
         await _initialise_test_environment(app_config)
 
-        pubsub_url = app_config.PUBSUB_URL
-        pubsub_user = app_config.PUBSUB_USER
-        pubsub_password = app_config.PUBSUB_PASSWORD
-        parsed_pubsub_url = urllib.parse.urlparse(pubsub_url) if pubsub_url 
else None
-        valid_pubsub_url = bool(parsed_pubsub_url and parsed_pubsub_url.scheme 
and parsed_pubsub_url.netloc)
-
-        if valid_pubsub_url and pubsub_url and pubsub_user and pubsub_password:
-            log.info("Starting PubSub SVN listener")
-            listener = pubsub.SVNListener(
-                working_copy_root=app_config.SVN_STORAGE_DIR,
-                url=pubsub_url,
-                username=pubsub_user,
-                password=pubsub_password,
-            )
-            task = asyncio.create_task(listener.start())
-            app.extensions["svn_listener"] = task
-            log.info("PubSub SVN listener task created")
-        else:
-            log.info(
-                "PubSub SVN listener not started: "
-                f"pubsub_url={bool(valid_pubsub_url)} "
-                f"pubsub_user={bool(pubsub_user)} "
-                # Essential to use bool(...) here to avoid logging the password
-                # TODO: We plan to add secret scanning when we migrate to 
t-strings
-                f"pubsub_password={bool(pubsub_password)}",
-            )
+        await _initialise_pubsub(app_config, app)
 
         ssh_server = await ssh.server_start()
         app.extensions["ssh_server"] = ssh_server
@@ -303,58 +278,97 @@ def _app_setup_lifecycle(app: base.QuartApp, app_config: 
type[config.AppConfig])
             with contextlib.suppress(asyncio.CancelledError):
                 await task
 
+        await db.shutdown_database()
+
+        if audit_listener := app.extensions.get("audit_listener"):
+            audit_listener.stop()
         if listener := app.extensions.get("logging_listener"):
             listener.stop()
 
-        await db.shutdown_database()
-
         app.background_tasks.clear()
 
 
 def _app_setup_logging(app: base.QuartApp, config_mode: config.Mode, 
app_config: type[config.AppConfig]) -> None:
-    """Setup application logging."""
-    import logging
+    """Setup application logging with structlog and queue-based handlers."""
     import logging.handlers
 
-    console_handler = rich_logging.RichHandler(rich_tracebacks=True, 
show_time=False)
-    log_queue = queue.Queue(-1)
-    handlers: list[logging.Handler] = [console_handler]
-    if (config_mode == config.Mode.Debug) and app_config.ALLOW_TESTS:
+    import structlog
+
+    # Shared processors for structlog (run before formatting)
+    shared_processors: list[structlog.types.Processor] = [
+        structlog.contextvars.merge_contextvars,
+        structlog.stdlib.add_log_level,
+        structlog.stdlib.add_logger_name,
+        structlog.stdlib.PositionalArgumentsFormatter(),
+        structlog.processors.TimeStamper(fmt="iso"),
+        structlog.processors.StackInfoRenderer(),
+        structlog.processors.UnicodeDecoder(),
+    ]
+
+    # Output handler: pretty console for dev (Debug and Allow Tests), JSON for 
non-dev (Docker, etc.)
+    is_dev = (config_mode == config.Mode.Debug) and app_config.ALLOW_TESTS
+    output_handler = logging.StreamHandler(sys.stderr)
+    if is_dev:
+        renderer: structlog.types.Processor = 
structlog.dev.ConsoleRenderer(colors=True)
+    else:
+        renderer = structlog.processors.JSONRenderer()
+    output_handler.setFormatter(
+        structlog.stdlib.ProcessorFormatter(
+            processors=[
+                structlog.stdlib.ProcessorFormatter.remove_processors_meta,
+                renderer,
+            ],
+            foreign_pre_chain=shared_processors,
+        )
+    )
+
+    # Queue-based logging for thread safety
+    log_queue: queue.Queue[logging.LogRecord] = queue.Queue(-1)
+    handlers: list[logging.Handler] = [output_handler]
+    if is_dev:
         handlers.append(log.create_debug_handler())
-    listener = logging.handlers.QueueListener(log_queue, *handlers)
+
+    listener = logging.handlers.QueueListener(log_queue, *handlers, 
respect_handler_level=True)
     app.extensions["logging_listener"] = listener
 
     logging.basicConfig(
-        format="[ %(asctime)s.%(msecs)03d ] %(process)d <%(name)s> 
%(message)s",
         level=logging.INFO,
-        datefmt="%Y-%m-%d %H:%M:%S",
-        handlers=[logging.handlers.QueueHandler(log_queue)],
+        handlers=[log.StructlogQueueHandler(log_queue)],
         force=True,
     )
 
-    # Configure dedicated audit logger
-    try:
-        audit_handler = logging.FileHandler(
-            app_config.STORAGE_AUDIT_LOG_FILE,
-            encoding="utf-8",
-            mode="a",
+    structlog.configure(
+        processors=[
+            *shared_processors,
+            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
+        ],
+        wrapper_class=structlog.stdlib.BoundLogger,
+        context_class=dict,
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        cache_logger_on_first_use=True,
+    )
+
+    # Audit logger - JSON to dedicated file via queue
+    audit_handler = logging.FileHandler(app_config.STORAGE_AUDIT_LOG_FILE, 
encoding="utf-8")
+    audit_handler.setFormatter(
+        structlog.stdlib.ProcessorFormatter(
+            processors=[
+                structlog.stdlib.ProcessorFormatter.remove_processors_meta,
+                structlog.processors.JSONRenderer(),
+            ],
+            foreign_pre_chain=shared_processors,
         )
-        # audit_handler.setFormatter(
-        #     logging.Formatter("%(message)s")
-        # )
-        audit_queue = queue.Queue(-1)
-        audit_listener = logging.handlers.QueueListener(audit_queue, 
audit_handler)
-        audit_listener.start()
-        app.extensions["audit_listener"] = audit_listener
-
-        audit_logger = logging.getLogger("atr.storage.audit")
-        audit_logger.setLevel(logging.INFO)
-        audit_logger.addHandler(audit_handler)
-        audit_logger.propagate = False
-        audit_queue_handler = logging.handlers.QueueHandler(audit_queue)
-        audit_logger.handlers = [audit_queue_handler]
-    except Exception:
-        logging.getLogger(__name__).exception("Failed to configure audit 
logger")
+    )
+    audit_queue: queue.Queue[logging.LogRecord] = queue.Queue(-1)
+    audit_listener = logging.handlers.QueueListener(audit_queue, audit_handler)
+    audit_listener.start()
+    app.extensions["audit_listener"] = audit_listener
+
+    audit_logger = logging.getLogger("atr.storage.audit")
+    audit_logger.setLevel(logging.INFO)
+    audit_logger.handlers.clear()
+    audit_logger.addHandler(logging.handlers.QueueHandler(audit_queue))
+    audit_logger.propagate = False
 
     # Enable debug output for atr.* in DEBUG mode
     if config_mode == config.Mode.Debug:
@@ -369,6 +383,34 @@ def _app_setup_logging(app: base.QuartApp, config_mode: 
config.Mode, app_config:
             log.info(f"STATE_DIR    = {app_config.STATE_DIR}")
 
 
+def _app_setup_request_lifecycle(app: base.QuartApp) -> None:
+    """Setup application request lifecycle hooks."""
+    import structlog
+
+    logger = structlog.get_logger("atr.request")
+
+    @app.before_request
+    async def bind_request_context_vars() -> None:
+        log.clear_context()
+        log.add_context(request_id=str(uuid.uuid4()))
+
+        # Bind user_id if authenticated
+        session = await asfquart.session.read()
+        if session is not None:
+            log.add_context(user_id=session.uid)
+
+    @app.after_request
+    async def log_request(response: quart.Response) -> quart.Response:
+        logger.info(
+            "request",
+            method=quart.request.method,
+            path=quart.request.path,
+            status=response.status_code,
+            remote_addr=quart.request.remote_addr,
+        )
+        return response
+
+
 def _app_setup_security_headers(app: base.QuartApp) -> None:
     """Setup security headers including a Content Security Policy."""
 
@@ -450,6 +492,7 @@ def _create_app(app_config: type[config.AppConfig]) -> 
base.QuartApp:
     filters.register_filters(app)
     _app_setup_context(app)
     _app_setup_security_headers(app)
+    _app_setup_request_lifecycle(app)
     _app_setup_lifecycle(app, app_config)
 
     # _register_recurrent_tasks()
@@ -479,6 +522,35 @@ def _create_app(app_config: type[config.AppConfig]) -> 
base.QuartApp:
     return app
 
 
+async def _initialise_pubsub(conf: type[config.AppConfig], app: base.QuartApp):
+    pubsub_url = conf.PUBSUB_URL
+    pubsub_user = conf.PUBSUB_USER
+    pubsub_password = conf.PUBSUB_PASSWORD
+    parsed_pubsub_url = urllib.parse.urlparse(pubsub_url) if pubsub_url else 
None
+    valid_pubsub_url = bool(parsed_pubsub_url and parsed_pubsub_url.scheme and 
parsed_pubsub_url.netloc)
+
+    if valid_pubsub_url and pubsub_url and pubsub_user and pubsub_password:
+        log.info("Starting PubSub SVN listener")
+        listener = pubsub.SVNListener(
+            working_copy_root=conf.SVN_STORAGE_DIR,
+            url=pubsub_url,
+            username=pubsub_user,
+            password=pubsub_password,
+        )
+        task = asyncio.create_task(listener.start())
+        app.extensions["svn_listener"] = task
+        log.info("PubSub SVN listener task created")
+    else:
+        log.info(
+            "PubSub SVN listener not started: "
+            f"pubsub_url={bool(valid_pubsub_url)} "
+            f"pubsub_user={bool(pubsub_user)} "
+            # Essential to use bool(...) here to avoid logging the password
+            # TODO: We plan to add secret scanning when we migrate to t-strings
+            f"pubsub_password={bool(pubsub_password)}",
+        )
+
+
 async def _initialise_test_environment(conf: type[config.AppConfig]) -> None:
     if not conf.ALLOW_TESTS:
         return
diff --git a/atr/worker.py b/atr/worker.py
index 612c5da..be0268c 100644
--- a/atr/worker.py
+++ b/atr/worker.py
@@ -58,6 +58,7 @@ def main() -> None:
         os.chdir(conf.STATE_DIR)
 
     _setup_logging()
+    log.add_context(worker_pid=os.getpid())
     log.info(f"Starting worker process with pid {os.getpid()}")
 
     tasks: list[asyncio.Task] = []
@@ -92,14 +93,45 @@ def main() -> None:
 
 
 def _setup_logging() -> None:
-    import logging
+    import logging.handlers
 
-    # Configure logging
-    log_format = "[%(asctime)s.%(msecs)03d] [%(process)d] [%(levelname)s] 
%(message)s"
-    date_format = "%Y-%m-%d %H:%M:%S"
+    import structlog
 
     os.makedirs("logs", exist_ok=True)
-    logging.basicConfig(filename="logs/atr-worker.log", format=log_format, 
datefmt=date_format, level=logging.INFO)
+    # Configure logging
+    shared_processors: list[structlog.types.Processor] = [
+        structlog.contextvars.merge_contextvars,
+        structlog.stdlib.add_log_level,
+        structlog.stdlib.add_logger_name,
+        structlog.stdlib.PositionalArgumentsFormatter(),
+        structlog.processors.TimeStamper(fmt="iso"),
+        structlog.processors.StackInfoRenderer(),
+        structlog.processors.UnicodeDecoder(),
+    ]
+    output_handler = logging.FileHandler("logs/atr-worker.log")
+    renderer = structlog.processors.JSONRenderer()
+    output_handler.setFormatter(
+        structlog.stdlib.ProcessorFormatter(
+            processors=[
+                structlog.stdlib.ProcessorFormatter.remove_processors_meta,
+                renderer,
+            ],
+            foreign_pre_chain=shared_processors,
+        )
+    )
+
+    logging.basicConfig(level=logging.INFO, handlers=[output_handler], 
force=True)
+
+    structlog.configure(
+        processors=[
+            *shared_processors,
+            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
+        ],
+        wrapper_class=structlog.stdlib.BoundLogger,
+        context_class=dict,
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        cache_logger_on_first_use=True,
+    )
 
 
 # Task functions
@@ -266,10 +298,13 @@ async def _worker_loop_run() -> None:
     processed = 0
     max_to_process = 10
     while True:
+        log.clear_context()
         try:
+            log.add_context(worker_pid=os.getpid())
             task = await _task_next_claim()
             if task:
                 task_id, task_type, task_args, asf_uid = task
+                log.add_context(task_id=task_id, task_type=task_type, 
asf_uid=asf_uid)
                 await _task_process(task_id, task_type, task_args, asf_uid)
                 processed += 1
                 # Only process max_to_process tasks and then exit
diff --git a/pyproject.toml b/pyproject.toml
index 7d42d65..4a8466b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -48,6 +48,7 @@ dependencies = [
   "semver>=3.0.4",
   "sqlmodel~=0.0.24",
   "standard-imghdr>=3.13.0",
+  "structlog>=25.5.0",
   "yyjson>=4.0.6",
 ]
 
diff --git a/uv.lock b/uv.lock
index c270089..250ba1a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -3,7 +3,7 @@ revision = 3
 requires-python = "==3.13.*"
 
 [options]
-exclude-newer = "2026-01-16T20:26:29Z"
+exclude-newer = "2026-01-22T11:52:25Z"
 
 [[package]]
 name = "aiofiles"
@@ -178,7 +178,7 @@ wheels = [
 [[package]]
 name = "asfquart"
 version = "0.1.13"
-source = { git = 
"https://github.com/apache/infrastructure-asfquart.git?rev=main#a00d184c94912959d5ceaa9bbfd27976e63874ac";
 }
+source = { git = 
"https://github.com/apache/infrastructure-asfquart.git?rev=main#f43799f2d40ec023e2547dcdd895ae7a504205e1";
 }
 dependencies = [
     { name = "aiohttp" },
     { name = "asfpy" },
@@ -1041,11 +1041,11 @@ wheels = [
 
 [[package]]
 name = "packaging"
-version = "25.0"
+version = "26.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = 
"https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz";,
 hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size 
= 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz";,
 hash = 
"sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size 
= 143416, upload-time = "2026-01-21T20:50:39.064Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl";,
 hash = 
"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size 
= 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+    { url = 
"https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl";,
 hash = 
"sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size 
= 74366, upload-time = "2026-01-21T20:50:37.788Z" },
 ]
 
 [[package]]
@@ -1222,11 +1222,11 @@ wheels = [
 
 [[package]]
 name = "pycparser"
-version = "2.23"
+version = "3.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = 
"https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz";,
 hash = 
"sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size 
= 173734, upload-time = "2025-09-09T13:23:47.91Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz";,
 hash = 
"sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size 
= 103492, upload-time = "2026-01-21T14:26:51.89Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl";,
 hash = 
"sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size 
= 118140, upload-time = "2025-09-09T13:23:46.651Z" },
+    { url = 
"https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl";,
 hash = 
"sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size 
= 48172, upload-time = "2026-01-21T14:26:50.693Z" },
 ]
 
 [[package]]
@@ -1752,23 +1752,26 @@ wheels = [
 
 [[package]]
 name = "sqlalchemy"
-version = "2.0.45"
+version = "2.0.46"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "greenlet", marker = "platform_machine == 'AMD64' or 
platform_machine == 'WIN32' or platform_machine == 'aarch64' or 
platform_machine == 'amd64' or platform_machine == 'ppc64le' or 
platform_machine == 'win32' or platform_machine == 'x86_64'" },
     { name = "typing-extensions" },
 ]
-sdist = { url = 
"https://files.pythonhosted.org/packages/be/f9/5e4491e5ccf42f5d9cfc663741d261b3e6e1683ae7812114e7636409fcc6/sqlalchemy-2.0.45.tar.gz";,
 hash = 
"sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88", size 
= 9869912, upload-time = "2025-12-09T21:05:16.737Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz";,
 hash = 
"sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size 
= 9865393, upload-time = "2026-01-21T18:03:45.119Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/6a/c8/7cc5221b47a54edc72a0140a1efa56e0a2730eefa4058d7ed0b4c4357ff8/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl";,
 hash = 
"sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf", size 
= 3277082, upload-time = "2025-12-09T22:11:06.167Z" },
-    { url = 
"https://files.pythonhosted.org/packages/0e/50/80a8d080ac7d3d321e5e5d420c9a522b0aa770ec7013ea91f9a8b7d36e4a/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl";,
 hash = 
"sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e", size 
= 3293131, upload-time = "2025-12-09T22:13:52.626Z" },
-    { url = 
"https://files.pythonhosted.org/packages/da/4c/13dab31266fc9904f7609a5dc308a2432a066141d65b857760c3bef97e69/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_aarch64.whl";,
 hash = 
"sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b", size 
= 3225389, upload-time = "2025-12-09T22:11:08.093Z" },
-    { url = 
"https://files.pythonhosted.org/packages/74/04/891b5c2e9f83589de202e7abaf24cd4e4fa59e1837d64d528829ad6cc107/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_x86_64.whl";,
 hash = 
"sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8", size 
= 3266054, upload-time = "2025-12-09T22:13:54.262Z" },
-    { url = 
"https://files.pythonhosted.org/packages/f1/24/fc59e7f71b0948cdd4cff7a286210e86b0443ef1d18a23b0d83b87e4b1f7/sqlalchemy-2.0.45-cp313-cp313-win32.whl";,
 hash = 
"sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a", size 
= 2110299, upload-time = "2025-12-09T21:39:33.486Z" },
-    { url = 
"https://files.pythonhosted.org/packages/c0/c5/d17113020b2d43073412aeca09b60d2009442420372123b8d49cc253f8b8/sqlalchemy-2.0.45-cp313-cp313-win_amd64.whl";,
 hash = 
"sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee", size 
= 2136264, upload-time = "2025-12-09T21:39:36.801Z" },
-    { url = 
"https://files.pythonhosted.org/packages/3d/8d/bb40a5d10e7a5f2195f235c0b2f2c79b0bf6e8f00c0c223130a4fbd2db09/sqlalchemy-2.0.45-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl";,
 hash = 
"sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6", size 
= 3521998, upload-time = "2025-12-09T22:13:28.622Z" },
-    { url = 
"https://files.pythonhosted.org/packages/75/a5/346128b0464886f036c039ea287b7332a410aa2d3fb0bb5d404cb8861635/sqlalchemy-2.0.45-cp313-cp313t-musllinux_1_2_x86_64.whl";,
 hash = 
"sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a", size 
= 3473434, upload-time = "2025-12-09T22:13:30.188Z" },
-    { url = 
"https://files.pythonhosted.org/packages/bf/e1/3ccb13c643399d22289c6a9786c1a91e3dcbb68bce4beb44926ac2c557bf/sqlalchemy-2.0.45-py3-none-any.whl";,
 hash = 
"sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0", size 
= 1936672, upload-time = "2025-12-09T21:54:52.608Z" },
+    { url = 
"https://files.pythonhosted.org/packages/b3/4b/fa7838fe20bb752810feed60e45625a9a8b0102c0c09971e2d1d95362992/sqlalchemy-2.0.46-cp313-cp313-macosx_11_0_arm64.whl";,
 hash = 
"sha256:93a12da97cca70cea10d4b4fc602589c4511f96c1f8f6c11817620c021d21d00", size 
= 2150268, upload-time = "2026-01-21T19:05:56.621Z" },
+    { url = 
"https://files.pythonhosted.org/packages/46/c1/b34dccd712e8ea846edf396e00973dda82d598cb93762e55e43e6835eba9/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl";,
 hash = 
"sha256:af865c18752d416798dae13f83f38927c52f085c52e2f32b8ab0fef46fdd02c2", size 
= 3276511, upload-time = "2026-01-21T18:46:49.022Z" },
+    { url = 
"https://files.pythonhosted.org/packages/96/48/a04d9c94753e5d5d096c628c82a98c4793b9c08ca0e7155c3eb7d7db9f24/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl";,
 hash = 
"sha256:8d679b5f318423eacb61f933a9a0f75535bfca7056daeadbf6bd5bcee6183aee", size 
= 3292881, upload-time = "2026-01-21T18:40:13.089Z" },
+    { url = 
"https://files.pythonhosted.org/packages/be/f4/06eda6e91476f90a7d8058f74311cb65a2fb68d988171aced81707189131/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_aarch64.whl";,
 hash = 
"sha256:64901e08c33462acc9ec3bad27fc7a5c2b6491665f2aa57564e57a4f5d7c52ad", size 
= 3224559, upload-time = "2026-01-21T18:46:50.974Z" },
+    { url = 
"https://files.pythonhosted.org/packages/ab/a2/d2af04095412ca6345ac22b33b89fe8d6f32a481e613ffcb2377d931d8d0/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_x86_64.whl";,
 hash = 
"sha256:e8ac45e8f4eaac0f9f8043ea0e224158855c6a4329fd4ee37c45c61e3beb518e", size 
= 3262728, upload-time = "2026-01-21T18:40:14.883Z" },
+    { url = 
"https://files.pythonhosted.org/packages/31/48/1980c7caa5978a3b8225b4d230e69a2a6538a3562b8b31cea679b6933c83/sqlalchemy-2.0.46-cp313-cp313-win32.whl";,
 hash = 
"sha256:8d3b44b3d0ab2f1319d71d9863d76eeb46766f8cf9e921ac293511804d39813f", size 
= 2111295, upload-time = "2026-01-21T18:42:52.366Z" },
+    { url = 
"https://files.pythonhosted.org/packages/2d/54/f8d65bbde3d877617c4720f3c9f60e99bb7266df0d5d78b6e25e7c149f35/sqlalchemy-2.0.46-cp313-cp313-win_amd64.whl";,
 hash = 
"sha256:77f8071d8fbcbb2dd11b7fd40dedd04e8ebe2eb80497916efedba844298065ef", size 
= 2137076, upload-time = "2026-01-21T18:42:53.924Z" },
+    { url = 
"https://files.pythonhosted.org/packages/56/ba/9be4f97c7eb2b9d5544f2624adfc2853e796ed51d2bb8aec90bc94b7137e/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl";,
 hash = 
"sha256:a1e8cc6cc01da346dc92d9509a63033b9b1bda4fed7a7a7807ed385c7dccdc10", size 
= 3556533, upload-time = "2026-01-21T18:33:06.636Z" },
+    { url = 
"https://files.pythonhosted.org/packages/20/a6/b1fc6634564dbb4415b7ed6419cdfeaadefd2c39cdab1e3aa07a5f2474c2/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl";,
 hash = 
"sha256:96c7cca1a4babaaf3bfff3e4e606e38578856917e52f0384635a95b226c87764", size 
= 3523208, upload-time = "2026-01-21T18:45:08.436Z" },
+    { url = 
"https://files.pythonhosted.org/packages/a1/d8/41e0bdfc0f930ff236f86fccd12962d8fa03713f17ed57332d38af6a3782/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_aarch64.whl";,
 hash = 
"sha256:b2a9f9aee38039cf4755891a1e50e1effcc42ea6ba053743f452c372c3152b1b", size 
= 3464292, upload-time = "2026-01-21T18:33:08.208Z" },
+    { url = 
"https://files.pythonhosted.org/packages/f0/8b/9dcbec62d95bea85f5ecad9b8d65b78cc30fb0ffceeb3597961f3712549b/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_x86_64.whl";,
 hash = 
"sha256:db23b1bf8cfe1f7fda19018e7207b20cdb5168f83c437ff7e95d19e39289c447", size 
= 3473497, upload-time = "2026-01-21T18:45:10.552Z" },
+    { url = 
"https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl";,
 hash = 
"sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size 
= 1937882, upload-time = "2026-01-21T18:22:10.456Z" },
 ]
 
 [[package]]
@@ -1793,6 +1796,15 @@ wheels = [
     { url = 
"https://files.pythonhosted.org/packages/df/cb/e1da7e340586a078404c7e4328bfefc930867ace8a9a55916fd220cf9547/standard_imghdr-3.13.0-py3-none-any.whl";,
 hash = 
"sha256:30a1bff5465605bb496f842a6ac3cc1f2131bf3025b0da28d4877d6d4b7cc8e9", size 
= 4639, upload-time = "2024-10-30T16:01:13.829Z" },
 ]
 
+[[package]]
+name = "structlog"
+version = "25.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/ef/52/9ba0f43b686e7f3ddfeaa78ac3af750292662284b3661e91ad5494f21dbc/structlog-25.5.0.tar.gz";,
 hash = 
"sha256:098522a3bebed9153d4570c6d0288abf80a031dfdb2048d59a49e9dc2190fc98", size 
= 1460830, upload-time = "2025-10-27T08:28:23.028Z" }
+wheels = [
+    { url = 
"https://files.pythonhosted.org/packages/a8/45/a132b9074aa18e799b891b91ad72133c98d8042c70f6240e4c5f9dabee2f/structlog-25.5.0-py3-none-any.whl";,
 hash = 
"sha256:a8453e9b9e636ec59bd9e79bbd4a72f025981b3ba0f5837aebf48f02f37a7f9f", size 
= 72510, upload-time = "2025-10-27T08:28:21.535Z" },
+]
+
 [[package]]
 name = "text-unidecode"
 version = "1.3"
@@ -1844,6 +1856,7 @@ dependencies = [
     { name = "semver" },
     { name = "sqlmodel" },
     { name = "standard-imghdr" },
+    { name = "structlog" },
     { name = "yyjson" },
 ]
 
@@ -1903,6 +1916,7 @@ requires-dist = [
     { name = "semver", specifier = ">=3.0.4" },
     { name = "sqlmodel", specifier = "~=0.0.24" },
     { name = "standard-imghdr", specifier = ">=3.13.0" },
+    { name = "structlog", specifier = ">=25.5.0" },
     { name = "yyjson", specifier = ">=4.0.6" },
 ]
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to