This is an automated email from the ASF dual-hosted git repository.

arm pushed a commit to branch gha-distributions
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git

commit 919d61822c6c96ab319dfbf5f8ff0ea7c504704c
Author: Alastair McFarlane <[email protected]>
AuthorDate: Thu Jan 8 13:51:02 2026 +0000

    Rename GHA task as it's just for distributions now. Enable buttons to call 
it throughout the system. Monitor failed and in-progress distributions on 
finish page. Bump dependencies for vulnerability.
---
 .pre-commit-config.yaml              |   4 +-
 atr/api/__init__.py                  |   4 +-
 atr/form.py                          |   4 ++
 atr/get/distribution.py              |  85 ++++++++++++++++++++------
 atr/get/finish.py                    | 114 +++++++++++++++++++++++++++--------
 atr/models/results.py                |   6 +-
 atr/models/sql.py                    |   2 +-
 atr/post/distribution.py             | 105 +++++++++++++++++++++++++++++---
 atr/server.py                        |   3 +-
 atr/shared/distribution.py           |  13 ++--
 atr/storage/writers/distributions.py |  47 ++++++++++++++-
 atr/tasks/__init__.py                |   2 +-
 atr/tasks/gha.py                     |  93 +++++++++++++++++++++++-----
 atr/templates/check-selected.html    |   8 ++-
 uv.lock                              | 100 +++++++++++++++---------------
 15 files changed, 453 insertions(+), 137 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 346d982..26c1c25 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -92,7 +92,7 @@ repos:
       types_or: ['css']
       args: ['--fix', '--allow-empty-input']
 - repo: https://github.com/woodruffw/zizmor-pre-commit
-  rev: v1.19.0
+  rev: v1.20.0
   hooks:
     - id: zizmor
       args: [--min-severity, low]
@@ -101,7 +101,7 @@ repos:
   hooks:
     - id: pip-audit
 - repo: https://github.com/oxc-project/mirrors-oxlint
-  rev: v1.36.0
+  rev: v1.38.0
   hooks:
     - id: oxlint
       name: lint JS files with Oxlint
diff --git a/atr/api/__init__.py b/atr/api/__init__.py
index 93348be..eb3b6cf 100644
--- a/atr/api/__init__.py
+++ b/atr/api/__init__.py
@@ -279,7 +279,7 @@ async def distribution_record(data: 
models.api.DistributionRecordArgs) -> DictRe
     async with storage.write(asf_uid) as write:
         wacm = write.as_committee_member(release.committee.name)
         await wacm.distributions.record_from_data(
-            release,
+            release.name,
             data.staging,
             dd,
         )
@@ -619,7 +619,7 @@ async def publisher_distribution_record(data: 
models.api.PublisherDistributionRe
     async with storage.write(asf_uid) as write:
         wacm = write.as_committee_member(release.committee.name)
         await wacm.distributions.record_from_data(
-            release,
+            release.name,
             data.staging,
             dd,
         )
diff --git a/atr/form.py b/atr/form.py
index 6f40775..909b0e6 100644
--- a/atr/form.py
+++ b/atr/form.py
@@ -517,6 +517,10 @@ Email = pydantic.EmailStr
 
 
 class Enum[EnumType: enum.Enum]:
+    # These exist for type checkers - at runtime, the actual type is the enum
+    name: str
+    value: str | int
+
     @staticmethod
     def __class_getitem__(enum_class: type[EnumType]):
         def validator(v: Any) -> EnumType:
diff --git a/atr/get/distribution.py b/atr/get/distribution.py
index 4f166a9..9a284d7 100644
--- a/atr/get/distribution.py
+++ b/atr/get/distribution.py
@@ -29,30 +29,30 @@ import atr.util as util
 import atr.web as web
 
 
[email protected]("/distributions/list/<project>/<version>")
-async def list_get(session: web.Committer, project: str, version: str) -> str:
[email protected]("/distributions/list/<project_name>/<version_name>")
+async def list_get(session: web.Committer, project_name: str, version_name: 
str) -> str:
     async with db.session() as data:
         distributions = await data.distribution(
-            release_name=sql.release_name(project, version),
+            release_name=sql.release_name(project_name, version_name),
         ).all()
 
     block = htm.Block()
 
-    release = await shared.distribution.release_validated(project, version, 
staging=None)
+    release = await shared.distribution.release_validated(project_name, 
version_name, staging=None)
     staging = release.phase == sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT
-    render.html_nav_phase(block, project, version, staging)
+    render.html_nav_phase(block, project_name, version_name, staging)
 
     record_a_distribution = htm.a(
         ".btn.btn-primary",
         href=util.as_url(
-            stage if staging else record,
-            project=project,
-            version=version,
+            stage_record if staging else record,
+            project=project_name,
+            version=version_name,
         ),
     )["Record a distribution"]
 
     # Distribution list for project-version
-    block.h1["Distribution list for ", htm.em[f"{project}-{version}"]]
+    block.h1["Distribution list for ", 
htm.em[f"{project_name}-{version_name}"]]
     if not distributions:
         block.p["No distributions found."]
         block.p[record_a_distribution]
@@ -89,7 +89,7 @@ async def list_get(session: web.Committer, project: str, 
version: str) -> str:
 
         delete_form = form.render(
             model_cls=shared.distribution.DeleteForm,
-            action=util.as_url(post.distribution.delete, project=project, 
version=version),
+            action=util.as_url(post.distribution.delete, project=project_name, 
version=version_name),
             form_classes=".d-inline-block.m-0",
             submit_classes="btn-danger btn-sm",
             submit_label="Delete",
@@ -105,20 +105,71 @@ async def list_get(session: web.Committer, project: str, 
version: str) -> str:
         )
         block.append(htm.div(".mb-3")[delete_form])
 
-    title = f"Distribution list for {project} {version}"
+    title = f"Distribution list for {project_name} {version_name}"
     return await template.blank(title, content=block.collect())
 
 
[email protected]("/distribution/automate/<project>/<version>")
+async def automate(session: web.Committer, project: str, version: str) -> str:
+    return await _automate_form_page(project, version, staging=False)
+
+
[email protected]("/distribution/stage/automate/<project>/<version>")
+async def stage_automate(session: web.Committer, project: str, version: str) 
-> str:
+    return await _automate_form_page(project, version, staging=True)
+
+
 @get.committer("/distribution/record/<project>/<version>")
 async def record(session: web.Committer, project: str, version: str) -> str:
     return await _record_form_page(project, version, staging=False)
 
 
[email protected]("/distribution/stage/<project>/<version>")
-async def stage(session: web.Committer, project: str, version: str) -> str:
[email protected]("/distribution/stage/record/<project>/<version>")
+async def stage_record(session: web.Committer, project: str, version: str) -> 
str:
     return await _record_form_page(project, version, staging=True)
 
 
+async def _automate_form_page(project: str, version: str, staging: bool) -> 
str:
+    """Helper to render the distribution automation form page."""
+    await shared.distribution.release_validated(project, version, 
staging=staging)
+
+    block = htm.Block()
+    render.html_nav_phase(block, project, version, staging=staging)
+
+    title = "Create a staging distribution" if staging else "Create a 
distribution"
+    block.h1[title]
+
+    block.p[
+        "Create a distribution of ",
+        htm.strong[f"{project}-{version}"],
+        " using the form below.",
+    ]
+    block.p[
+        "You can also ",
+        htm.a(href=util.as_url(list_get, project_name=project, 
version_name=version))["view the distribution list"],
+        ".",
+    ]
+
+    # Determine the action based on staging
+    action = (
+        util.as_url(post.distribution.stage_automate_selected, 
project=project, version=version)
+        if staging
+        else util.as_url(post.distribution.automate_selected, project=project, 
version=version)
+    )
+
+    # TODO: Reuse the same form for now - maybe we can combine this and the 
function below adding an automate=True arg
+    # Render the distribution form
+    form_html = form.render(
+        model_cls=shared.distribution.DistributeForm,
+        submit_label="Distribute",
+        action=action,
+        defaults={"package": project, "version": version},
+    )
+    block.append(form_html)
+
+    return await template.blank(title, content=block.collect())
+
+
 async def _record_form_page(project: str, version: str, staging: bool) -> str:
     """Helper to render the distribution recording form page."""
     await shared.distribution.release_validated(project, version, 
staging=staging)
@@ -126,23 +177,23 @@ async def _record_form_page(project: str, version: str, 
staging: bool) -> str:
     block = htm.Block()
     render.html_nav_phase(block, project, version, staging=staging)
 
-    title = "Record a staging distribution" if staging else "Record a manual 
distribution"
+    title = "Record a manual staging distribution" if staging else "Record a 
manual distribution"
     block.h1[title]
 
     block.p[
-        "Record a distribution of ",
+        "Record a manual distribution of ",
         htm.strong[f"{project}-{version}"],
         " using the form below.",
     ]
     block.p[
         "You can also ",
-        htm.a(href=util.as_url(list_get, project=project, 
version=version))["view the distribution list"],
+        htm.a(href=util.as_url(list_get, project_name=project, 
version_name=version))["view the distribution list"],
         ".",
     ]
 
     # Determine the action based on staging
     action = (
-        util.as_url(post.distribution.stage_selected, project=project, 
version=version)
+        util.as_url(post.distribution.stage_record_selected, project=project, 
version=version)
         if staging
         else util.as_url(post.distribution.record_selected, project=project, 
version=version)
     )
diff --git a/atr/get/finish.py b/atr/get/finish.py
index 921bf83..fa00262 100644
--- a/atr/get/finish.py
+++ b/atr/get/finish.py
@@ -19,6 +19,7 @@
 import dataclasses
 import json
 import pathlib
+from collections.abc import Sequence
 
 import aiofiles.os
 import asfquart.base as base
@@ -42,6 +43,7 @@ import atr.mapping as mapping
 import atr.models.sql as sql
 import atr.render as render
 import atr.shared as shared
+import atr.tasks.gha as gha
 import atr.template as template
 import atr.util as util
 import atr.web as web
@@ -60,13 +62,9 @@ async def selected(
 ) -> tuple[web.QuartResponse, int] | web.WerkzeugResponse | str:
     """Finish a release preview."""
     try:
-        (
-            release,
-            source_files_rel,
-            target_dirs,
-            deletable_dirs,
-            rc_analysis,
-        ) = await _get_page_data(project_name, version_name)
+        (release, source_files_rel, target_dirs, deletable_dirs, rc_analysis, 
tasks) = await _get_page_data(
+            project_name, version_name
+        )
     except ValueError:
         async with db.session() as data:
             release_fallback = await data.release(
@@ -89,6 +87,7 @@ async def selected(
         target_dirs=target_dirs,
         deletable_dirs=deletable_dirs,
         rc_analysis=rc_analysis,
+        distribution_tasks=tasks,
     )
 
 
@@ -134,14 +133,31 @@ async def _deletable_choices(
 
 async def _get_page_data(
     project_name: str, version_name: str
-) -> tuple[sql.Release, list[pathlib.Path], set[pathlib.Path], list[tuple[str, 
str]], RCTagAnalysisResult]:
+) -> tuple[
+    sql.Release, list[pathlib.Path], set[pathlib.Path], list[tuple[str, str]], 
RCTagAnalysisResult, Sequence[sql.Task]
+]:
     """Get all the data needed to render the finish page."""
     async with db.session() as data:
+        via = sql.validate_instrumented_attribute
         release = await data.release(
             project_name=project_name,
             version=version_name,
             _committee=True,
         ).demand(base.ASFQuartException("Release does not exist", 
errorcode=404))
+        tasks = [
+            t
+            for t in (
+                await data.task(
+                    project_name=project_name,
+                    version_name=version_name,
+                    revision_number=release.latest_revision_number,
+                    task_type=sql.TaskType.DISTRIBUTION_WORKFLOW,
+                )
+                .order_by(sql.sqlmodel.desc(via(sql.Task.started)))
+                .all()
+            )
+            if t.status in [sql.TaskStatus.QUEUED, sql.TaskStatus.ACTIVE, 
sql.TaskStatus.FAILED]
+        ]
 
     if release.phase != sql.ReleasePhase.RELEASE_PREVIEW:
         raise ValueError("Release is not in preview phase")
@@ -151,7 +167,7 @@ async def _get_page_data(
     deletable_dirs = await _deletable_choices(latest_revision_dir, target_dirs)
     rc_analysis_result = await _analyse_rc_tags(latest_revision_dir)
 
-    return release, source_files_rel, target_dirs, deletable_dirs, 
rc_analysis_result
+    return release, source_files_rel, target_dirs, deletable_dirs, 
rc_analysis_result, tasks
 
 
 def _render_delete_directory_form(deletable_dirs: list[tuple[str, str]]) -> 
htm.Element:
@@ -247,6 +263,7 @@ async def _render_page(
     target_dirs: set,
     deletable_dirs: list[tuple[str, str]],
     rc_analysis: RCTagAnalysisResult,
+    distribution_tasks: Sequence[sql.Task],
 ) -> str:
     """Render the finish page using htm.py."""
     page = htm.Block()
@@ -275,8 +292,10 @@ async def _render_page(
         "such as Maven Central, PyPI, or Docker Hub."
     ]
 
-    # TODO alert
-    page.append(_render_todo_alert(release))
+    if len(distribution_tasks) > 0:
+        page.append(_render_distribution_tasks(release, distribution_tasks))
+
+    page.append(_render_distribution_buttons(release))
 
     # Move files section
     page.append(_render_move_section(max_files_to_show=10))
@@ -401,7 +420,7 @@ def _render_release_card(release: sql.Release) -> 
htm.Element:
                         version_name=release.version,
                     ),
                 )[
-                    htpy.i(".bi.bi-download"),
+                    htm.icon("download"),
                     " Download all files",
                 ],
                 htm.a(
@@ -413,7 +432,7 @@ def _render_release_card(release: sql.Release) -> 
htm.Element:
                         version_name=release.version,
                     ),
                 )[
-                    htpy.i(".bi.bi-archive"),
+                    htm.icon("archive"),
                     " Show files",
                 ],
                 htm.a(
@@ -425,7 +444,7 @@ def _render_release_card(release: sql.Release) -> 
htm.Element:
                         version_name=release.version,
                     ),
                 )[
-                    htpy.i(".bi.bi-clock-history"),
+                    htm.icon("clock-history"),
                     " Show revisions",
                 ],
                 htm.a(
@@ -437,7 +456,7 @@ def _render_release_card(release: sql.Release) -> 
htm.Element:
                         version_name=release.version,
                     ),
                 )[
-                    htpy.i(".bi.bi-check-circle"),
+                    htm.icon("check-circle"),
                     " Announce and distribute",
                 ],
             ],
@@ -446,25 +465,72 @@ def _render_release_card(release: sql.Release) -> 
htm.Element:
     return card
 
 
-def _render_todo_alert(release: sql.Release) -> htm.Element:
-    """Render the TODO alert about distribution tools."""
-    return htm.div(".alert.alert-warning.mb-4", role="alert")[
-        htm.p(".fw-semibold.mb-1")["TODO"],
+def _render_distribution_buttons(release: sql.Release) -> htm.Element:
+    """Render the distribution tool buttons."""
+    return htm.div()[
         htm.p(".mb-1")[
-            "We plan to add tools to help release managers to distribute 
release artifacts on distribution networks. "
-            "Currently you must do this manually. Once you've distributed your 
release artifacts, you can ",
             htm.a(
+                ".btn.btn-primary.me-2",
+                href=util.as_url(
+                    distribution.automate,
+                    project=release.project.name,
+                    version=release.version,
+                ),
+            )["Distribute"],
+            htm.a(
+                ".btn.btn-secondary.me-2",
                 href=util.as_url(
                     distribution.record,
                     project=release.project.name,
                     version=release.version,
-                )
-            )["record them on the ATR"],
-            ".",
+                ),
+            )["Record a manual distribution"],
         ],
     ]
 
 
+def _render_distribution_tasks(release: sql.Release, tasks: 
Sequence[sql.Task]) -> htm.Element:
+    """Render current and failed distribution tasks."""
+    failed_tasks = [t for t in tasks if t.status == sql.TaskStatus.FAILED]
+    in_progress_tasks = [t for t in tasks if t.status in 
[sql.TaskStatus.QUEUED, sql.TaskStatus.ACTIVE]]
+
+    block = htm.Block()
+
+    if len(failed_tasks) > 0:
+        summary = f"{len(failed_tasks)} distribution{'s' if len(failed_tasks) 
> 1 else ''} failed for this release"
+        block.append(
+            htm.div(".alert.alert-danger.mb-3")[
+                htm.h3["Failed distributions"],
+                htm.details[
+                    htm.summary[summary],
+                    htm.div[*[_render_task(f) for f in failed_tasks]],
+                ],
+            ]
+        )
+    if len(in_progress_tasks) > 0:
+        block.append(
+            htm.div(".alert.alert-info.mb-3")[
+                htm.h3["In-progress distributions"],
+                htm.p["One or more automatic distributions are still 
in-progress:"],
+                *[_render_task(f) for f in in_progress_tasks],
+                htm.button(
+                    ".btn.btn-success.me-2",
+                    {"onclick": "window.location.reload()"},
+                )["Refresh"],
+            ]
+        )
+    return block.collect()
+
+
+def _render_task(task: sql.Task) -> htm.Element:
+    """Render a distribution task's details."""
+    args: gha.DistributionWorkflow = 
gha.DistributionWorkflow.model_validate(task.task_args)
+    status = task.status.value
+    return htm.p[
+        f"{args.platform} ({args.package} {args.version}): {task.error if 
task.error else status.capitalize()}"
+    ]
+
+
 async def _sources_and_targets(latest_revision_dir: pathlib.Path) -> 
tuple[list[pathlib.Path], set[pathlib.Path]]:
     source_items_rel: list[pathlib.Path] = []
     target_dirs: set[pathlib.Path] = {pathlib.Path(".")}
diff --git a/atr/models/results.py b/atr/models/results.py
index 713291d..5dc1ce2 100644
--- a/atr/models/results.py
+++ b/atr/models/results.py
@@ -24,10 +24,10 @@ import atr.sbom.models.osv as osv
 from . import schema
 
 
-class GithubActionsWorkflow(schema.Strict):
+class DistributionWorkflow(schema.Strict):
     """Result of the task to run a Github workflow."""
 
-    kind: Literal["github_actions_workflow"] = schema.Field(alias="kind")
+    kind: Literal["distribution_workflow"] = schema.Field(alias="kind")
     name: str = schema.description("The name of the action being performed")
     run_id: int = schema.description("The ID of the workflow run")
     url: str = schema.description("The URL of the workflow run")
@@ -193,7 +193,7 @@ class MetadataUpdate(schema.Strict):
 
 
 Results = Annotated[
-    GithubActionsWorkflow
+    DistributionWorkflow
     | HashingCheck
     | MessageSend
     | MetadataUpdate
diff --git a/atr/models/sql.py b/atr/models/sql.py
index 02b502f..880b202 100644
--- a/atr/models/sql.py
+++ b/atr/models/sql.py
@@ -179,7 +179,7 @@ class TaskStatus(str, enum.Enum):
 
 
 class TaskType(str, enum.Enum):
-    GITHUB_ACTION_WORKFLOW = "github_action_workflow"
+    DISTRIBUTION_WORKFLOW = "distribution_workflow"
     HASHING_CHECK = "hashing_check"
     KEYS_IMPORT_FILE = "keys_import_file"
     LICENSE_FILES = "license_files"
diff --git a/atr/post/distribution.py b/atr/post/distribution.py
index 15fa395..88fb5f1 100644
--- a/atr/post/distribution.py
+++ b/atr/post/distribution.py
@@ -25,6 +25,9 @@ import atr.shared as shared
 import atr.storage as storage
 import atr.web as web
 
+_AUTOMATED_PLATFORMS = [shared.distribution.DistributionPlatform.MAVEN]
+_AUTOMATED_PLATFORMS_STAGE = [shared.distribution.DistributionPlatform.MAVEN]
+
 
 @post.committer("/distribution/delete/<project>/<version>")
 @post.form(shared.distribution.DeleteForm)
@@ -53,12 +56,82 @@ async def delete(
         )
     return await session.redirect(
         get.distribution.list_get,
-        project=project,
-        version=version,
+        project_name=project,
+        version_name=version,
         success="Distribution deleted",
     )
 
 
+async def automate_form_process_page(
+    session: web.Committer,
+    form_data: shared.distribution.DistributeForm,
+    project: str,
+    version: str,
+    /,
+    staging: bool = False,
+) -> web.WerkzeugResponse:
+    allowed_platforms = _AUTOMATED_PLATFORMS_STAGE if staging else 
_AUTOMATED_PLATFORMS
+    if form_data.platform not in allowed_platforms:
+        platform_str = form_data.platform.value
+        return await session.redirect(
+            get.distribution.stage_automate if staging else 
get.distribution.automate,
+            project=project,
+            version=version,
+            error=f"Platform {platform_str} is not supported for automated 
distribution",
+        )
+    sql_platform = form_data.platform.to_sql()  # type: ignore[attr-defined]
+    dd = distribution.Data(
+        platform=sql_platform,
+        owner_namespace=form_data.owner_namespace,
+        package=form_data.package,
+        version=form_data.version,
+        details=form_data.details,
+    )
+    release, committee = await 
shared.distribution.release_validated_and_committee(
+        project, version, staging=staging, release_policy=True
+    )
+    if release.release_policy is None or 
release.release_policy.github_repository_name == "":
+        return await session.redirect(
+            get.distribution.stage_automate if staging else 
get.distribution.automate,
+            project=project,
+            version=version,
+            error="Project does not have a release policy configured, or no 
GitHub repository is specified",
+        )
+    repo = release.release_policy.github_repository_name
+    async with 
storage.write_as_committee_member(committee_name=committee.name) as w:
+        try:
+            await w.distributions.automate(
+                release.name,
+                dd.platform,
+                dd.owner_namespace,
+                "apache",
+                repo,
+                project,
+                version,
+                release.latest_revision_number,
+                dd.package,
+                dd.version,
+                staging,
+            )
+        except storage.AccessError as e:
+            # Instead of calling record_form_page_new, redirect with error 
message
+            return await session.redirect(
+                get.distribution.stage_automate if staging else 
get.distribution.automate,
+                project=project,
+                version=version,
+                error=str(e),
+            )
+
+    # Success - redirect to distribution list with success message
+    message = "Distribution queued successfully."
+    return await session.redirect(
+        get.distribution.list_get if staging else get.finish.selected,
+        project_name=project,
+        version_name=version,
+        success=message,
+    )
+
+
 async def record_form_process_page(
     session: web.Committer,
     form_data: shared.distribution.DistributeForm,
@@ -84,14 +157,14 @@ async def record_form_process_page(
     async with 
storage.write_as_committee_member(committee_name=committee.name) as w:
         try:
             _dist, added, _metadata = await w.distributions.record_from_data(
-                release=release,
+                release=release.name,
                 staging=staging,
                 dd=dd,
             )
         except storage.AccessError as e:
             # Instead of calling record_form_page_new, redirect with error 
message
             return await session.redirect(
-                get.distribution.stage if staging else get.distribution.record,
+                get.distribution.stage_record if staging else 
get.distribution.record,
                 project=project,
                 version=version,
                 error=str(e),
@@ -101,12 +174,28 @@ async def record_form_process_page(
     message = "Distribution recorded successfully." if added else 
"Distribution was already recorded."
     return await session.redirect(
         get.distribution.list_get,
-        project=project,
-        version=version,
+        project_name=project,
+        version_name=version,
         success=message,
     )
 
 
[email protected]("/distribution/automate/<project>/<version>")
[email protected](shared.distribution.DistributeForm)
+async def automate_selected(
+    session: web.Committer, distribute_form: 
shared.distribution.DistributeForm, project: str, version: str
+) -> web.WerkzeugResponse:
+    return await automate_form_process_page(session, distribute_form, project, 
version, staging=False)
+
+
[email protected]("/distribution/stage/automate/<project>/<version>")
[email protected](shared.distribution.DistributeForm)
+async def stage_automate_selected(
+    session: web.Committer, distribute_form: 
shared.distribution.DistributeForm, project: str, version: str
+) -> web.WerkzeugResponse:
+    return await automate_form_process_page(session, distribute_form, project, 
version, staging=True)
+
+
 @post.committer("/distribution/record/<project>/<version>")
 @post.form(shared.distribution.DistributeForm)
 async def record_selected(
@@ -115,9 +204,9 @@ async def record_selected(
     return await record_form_process_page(session, distribute_form, project, 
version, staging=False)
 
 
[email protected]("/distribution/stage/<project>/<version>")
[email protected]("/distribution/stage/record/<project>/<version>")
 @post.form(shared.distribution.DistributeForm)
-async def stage_selected(
+async def stage_record_selected(
     session: web.Committer, distribute_form: 
shared.distribution.DistributeForm, project: str, version: str
 ) -> web.WerkzeugResponse:
     return await record_form_process_page(session, distribute_form, project, 
version, staging=True)
diff --git a/atr/server.py b/atr/server.py
index e39ffff..3f13739 100644
--- a/atr/server.py
+++ b/atr/server.py
@@ -335,9 +335,10 @@ def _app_setup_security_headers(app: base.QuartApp) -> 
None:
     # Both object-src 'none' and base-uri 'none' are required by ASVS v5 3.4.3 
(L2)
     # The frame-ancestors 'none' directive is required by ASVS v5 3.4.6 (L2)
     # Bootstrap uses data: URLs extensively, so we need to include that in 
img-src
+    # The script hash allows window.location.reload() and nothing else
     csp_directives = [
         "default-src 'self'",
-        "script-src 'self'",
+        "script-src 'self' 
'sha256-4TpZ3Tx5SLybDXPQaSHGuP1RU4D+pzck+02JLVY61BY=' 'unsafe-hashes'",
         "style-src 'self' 'unsafe-inline'",
         "img-src 'self' https://apache.org https://incubator.apache.org 
https://www.apache.org data:",
         "font-src 'self'",
diff --git a/atr/shared/distribution.py b/atr/shared/distribution.py
index 444d066..743c781 100644
--- a/atr/shared/distribution.py
+++ b/atr/shared/distribution.py
@@ -132,10 +132,7 @@ def html_tr_a(label: str, value: str | None) -> 
htm.Element:
 
 
 async def release_validated(
-    project: str,
-    version: str,
-    committee: bool = False,
-    staging: bool | None = None,
+    project: str, version: str, committee: bool = False, staging: bool | None 
= None, release_policy: bool = False
 ) -> sql.Release:
     match staging:
         case True:
@@ -149,6 +146,7 @@ async def release_validated(
             project_name=project,
             version=version,
             _committee=committee,
+            _release_policy=release_policy,
         ).demand(RuntimeError(f"Release {project} {version} not found"))
         if release.phase not in phase:
             raise RuntimeError(f"Release {project} {version} is not in 
{phase}")
@@ -158,12 +156,9 @@ async def release_validated(
 
 
 async def release_validated_and_committee(
-    project: str,
-    version: str,
-    *,
-    staging: bool | None = None,
+    project: str, version: str, *, staging: bool | None = None, 
release_policy: bool = False
 ) -> tuple[sql.Release, sql.Committee]:
-    release = await release_validated(project, version, committee=True, 
staging=staging)
+    release = await release_validated(project, version, committee=True, 
staging=staging, release_policy=release_policy)
     committee = release.committee
     if committee is None:
         raise RuntimeError(f"Release {project} {version} has no committee")
diff --git a/atr/storage/writers/distributions.py 
b/atr/storage/writers/distributions.py
index 2db5d85..a0748d4 100644
--- a/atr/storage/writers/distributions.py
+++ b/atr/storage/writers/distributions.py
@@ -31,6 +31,8 @@ import atr.models.distribution as distribution
 import atr.models.sql as sql
 import atr.storage as storage
 import atr.storage.outcome as outcome
+import atr.tasks.gha as gha
+import atr.util as util
 
 
 class GeneralPublic:
@@ -95,6 +97,47 @@ class CommitteeMember(CommitteeParticipant):
         self.__asf_uid = asf_uid
         self.__committee_name = committee_name
 
+    async def automate(
+        self,
+        release_name: str,
+        platform: sql.DistributionPlatform,
+        owner_namespace: str | None,
+        owner: str,
+        repo: str,
+        project_name: str,
+        version_name: str,
+        revision_number: str | None,
+        package: str,
+        version: str,
+        staging: bool,
+    ) -> sql.Task:
+        dist_task = sql.Task(
+            task_type=sql.TaskType.DISTRIBUTION_WORKFLOW,
+            task_args=gha.DistributionWorkflow(
+                name=release_name,
+                # "distribution-owner-namespace": owner_namespace, # TODO: Put 
into workflow
+                package=package,
+                version=version,
+                project_name=project_name,
+                version_name=version_name,
+                platform=platform.name,
+                owner=owner,
+                repo=repo,
+                ref="main",  # TODO: Un-hardcode
+                arguments={},
+            ).model_dump(),
+            asf_uid=util.unwrap(self.__asf_uid),
+            added=datetime.datetime.now(datetime.UTC),
+            status=sql.TaskStatus.QUEUED,
+            project_name=project_name,
+            version_name=version_name,
+            revision_number=revision_number,
+        )
+        self.__data.add(dist_task)
+        await self.__data.commit()
+        await self.__data.refresh(dist_task)
+        return dist_task
+
     async def record(
         self,
         release_name: str,
@@ -148,7 +191,7 @@ class CommitteeMember(CommitteeParticipant):
 
     async def record_from_data(
         self,
-        release: sql.Release,
+        release: str,
         staging: bool,
         dd: distribution.Data,
     ) -> tuple[sql.Distribution, bool, distribution.Metadata]:
@@ -176,7 +219,7 @@ class CommitteeMember(CommitteeParticipant):
             web_url=web_url,
         )
         dist, added = await self.record(
-            release_name=release.name,
+            release_name=release,
             platform=dd.platform,
             owner_namespace=dd.owner_namespace,
             package=dd.package,
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index dde92ed..9d89e31 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -189,7 +189,7 @@ def queued(
 
 def resolve(task_type: sql.TaskType) -> Callable[..., 
Awaitable[results.Results | None]]:  # noqa: C901
     match task_type:
-        case sql.TaskType.GITHUB_ACTION_WORKFLOW:
+        case sql.TaskType.DISTRIBUTION_WORKFLOW:
             return gha.trigger_workflow
         case sql.TaskType.HASHING_CHECK:
             return hashing.check
diff --git a/atr/tasks/gha.py b/atr/tasks/gha.py
index 3c811da..669d469 100644
--- a/atr/tasks/gha.py
+++ b/atr/tasks/gha.py
@@ -24,48 +24,76 @@ import aiohttp
 
 import atr.config as config
 import atr.log as log
+import atr.models.distribution as distribution
 import atr.models.results as results
 import atr.models.schema as schema
+import atr.models.sql as sql
+
+# import atr.shared as shared
+import atr.storage as storage
 import atr.tasks.checks as checks
 
 _BASE_URL: Final[str] = "https://api.github.com/repos";
 _IN_PROGRESS_STATUSES: Final[list[str]] = ["in_progress", "queued", 
"requested", "waiting", "pending", "expected"]
 _COMPLETED_STATUSES: Final[list[str]] = ["completed"]
 _FAILED_STATUSES: Final[list[str]] = ["failure", "startup_failure"]
-_TIMEOUT_S = 5
+_TIMEOUT_S = 60
 
 
-class GithubActionsWorkflow(schema.Strict):
+class DistributionWorkflow(schema.Strict):
     """Arguments for the task to start a Github Actions workflow."""
 
     owner: str = schema.description("Github owner of the repository")
     repo: str = schema.description("Repository in which to start the workflow")
-    workflow_id: str = schema.description("Workflow ID")
     ref: str = schema.description("Git ref to trigger the workflow")
+    package: str = schema.description("Package to distribute")
+    version: str = schema.description("Version to distribute")
+    project_name: str = schema.description("Project name in ATR")
+    version_name: str = schema.description("Version name in ATR")
+    platform: str = schema.description("Distribution platform")
     arguments: dict[str, str] = schema.description("Workflow arguments")
     name: str = schema.description("Name of the run")
 
 
[email protected]_model(GithubActionsWorkflow)
-async def trigger_workflow(args: GithubActionsWorkflow) -> results.Results | 
None:
[email protected]_model(DistributionWorkflow)
+async def trigger_workflow(args: DistributionWorkflow) -> results.Results | 
None:
     unique_id = f"{args.name}-{uuid.uuid4()}"
-    payload = {"ref": args.ref, "inputs": {"atr-id": unique_id, 
**args.arguments}}
+    workflow_id = "distribute-test.yml"
+    # release, committee = await 
shared.distribution.release_validated_and_committee(
+    #     args.project,
+    #     args.version,
+    #     staging=True, # TODO: Un-hardcode
+    # )
+    try:
+        sql_platform = sql.DistributionPlatform[args.platform]
+    except KeyError:
+        _fail(f"Invalid platform: {args.platform}")
+    payload = {
+        "ref": args.ref,
+        "inputs": {
+            "atr-id": unique_id,
+            "platform": args.platform,  # TODO: This should be one workflow 
per platform
+            "distribution-package": args.package,
+            "distribution-version": args.version,
+            **args.arguments,
+        },
+    }
     headers = {"Accept": "application/vnd.github+json", "Authorization": 
f"Bearer {config.get().GITHUB_TOKEN}"}
     log.info(
-        f"Triggering Github workflow 
{args.owner}/{args.repo}/{args.workflow_id} with args: {
+        f"Triggering Github workflow {args.owner}/{args.repo}/{workflow_id} 
with args: {
             json.dumps(args.arguments, indent=2)
         }"
     )
     async with aiohttp.ClientSession() as session:
         try:
             async with session.post(
-                
f"{_BASE_URL}/{args.owner}/{args.repo}/actions/workflows/{args.workflow_id}/dispatches",
+                
f"{_BASE_URL}/{args.owner}/{args.repo}/actions/workflows/{workflow_id}/dispatches",
                 headers=headers,
                 json=payload,
             ) as response:
                 response.raise_for_status()
         except aiohttp.ClientResponseError as e:
-            _fail(f"Failed to trigger workflow run: {e.message} ({e.status})")
+            _fail(f"Failed to trigger GitHub workflow: {e.message} 
({e.status})")
 
         run, run_id = await _find_triggered_run(session, args, headers, 
unique_id)
 
@@ -73,13 +101,46 @@ async def trigger_workflow(args: GithubActionsWorkflow) -> 
results.Results | Non
             run = await _wait_for_completion(session, args, headers, run_id, 
unique_id)
 
         if run.get("status") in _FAILED_STATUSES:
-            _fail(f"Github workflow 
{args.owner}/{args.repo}/{args.workflow_id} run {run_id} failed with error")
+            _fail(f"Github workflow {args.owner}/{args.repo}/{workflow_id} run 
{run_id} failed with error")
         if run.get("status") in _COMPLETED_STATUSES:
-            log.info(f"Workflow {args.owner}/{args.repo}/{args.workflow_id} 
run {run_id} completed successfully")
-            return results.GithubActionsWorkflow(
-                kind="github_actions_workflow", name=args.name, run_id=run_id, 
url=run.get("html_url", "")
+            log.info(f"Workflow {args.owner}/{args.repo}/{workflow_id} run 
{run_id} completed successfully")
+            await _record_distribution(
+                "committee.name",
+                "release",
+                sql_platform,
+                "",  # TODO: Needs to be set in args
+                package=args.package,
+                version=args.version,
+                staging=True,
+            )
+            return results.DistributionWorkflow(
+                kind="distribution_workflow", name=args.name, run_id=run_id, 
url=run.get("html_url", "")
             )
-        _fail(f"Timed out waiting for workflow 
{args.owner}/{args.repo}/{args.workflow_id}")
+        _fail(f"Timed out waiting for GitHub workflow 
{args.owner}/{args.repo}/{workflow_id}")
+
+
+async def _record_distribution(
+    committee_name: str,
+    release: str,
+    platform: sql.DistributionPlatform,
+    namespace: str,
+    package: str,
+    version: str,
+    staging: bool,
+):
+    log.info("Creating distribution record")
+    dd = distribution.Data(
+        platform=platform,
+        owner_namespace=namespace,
+        package=package,
+        version=version,
+        details=False,
+    )
+    async with 
storage.write_as_committee_member(committee_name=committee_name) as w:
+        try:
+            _dist, _added, _metadata = await 
w.distributions.record_from_data(release=release, staging=staging, dd=dd)
+        except storage.AccessError as e:
+            _fail(f"Failed to record distribution: {e}")
 
 
 def _fail(message: str) -> NoReturn:
@@ -89,7 +150,7 @@ def _fail(message: str) -> NoReturn:
 
 async def _find_triggered_run(
     session: aiohttp.ClientSession,
-    args: GithubActionsWorkflow,
+    args: DistributionWorkflow,
     headers: dict[str, str],
     unique_id: str,
 ) -> tuple[dict[str, Any], int]:
@@ -140,7 +201,7 @@ async def _request_and_retry(
 
 async def _wait_for_completion(
     session: aiohttp.ClientSession,
-    args: GithubActionsWorkflow,
+    args: DistributionWorkflow,
     headers: dict[str, str],
     run_id: int,
     unique_id: str,
diff --git a/atr/templates/check-selected.html 
b/atr/templates/check-selected.html
index 0bb1534..36f65fa 100644
--- a/atr/templates/check-selected.html
+++ b/atr/templates/check-selected.html
@@ -125,9 +125,15 @@
     </div>
   </div>
   {% if phase == "release_candidate_draft" %}
+    <h3 id="distribution" class="mt-4">Distribution</h3>
+    <p>
+      While this release is in draft, you can create a staging distribution. 
Use the buttons below to either create one automatically (where supported) or 
record a manual distribution performed outside of ATR.
+    </p>
     <p>
       <a class="btn btn-primary"
-         href="{{ as_url(get.distribution.stage, project=release.project.name, 
version=release.version) }}">Record a distribution</a>
+         href="{{ as_url(get.distribution.stage_automate, 
project=release.project.name, version=release.version) }}">Distribute</a>
+      <a class="btn btn-secondary"
+         href="{{ as_url(get.distribution.stage_record, 
project=release.project.name, version=release.version) }}">Record a manual 
distribution</a>
     </p>
     <h2 id="more-actions">More actions</h2>
     <h3 id="ignored-checks" class="mt-4">Ignored checks</h3>
diff --git a/uv.lock b/uv.lock
index 937a489..196e968 100644
--- a/uv.lock
+++ b/uv.lock
@@ -3,7 +3,7 @@ revision = 3
 requires-python = "==3.13.*"
 
 [options]
-exclude-newer = "2025-12-31T15:16:00Z"
+exclude-newer = "2026-01-08T12:17:26Z"
 
 [[package]]
 name = "aiofiles"
@@ -25,7 +25,7 @@ wheels = [
 
 [[package]]
 name = "aiohttp"
-version = "3.13.2"
+version = "3.13.3"
 source = { registry = "https://pypi.org/simple"; }
 dependencies = [
     { name = "aiohappyeyeballs" },
@@ -36,25 +36,25 @@ dependencies = [
     { name = "propcache" },
     { name = "yarl" },
 ]
-sdist = { url = 
"https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz";,
 hash = 
"sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size 
= 7837994, upload-time = "2025-10-28T20:59:39.937Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz";,
 hash = 
"sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size 
= 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl";,
 hash = 
"sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size 
= 732139, upload-time = "2025-10-28T20:57:02.455Z" },
-    { url = 
"https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl";,
 hash = 
"sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size 
= 490082, upload-time = "2025-10-28T20:57:04.784Z" },
-    { url = 
"https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl";,
 hash = 
"sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size 
= 489035, upload-time = "2025-10-28T20:57:06.894Z" },
-    { url = 
"https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl";,
 hash = 
"sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size 
= 1720387, upload-time = "2025-10-28T20:57:08.685Z" },
-    { url = 
"https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl";,
 hash = 
"sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size 
= 1688314, upload-time = "2025-10-28T20:57:10.693Z" },
-    { url = 
"https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl";,
 hash = 
"sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size 
= 1756317, upload-time = "2025-10-28T20:57:12.563Z" },
-    { url = 
"https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl";,
 hash = 
"sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size 
= 1858539, upload-time = "2025-10-28T20:57:14.623Z" },
-    { url = 
"https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl";,
 hash = 
"sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size 
= 1739597, upload-time = "2025-10-28T20:57:16.399Z" },
-    { url = 
"https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl";,
 hash = 
"sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size 
= 1555006, upload-time = "2025-10-28T20:57:18.288Z" },
-    { url = 
"https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl";,
 hash = 
"sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size 
= 1683220, upload-time = "2025-10-28T20:57:20.241Z" },
-    { url = 
"https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl";,
 hash = 
"sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size 
= 1712570, upload-time = "2025-10-28T20:57:22.253Z" },
-    { url = 
"https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl";,
 hash = 
"sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size 
= 1733407, upload-time = "2025-10-28T20:57:24.37Z" },
-    { url = 
"https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl";,
 hash = 
"sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size 
= 1550093, upload-time = "2025-10-28T20:57:26.257Z" },
-    { url = 
"https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl";,
 hash = 
"sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size 
= 1758084, upload-time = "2025-10-28T20:57:28.349Z" },
-    { url = 
"https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl";,
 hash = 
"sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size 
= 1716987, upload-time = "2025-10-28T20:57:30.233Z" },
-    { url = 
"https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl";,
 hash = 
"sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size 
= 425859, upload-time = "2025-10-28T20:57:32.105Z" },
-    { url = 
"https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl";,
 hash = 
"sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size 
= 452192, upload-time = "2025-10-28T20:57:34.166Z" },
+    { url = 
"https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl";,
 hash = 
"sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size 
= 734190, upload-time = "2026-01-03T17:30:45.832Z" },
+    { url = 
"https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl";,
 hash = 
"sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size 
= 491783, upload-time = "2026-01-03T17:30:47.466Z" },
+    { url = 
"https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl";,
 hash = 
"sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size 
= 490704, upload-time = "2026-01-03T17:30:49.373Z" },
+    { url = 
"https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl";,
 hash = 
"sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size 
= 1720652, upload-time = "2026-01-03T17:30:50.974Z" },
+    { url = 
"https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl";,
 hash = 
"sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size 
= 1692014, upload-time = "2026-01-03T17:30:52.729Z" },
+    { url = 
"https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl";,
 hash = 
"sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size 
= 1759777, upload-time = "2026-01-03T17:30:54.537Z" },
+    { url = 
"https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl";,
 hash = 
"sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size 
= 1861276, upload-time = "2026-01-03T17:30:56.512Z" },
+    { url = 
"https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl";,
 hash = 
"sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size 
= 1743131, upload-time = "2026-01-03T17:30:58.256Z" },
+    { url = 
"https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl";,
 hash = 
"sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size 
= 1556863, upload-time = "2026-01-03T17:31:00.445Z" },
+    { url = 
"https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl";,
 hash = 
"sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size 
= 1682793, upload-time = "2026-01-03T17:31:03.024Z" },
+    { url = 
"https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl";,
 hash = 
"sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size 
= 1716676, upload-time = "2026-01-03T17:31:04.842Z" },
+    { url = 
"https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl";,
 hash = 
"sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size 
= 1733217, upload-time = "2026-01-03T17:31:06.868Z" },
+    { url = 
"https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl";,
 hash = 
"sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size 
= 1552303, upload-time = "2026-01-03T17:31:08.958Z" },
+    { url = 
"https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl";,
 hash = 
"sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size 
= 1763673, upload-time = "2026-01-03T17:31:10.676Z" },
+    { url = 
"https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl";,
 hash = 
"sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size 
= 1721120, upload-time = "2026-01-03T17:31:12.575Z" },
+    { url = 
"https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl";,
 hash = 
"sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size 
= 427383, upload-time = "2026-01-03T17:31:14.382Z" },
+    { url = 
"https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl";,
 hash = 
"sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size 
= 453899, upload-time = "2026-01-03T17:31:15.958Z" },
 ]
 
 [[package]]
@@ -133,14 +133,14 @@ wheels = [
 
 [[package]]
 name = "anyio"
-version = "4.12.0"
+version = "4.12.1"
 source = { registry = "https://pypi.org/simple"; }
 dependencies = [
     { name = "idna" },
 ]
-sdist = { url = 
"https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz";,
 hash = 
"sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size 
= 228266, upload-time = "2025-11-28T23:37:38.911Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz";,
 hash = 
"sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size 
= 228685, upload-time = "2026-01-06T11:45:21.246Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl";,
 hash = 
"sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size 
= 113362, upload-time = "2025-11-28T23:36:57.897Z" },
+    { url = 
"https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl";,
 hash = 
"sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size 
= 113592, upload-time = "2026-01-06T11:45:19.497Z" },
 ]
 
 [[package]]
@@ -275,11 +275,11 @@ wheels = [
 
 [[package]]
 name = "certifi"
-version = "2025.11.12"
+version = "2026.1.4"
 source = { registry = "https://pypi.org/simple"; }
-sdist = { url = 
"https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz";,
 hash = 
"sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size 
= 160538, upload-time = "2025-11-12T02:54:51.517Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz";,
 hash = 
"sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size 
= 154268, upload-time = "2026-01-04T02:42:41.825Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl";,
 hash = 
"sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size 
= 159438, upload-time = "2025-11-12T02:54:49.735Z" },
+    { url = 
"https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl";,
 hash = 
"sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size 
= 152900, upload-time = "2026-01-04T02:42:40.15Z" },
 ]
 
 [[package]]
@@ -568,11 +568,11 @@ sdist = { url = 
"https://files.pythonhosted.org/packages/81/91/462c781b7f4e141eb
 
 [[package]]
 name = "filelock"
-version = "3.20.1"
+version = "3.20.2"
 source = { registry = "https://pypi.org/simple"; }
-sdist = { url = 
"https://files.pythonhosted.org/packages/a7/23/ce7a1126827cedeb958fc043d61745754464eb56c5937c35bbf2b8e26f34/filelock-3.20.1.tar.gz";,
 hash = 
"sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c", size 
= 19476, upload-time = "2025-12-15T23:54:28.027Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/c1/e0/a75dbe4bca1e7d41307323dad5ea2efdd95408f74ab2de8bd7dba9b51a1a/filelock-3.20.2.tar.gz";,
 hash = 
"sha256:a2241ff4ddde2a7cebddf78e39832509cb045d18ec1a09d7248d6bfc6bfbbe64", size 
= 19510, upload-time = "2026-01-02T15:33:32.582Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/e3/7f/a1a97644e39e7316d850784c642093c99df1290a460df4ede27659056834/filelock-3.20.1-py3-none-any.whl";,
 hash = 
"sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a", size 
= 16666, upload-time = "2025-12-15T23:54:26.874Z" },
+    { url = 
"https://files.pythonhosted.org/packages/9a/30/ab407e2ec752aa541704ed8f93c11e2a5d92c168b8a755d818b74a3c5c2d/filelock-3.20.2-py3-none-any.whl";,
 hash = 
"sha256:fbba7237d6ea277175a32c54bb71ef814a8546d8601269e1bfc388de333974e8", size 
= 16697, upload-time = "2026-01-02T15:33:31.133Z" },
 ]
 
 [[package]]
@@ -817,11 +817,11 @@ wheels = [
 
 [[package]]
 name = "json5"
-version = "0.12.1"
+version = "0.13.0"
 source = { registry = "https://pypi.org/simple"; }
-sdist = { url = 
"https://files.pythonhosted.org/packages/12/ae/929aee9619e9eba9015207a9d2c1c54db18311da7eb4dcf6d41ad6f0eb67/json5-0.12.1.tar.gz";,
 hash = 
"sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990", size 
= 52191, upload-time = "2025-08-12T19:47:42.583Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/77/e8/a3f261a66e4663f22700bc8a17c08cb83e91fbf086726e7a228398968981/json5-0.13.0.tar.gz";,
 hash = 
"sha256:b1edf8d487721c0bf64d83c28e91280781f6e21f4a797d3261c7c828d4c165bf", size 
= 52441, upload-time = "2026-01-01T19:42:14.99Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/85/e2/05328bd2621be49a6fed9e3030b1e51a2d04537d3f816d211b9cc53c5262/json5-0.12.1-py3-none-any.whl";,
 hash = 
"sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5", size 
= 36119, upload-time = "2025-08-12T19:47:41.131Z" },
+    { url = 
"https://files.pythonhosted.org/packages/d7/9e/038522f50ceb7e74f1f991bf1b699f24b0c2bbe7c390dd36ad69f4582258/json5-0.13.0-py3-none-any.whl";,
 hash = 
"sha256:9a08e1dd65f6a4d4c6fa82d216cf2477349ec2346a38fd70cc11d2557499fbcc", size 
= 36163, upload-time = "2026-01-01T19:42:13.962Z" },
 ]
 
 [[package]]
@@ -835,7 +835,7 @@ wheels = [
 
 [[package]]
 name = "jsonschema"
-version = "4.25.1"
+version = "4.26.0"
 source = { registry = "https://pypi.org/simple"; }
 dependencies = [
     { name = "attrs" },
@@ -843,9 +843,9 @@ dependencies = [
     { name = "referencing" },
     { name = "rpds-py" },
 ]
-sdist = { url = 
"https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz";,
 hash = 
"sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size 
= 357342, upload-time = "2025-08-18T17:03:50.038Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz";,
 hash = 
"sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size 
= 366583, upload-time = "2026-01-07T13:41:07.246Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl";,
 hash = 
"sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size 
= 90040, upload-time = "2025-08-18T17:03:48.373Z" },
+    { url = 
"https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl";,
 hash = 
"sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size 
= 90630, upload-time = "2026-01-07T13:41:05.306Z" },
 ]
 
 [package.optional-dependencies]
@@ -1050,11 +1050,11 @@ wheels = [
 
 [[package]]
 name = "pathspec"
-version = "0.12.1"
+version = "1.0.2"
 source = { registry = "https://pypi.org/simple"; }
-sdist = { url = 
"https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz";,
 hash = 
"sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size 
= 51043, upload-time = "2023-12-10T22:30:45Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/41/b9/6eb731b52f132181a9144bbe77ff82117f6b2d2fbfba49aaab2c014c4760/pathspec-1.0.2.tar.gz";,
 hash = 
"sha256:fa32b1eb775ed9ba8d599b22c5f906dc098113989da2c00bf8b210078ca7fb92", size 
= 130502, upload-time = "2026-01-08T04:33:27.613Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl";,
 hash = 
"sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size 
= 31191, upload-time = "2023-12-10T22:30:43.14Z" },
+    { url = 
"https://files.pythonhosted.org/packages/78/6b/14fc9049d78435fd29e82846c777bd7ed9c470013dc8d0260fff3ff1c11e/pathspec-1.0.2-py3-none-any.whl";,
 hash = 
"sha256:62f8558917908d237d399b9b338ef455a814801a4688bc41074b25feefd93472", size 
= 54844, upload-time = "2026-01-08T04:33:26.4Z" },
 ]
 
 [[package]]
@@ -1356,15 +1356,15 @@ wheels = [
 
 [[package]]
 name = "pyright"
-version = "1.1.407"
+version = "1.1.408"
 source = { registry = "https://pypi.org/simple"; }
 dependencies = [
     { name = "nodeenv" },
     { name = "typing-extensions" },
 ]
-sdist = { url = 
"https://files.pythonhosted.org/packages/a6/1b/0aa08ee42948b61745ac5b5b5ccaec4669e8884b53d31c8ec20b2fcd6b6f/pyright-1.1.407.tar.gz";,
 hash = 
"sha256:099674dba5c10489832d4a4b2d302636152a9a42d317986c38474c76fe562262", size 
= 4122872, upload-time = "2025-10-24T23:17:15.145Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz";,
 hash = 
"sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size 
= 4400578, upload-time = "2026-01-08T08:07:38.795Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/dc/93/b69052907d032b00c40cb656d21438ec00b3a471733de137a3f65a49a0a0/pyright-1.1.407-py3-none-any.whl";,
 hash = 
"sha256:6dd419f54fcc13f03b52285796d65e639786373f433e243f8b94cf93a7444d21", size 
= 5997008, upload-time = "2025-10-24T23:17:13.159Z" },
+    { url = 
"https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl";,
 hash = 
"sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size 
= 6399144, upload-time = "2026-01-08T08:07:37.082Z" },
 ]
 
 [[package]]
@@ -1446,11 +1446,11 @@ wheels = [
 
 [[package]]
 name = "python-gnupg"
-version = "0.5.5"
+version = "0.5.6"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = 
"https://files.pythonhosted.org/packages/42/d0/72a14a79f26c6119b281f6ccc475a787432ef155560278e60df97ce68a86/python-gnupg-0.5.5.tar.gz",
 hash = 
"sha256:3fdcaf76f60a1b948ff8e37dc398d03cf9ce7427065d583082b92da7a4ff5a63", size 
= 66467, upload-time = "2025-08-04T19:26:55.778Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/98/2c/6cd2c7cff4bdbb434be5429ef6b8e96ee6b50155551361f30a1bb2ea3c1d/python_gnupg-0.5.6.tar.gz",
 hash = 
"sha256:5743e96212d38923fc19083812dc127907e44dbd3bcf0db4d657e291d3c21eac", size 
= 66825, upload-time = "2025-12-31T17:19:33.19Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/aa/19/c147f78cc18c8788f54d4a16a22f6c05deba85ead5672d3ddf6dcba5a5fe/python_gnupg-0.5.5-py2.py3-none-any.whl",
 hash = 
"sha256:51fa7b8831ff0914bc73d74c59b99c613de7247b91294323c39733bb85ac3fc1", size 
= 21916, upload-time = "2025-08-04T19:26:54.307Z" },
+    { url = 
"https://files.pythonhosted.org/packages/d2/ab/0ea9de971caf3cd2e268d2b05dfe9883b21cfe686a59249bd2dccb4bae33/python_gnupg-0.5.6-py2.py3-none-any.whl",
 hash = 
"sha256:b5050a55663d8ab9fcc8d97556d229af337a87a3ebebd7054cbd8b7e2043394a", size 
= 22082, upload-time = "2025-12-31T17:16:22.743Z" },
 ]
 
 [[package]]
@@ -1984,11 +1984,11 @@ wheels = [
 
 [[package]]
 name = "urllib3"
-version = "2.6.2"
+version = "2.6.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = 
"https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz",
 hash = 
"sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size 
= 432930, upload-time = "2025-12-11T15:56:40.252Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz",
 hash = 
"sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size 
= 435556, upload-time = "2026-01-07T16:24:43.925Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl",
 hash = 
"sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size 
= 131182, upload-time = "2025-12-11T15:56:38.584Z" },
+    { url = 
"https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl",
 hash = 
"sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size 
= 131584, upload-time = "2026-01-07T16:24:42.685Z" },
 ]
 
 [[package]]
@@ -2007,16 +2007,16 @@ wheels = [
 
 [[package]]
 name = "virtualenv"
-version = "20.35.4"
+version = "20.36.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "distlib" },
     { name = "filelock" },
     { name = "platformdirs" },
 ]
-sdist = { url = 
"https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846bd9df527390ecc26b3805a0c5989048c210e22c5ca9/virtualenv-20.35.4.tar.gz",
 hash = 
"sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c", size 
= 6028799, upload-time = "2025-10-29T06:57:40.511Z" }
+sdist = { url = 
"https://files.pythonhosted.org/packages/78/49/87e23d8f742f10f965bce5d6b285fc88a4f436b11daf6b6225d4d66f8492/virtualenv-20.36.0.tar.gz",
 hash = 
"sha256:a3601f540b515a7983508113f14e78993841adc3d83710fa70f0ac50f43b23ed", size 
= 6032237, upload-time = "2026-01-07T17:20:04.975Z" }
 wheels = [
-    { url = 
"https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl",
 hash = 
"sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size 
= 6005095, upload-time = "2025-10-29T06:57:37.598Z" },
+    { url = 
"https://files.pythonhosted.org/packages/eb/6a/0af36875e0023a1f2d0b66b4051721fc26740e947696922df1665b75e5d3/virtualenv-20.36.0-py3-none-any.whl",
 hash = 
"sha256:e7ded577f3af534fd0886d4ca03277f5542053bedb98a70a989d3c22cfa5c9ac", size 
= 6008261, upload-time = "2026-01-07T17:20:02.87Z" },
 ]
 
 [[package]]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to