This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git
The following commit(s) were added to refs/heads/main by this push:
new 8a787ee Add an admin route and script to fetch keys from external
sources
8a787ee is described below
commit 8a787ee1366bc6ee9d6328d043653a7324c3406d
Author: Sean B. Palmer <[email protected]>
AuthorDate: Fri Jun 13 16:37:59 2025 +0100
Add an admin route and script to fetch keys from external sources
---
atr/blueprints/admin/admin.py | 80 +++++++++++--
atr/blueprints/admin/templates/update-keys.html | 145 ++++++++++++++++++++++++
atr/db/interaction.py | 105 ++++++++++++++++-
atr/routes/keys.py | 108 ++----------------
atr/routes/vote.py | 1 +
atr/templates/includes/sidebar.html | 4 +-
poetry.lock | 92 ++++++++++++++-
pyproject.toml | 1 +
scripts/keys_import.py | 60 ++++++++++
9 files changed, 482 insertions(+), 114 deletions(-)
diff --git a/atr/blueprints/admin/admin.py b/atr/blueprints/admin/admin.py
index 2743870..9698726 100644
--- a/atr/blueprints/admin/admin.py
+++ b/atr/blueprints/admin/admin.py
@@ -97,8 +97,16 @@ class LdapLookupForm(util.QuartFormTyped):
submit = wtforms.SubmitField("Lookup")
[email protected]("/all-releases")
+async def admin_all_releases() -> str:
+ """Display a list of all releases across all phases."""
+ async with db.session() as data:
+ releases = await data.release(_project=True,
_committee=True).order_by(models.Release.name).all()
+ return await template.render("all-releases.html", releases=releases,
release_as_url=mapping.release_as_url)
+
+
@admin.BLUEPRINT.route("/browse-as", methods=["GET", "POST"])
-async def browse_as() -> str | response.Response:
+async def admin_browse_as() -> str | response.Response:
"""Allows an admin to browse as another user."""
# TODO: Enable this in debugging mode only?
from atr.routes import root
@@ -122,7 +130,7 @@ async def browse_as() -> str | response.Response:
if not ldap_params.results_list:
await quart.flash(f"User '{new_uid}' not found in LDAP.", "error")
- return quart.redirect(quart.url_for("admin.browse_as"))
+ return quart.redirect(quart.url_for("admin.admin_browse_as"))
ldap_projects_data = await apache.get_ldap_projects_data()
committee_data = await apache.get_active_committee_data()
@@ -347,6 +355,32 @@ async def admin_env() -> quart.wrappers.response.Response:
return quart.Response("\n".join(env_vars), mimetype="text/plain")
[email protected]("/keys/update", methods=["GET", "POST"])
+async def admin_keys_update() -> str | response.Response | tuple[Mapping[str,
Any], int]:
+ """Update keys from remote data."""
+ if quart.request.method == "POST":
+ try:
+            success_count, failure_count = await _update_keys()
+            return {
+                "message": f"Updated keys: {success_count} succeeded,
{failure_count} failed",
+ "category": "success",
+ }, 200
+ except httpx.RequestError as e:
+ return {
+ "message": f"Failed to fetch data: {e!s}",
+ "category": "error",
+ }, 200
+ except Exception as e:
+ return {
+                "message": f"Failed to update keys: {e!s}",
+ "category": "error",
+ }, 200
+
+ # For GET requests, show the update form
+ empty_form = await util.EmptyForm.create_form()
+ return await template.render("update-keys.html", empty_form=empty_form)
+
+
@admin.BLUEPRINT.route("/performance")
async def admin_performance() -> str:
"""Display performance statistics for all routes."""
@@ -455,14 +489,6 @@ async def admin_projects_update() -> str |
response.Response | tuple[Mapping[str
return await template.render("update-projects.html", empty_form=empty_form)
[email protected]("/all-releases")
-async def admin_all_releases() -> str:
- """Display a list of all releases across all phases."""
- async with db.session() as data:
- releases = await data.release(_project=True,
_committee=True).order_by(models.Release.name).all()
- return await template.render("all-releases.html", releases=releases,
release_as_url=mapping.release_as_url)
-
-
@admin.BLUEPRINT.route("/tasks")
async def admin_tasks() -> str:
return await template.render("tasks.html")
@@ -765,6 +791,40 @@ async def _update_committees(
return added_count, updated_count
+async def _update_keys() -> tuple[int, int]:
+ import httpx
+
+ successes = 0
+ failures = 0
+ async with db.session() as data:
+ # Get all committees
+ committees = await data.committee().all()
+ for committee in committees:
+ # Get the KEYS file
+ async with httpx.AsyncClient() as client:
+ response = await
client.get(f"https://downloads.apache.org/{committee.name}/KEYS")
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError as e:
+ _LOGGER.error(f"Failed to fetch KEYS file for
{committee.name}: {e!s}")
+ continue
+ keys_data = await response.aread()
+ keys_text = keys_data.decode("utf-8", errors="replace")
+ # results, success_count, error_count, submitted_committees
+ try:
+ _result, yes, no, _committees = await interaction.upload_keys(
+ [committee.name], keys_text, [committee.name]
+ )
+ except interaction.InteractionError as e:
+ _LOGGER.error(f"Failed to update keys for {committee.name}:
{e!s}")
+ continue
+ _LOGGER.info(f"Updated keys for {committee.name}: {yes} successes,
{no} failures")
+ successes += yes
+ failures += no
+
+ return successes, failures
+
+
async def _update_metadata() -> tuple[int, int]:
ldap_projects = await apache.get_ldap_projects_data()
projects = await apache.get_projects_data()
diff --git a/atr/blueprints/admin/templates/update-keys.html
b/atr/blueprints/admin/templates/update-keys.html
new file mode 100644
index 0000000..6fa65fb
--- /dev/null
+++ b/atr/blueprints/admin/templates/update-keys.html
@@ -0,0 +1,145 @@
+{% extends "layouts/base-admin.html" %}
+
+{% block title %}
+ Update keys ~ ATR
+{% endblock title %}
+
+{% block description %}
+ Update keys from remote data sources.
+{% endblock description %}
+
+{% block stylesheets %}
+ {{ super() }}
+ <style>
+ .page-form-group {
+ margin-bottom: 1rem;
+ }
+
+ button {
+ margin-top: 1rem;
+ padding: 0.5rem 1rem;
+ background: #036;
+ color: white;
+ border: none;
+ border-radius: 4px;
+ cursor: pointer;
+ font-weight: 500;
+ }
+
+ button:hover {
+ background: #047;
+ }
+
+ button:disabled {
+ color: gray;
+ }
+
+ .page-warning {
+ margin: 1.5rem 0;
+ padding: 1rem;
+ background: #fff3cd;
+ border: 1px solid #ffeeba;
+ border-radius: 4px;
+ color: #856404;
+ }
+
+ .page-warning p:last-child {
+ margin-bottom: 0;
+ }
+
+ .page-warning strong {
+ color: #533f03;
+ }
+
+ .page-status-message {
+ margin: 1.5rem 0;
+ padding: 1rem;
+ border-radius: 4px;
+ }
+
+ .page-status-message.success {
+ background: #d4edda;
+ border: 1px solid #c3e6cb;
+ color: #155724;
+ }
+
+ .page-status-message.error {
+ background: #f8d7da;
+ border: 1px solid #f5c6cb;
+ color: #721c24;
+ }
+ </style>
+{% endblock stylesheets %}
+
+{% block content %}
+ <h1>Update keys</h1>
+ <p>
+ This page allows you to update keys in the database from remote data
sources.
+ </p>
+
+ <div class="page-warning">
+ <p>
+ <strong>Note:</strong> This operation will update all keys from remote
KEYS files.
+ </p>
+ </div>
+
+ <div id="status"></div>
+
+ <form action="javascript:submitForm().then(_ => { return false; })">
+ {{ empty_form.hidden_tag() }}
+
+        <button type="submit" id="submitButton">Update keys</button>
+ </form>
+
+ <script>
+ const submitForm = async () => {
+ const button = document.getElementById("submitButton");
+ button.disabled = true;
+ document.body.style.cursor = "wait";
+
+ const statusElement = document.getElementById("status");
+ while (statusElement.firstChild) {
+ statusElement.firstChild.remove();
+ }
+
+ const csrfToken =
document.querySelector("input[name='csrf_token']").value;
+
+ try {
+ const response = await fetch(window.location.href, {
+ method: "POST",
+ headers: {
+ "X-CSRFToken": csrfToken
+ }
+ });
+
+ if (!response.ok) {
+ addStatusMessage(statusElement, "Could not make network
request", "error");
+ return
+ }
+
+ const data = await response.json();
+ addStatusMessage(statusElement, data.message, data.category)
+ } catch (error) {
+ addStatusMessage(statusElement, error, "error")
+ } finally {
+ button.disabled = false;
+ document.body.style.cursor = "default";
+ }
+ };
+
+ function addStatusMessage(parentElement, message, category) {
+ const divElement = document.createElement("div");
+ divElement.classList.add("page-status-message");
+ divElement.classList.add(category);
+ if (category === "error") {
+ const prefixElement = document.createElement("strong");
+ const textElement = document.createTextNode("Error: ");
+ prefixElement.appendChild(textElement);
+ divElement.appendChild(prefixElement);
+ }
+ const textNode = document.createTextNode(message);
+ divElement.appendChild(textNode);
+ parentElement.appendChild(divElement);
+ }
+ </script>
+{% endblock content %}
diff --git a/atr/db/interaction.py b/atr/db/interaction.py
index 2b43a3a..193e635 100644
--- a/atr/db/interaction.py
+++ b/atr/db/interaction.py
@@ -46,6 +46,14 @@ class ApacheUserMissingError(RuntimeError):
self.primary_uid = primary_uid
+class InteractionError(RuntimeError):
+ pass
+
+
+class PublicKeyError(RuntimeError):
+ pass
+
+
class PathInfo(schema.Strict):
artifacts: set[pathlib.Path] = schema.factory(set)
errors: dict[pathlib.Path, list[models.CheckResult]] = schema.factory(dict)
@@ -63,7 +71,7 @@ async def ephemeral_gpg_home() -> AsyncGenerator[str]:
async def key_user_add(asf_uid: str | None, public_key: str,
selected_committees: list[str]) -> list[dict]:
if not public_key:
- raise RuntimeError("Public key is required")
+ raise PublicKeyError("Public key is required")
# Validate the key using GPG and get its properties
keys = await _key_user_add_validate_key_properties(public_key)
@@ -77,7 +85,7 @@ async def key_user_add(asf_uid: str | None, public_key: str,
selected_committees
asf_uid = match.group(1).lower()
break
else:
- _LOGGER.warning(f"key_user_add called with no ASF UID found in
key UIDs: {key.get('uids')}")
+ # _LOGGER.warning(f"key_user_add called with no ASF UID found
in key UIDs: {key.get('uids')}")
for uid_str in key.get("uids", []):
if asf_uid := await
asyncio.to_thread(_asf_uid_from_uid_str, uid_str):
break
@@ -282,6 +290,32 @@ async def unfinished_releases(asfuid: str) -> dict[str,
list[models.Release]]:
return releases
+async def upload_keys(
+ user_committees: list[str], keys_text: str, selected_committees: list[str]
+) -> tuple[list[dict], int, int, list[str]]:
+ key_blocks = util.parse_key_blocks(keys_text)
+ if not key_blocks:
+ raise InteractionError("No valid GPG keys found in the uploaded file")
+
+ # Ensure that the selected committees are ones of which the user is
actually a member
+ invalid_committees = [committee for committee in selected_committees if
(committee not in user_committees)]
+ if invalid_committees:
+ raise InteractionError(f"Invalid committee selection: {',
'.join(invalid_committees)}")
+
+ # TODO: Do we modify this? Store a copy just in case, for the template to
use
+ submitted_committees = selected_committees[:]
+
+ # Process each key block
+ results = await _upload_process_key_blocks(key_blocks, selected_committees)
+ # if not results:
+ # raise InteractionError("No keys were added")
+
+ success_count = sum(1 for result in results if result["status"] ==
"success")
+ error_count = len(results) - success_count
+
+ return results, success_count, error_count, submitted_committees
+
+
def _asf_uid_from_uid_str(uid_str: str) -> str | None:
if not (email_match := re.search(r"<([^>]+)>", uid_str)):
return None
@@ -297,7 +331,6 @@ def _asf_uid_from_uid_str(uid_str: str) -> str | None:
def _key_latest_self_signature(key: dict) -> datetime.datetime | None:
- print(key)
fingerprint = key["fingerprint"]
# TODO: Only 64 bits, which is not at all secure
fingerprint_suffix = fingerprint[-16:]
@@ -327,7 +360,7 @@ async def _key_user_add_validate_key_properties(public_key:
str) -> list[dict]:
import_result = await asyncio.to_thread(gpg.import_keys, public_key)
if not import_result.fingerprints:
- raise RuntimeError("Invalid public key format or failed import")
+ raise PublicKeyError("Invalid public key format or failed import")
# List keys to get details
keys = await asyncio.to_thread(gpg.list_keys,
keys=import_result.fingerprints, sigs=True)
@@ -349,7 +382,7 @@ async def _key_user_add_validate_key_properties(public_key:
str) -> list[dict]:
# https://infra.apache.org/release-signing.html#note
# Says that keys must be at least 2048 bits
if (key.get("algo") == "1") and (int(key.get("length", "0")) < 2048):
- raise RuntimeError("RSA Key is not long enough; must be at least
2048 bits")
+ raise PublicKeyError("RSA Key is not long enough; must be at least
2048 bits")
results.append(key)
return results
@@ -388,3 +421,65 @@ async def _successes_errors_warnings(
for error in errors:
if primary_rel_path := error.primary_rel_path:
info.errors.setdefault(pathlib.Path(primary_rel_path),
[]).append(error)
+
+
+async def _upload_process_key_blocks(key_blocks: list[str],
selected_committees: list[str]) -> list[dict]:
+ """Process GPG key blocks and add them to the user's account."""
+ results: list[dict] = []
+
+ # Process each key block
+ for i, key_block in enumerate(key_blocks):
+ try:
+ added_keys = await key_user_add(None, key_block,
selected_committees)
+ for key_info in added_keys:
+ key_info["status"] = key_info.get("status", "success")
+ key_info["email"] = key_info.get("email", "Unknown")
+ key_info["committee_statuses"] =
key_info.get("committee_statuses", {})
+ results.append(key_info)
+ if not added_keys:
+ results.append(
+ {
+ "status": "error",
+ "message": "Failed to process key (key_user_add
returned None)",
+ "key_id": f"Key #{i + 1}",
+ "fingerprint": "Unknown",
+ "user_id": "Unknown",
+ "email": "Unknown",
+ "committee_statuses": {},
+ }
+ )
+ except (InteractionError, PublicKeyError) as e:
+ # logging.warning(f"InteractionError processing key #{i + 1}: {e}")
+ results.append(
+ {
+ "status": "error",
+ "message": f"Validation Error: {e}",
+ "key_id": f"Key #{i + 1}",
+ "fingerprint": "Invalid",
+ "user_id": "Unknown",
+ "email": "Unknown",
+ "committee_statuses": {},
+ }
+ )
+ except Exception as e:
+ logging.exception(f"Exception processing key #{i + 1}:")
+ fingerprint, user_id = "Unknown", "None"
+ if isinstance(e, ApacheUserMissingError):
+ fingerprint = e.fingerprint or "Unknown"
+ user_id = e.primary_uid or "None"
+ results.append(
+ {
+ "status": "error",
+ "message": f"Internal Exception: {e}",
+ "key_id": f"Key #{i + 1}",
+ "fingerprint": fingerprint,
+ "user_id": user_id,
+ "email": user_id,
+ "committee_statuses": {},
+ }
+ )
+
+ # Primary key is email, secondary key is fingerprint
+ results_sorted = sorted(results, key=lambda x: (x.get("email",
"").lower(), x.get("fingerprint", "")))
+
+ return results_sorted
diff --git a/atr/routes/keys.py b/atr/routes/keys.py
index eea7320..ab86ce0 100644
--- a/atr/routes/keys.py
+++ b/atr/routes/keys.py
@@ -186,9 +186,12 @@ async def import_selected_revision(
if release.committee is None:
raise routes.FlashError("No committee found for release")
selected_committees = [release.committee.name]
- _upload_results, success_count, error_count, submitted_committees = await
_upload_keys(
- session, keys_text, selected_committees
- )
+ try:
+ upload_results, success_count, error_count, submitted_committees =
await interaction.upload_keys(
+ session.committees + session.projects, keys_text,
selected_committees
+ )
+ except interaction.InteractionError as e:
+ return await session.redirect(compose.selected, error=str(e))
message = f"Uploaded {success_count} keys,"
if error_count > 0:
message += f" failed to upload {error_count} keys for {',
'.join(submitted_committees)}"
@@ -497,9 +500,12 @@ async def upload(session: routes.CommitterSession) -> str:
keys_content = await asyncio.to_thread(key_file.read)
keys_text = keys_content.decode("utf-8", errors="replace")
- upload_results, success_count, error_count, submitted_committees =
await _upload_keys(
- session, keys_text, selected_committees
- )
+ try:
+ upload_results, success_count, error_count, submitted_committees =
await interaction.upload_keys(
+ project_list, keys_text, selected_committees
+ )
+ except interaction.InteractionError as e:
+ return await render(error=str(e))
# We use results in a closure
# So we have to mutate it, not replace it
results[:] = upload_results
@@ -516,96 +522,6 @@ async def upload(session: routes.CommitterSession) -> str:
return await render()
-async def _upload_keys(
- session: routes.CommitterSession, keys_text: str, selected_committees:
list[str]
-) -> tuple[list[dict], int, int, list[str]]:
- key_blocks = util.parse_key_blocks(keys_text)
- if not key_blocks:
- raise routes.FlashError("No valid GPG keys found in the uploaded file")
-
- # Ensure that the selected committees are ones of which the user is
actually a member
- invalid_committees = [
- committee for committee in selected_committees if (committee not in
(session.committees + session.projects))
- ]
- if invalid_committees:
- raise routes.FlashError(f"Invalid committee selection: {',
'.join(invalid_committees)}")
-
- # TODO: Do we modify this? Store a copy just in case, for the template to
use
- submitted_committees = selected_committees[:]
-
- # Process each key block
- results = await _upload_process_key_blocks(key_blocks, selected_committees)
- if not results:
- raise routes.FlashError("No keys were added")
-
- success_count = sum(1 for result in results if result["status"] ==
"success")
- error_count = len(results) - success_count
-
- return results, success_count, error_count, submitted_committees
-
-
-async def _upload_process_key_blocks(key_blocks: list[str],
selected_committees: list[str]) -> list[dict]:
- """Process GPG key blocks and add them to the user's account."""
- results: list[dict] = []
-
- # Process each key block
- for i, key_block in enumerate(key_blocks):
- try:
- added_keys = await interaction.key_user_add(None, key_block,
selected_committees)
- for key_info in added_keys:
- key_info["status"] = key_info.get("status", "success")
- key_info["email"] = key_info.get("email", "Unknown")
- key_info["committee_statuses"] =
key_info.get("committee_statuses", {})
- results.append(key_info)
- if not added_keys:
- results.append(
- {
- "status": "error",
- "message": "Failed to process key (key_user_add
returned None)",
- "key_id": f"Key #{i + 1}",
- "fingerprint": "Unknown",
- "user_id": "Unknown",
- "email": "Unknown",
- "committee_statuses": {},
- }
- )
- except routes.FlashError as e:
- logging.warning(f"FlashError processing key #{i + 1}: {e}")
- results.append(
- {
- "status": "error",
- "message": f"Validation Error: {e}",
- "key_id": f"Key #{i + 1}",
- "fingerprint": "Invalid",
- "user_id": "Unknown",
- "email": "Unknown",
- "committee_statuses": {},
- }
- )
- except Exception as e:
- logging.exception(f"Exception processing key #{i + 1}:")
- fingerprint, user_id = "Unknown", "None"
- if isinstance(e, interaction.ApacheUserMissingError):
- fingerprint = e.fingerprint or "Unknown"
- user_id = e.primary_uid or "None"
- results.append(
- {
- "status": "error",
- "message": f"Internal Exception: {e}",
- "key_id": f"Key #{i + 1}",
- "fingerprint": fingerprint,
- "user_id": user_id,
- "email": user_id,
- "committee_statuses": {},
- }
- )
-
- # Primary key is email, secondary key is fingerprint
- results_sorted = sorted(results, key=lambda x: (x.get("email",
"").lower(), x.get("fingerprint", "")))
-
- return results_sorted
-
-
async def _write_keys_file(
project: models.Project,
base_finished_dir: pathlib.Path,
diff --git a/atr/routes/vote.py b/atr/routes/vote.py
index 26ee96b..8c2690b 100644
--- a/atr/routes/vote.py
+++ b/atr/routes/vote.py
@@ -169,6 +169,7 @@ async def _task_archive_url(task_mid: str) -> str | None:
async with httpx.AsyncClient() as client:
response = await client.get(url)
response.raise_for_status()
+ # TODO: Check whether this blocks from network
email_data = response.json()
mid = email_data["mid"]
if not isinstance(mid, str):
diff --git a/atr/templates/includes/sidebar.html
b/atr/templates/includes/sidebar.html
index 40609fb..d568fcd 100644
--- a/atr/templates/includes/sidebar.html
+++ b/atr/templates/includes/sidebar.html
@@ -138,8 +138,8 @@
</li>
<li>
<i class="bi bi-person-plus"></i>
- <a href="{{ url_for('admin.browse_as') }}"
- {% if request.endpoint == 'admin.browse_as' %}class="active"{%
endif %}>Browse as user</a>
+ <a href="{{ url_for('admin.admin_browse_as') }}"
+ {% if request.endpoint == 'admin.admin_browse_as'
%}class="active"{% endif %}>Browse as user</a>
</li>
</ul>
{% endif %}
diff --git a/poetry.lock b/poetry.lock
index 34ed16e..57f38af 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2191,6 +2191,96 @@ files = [
{file = "pyhumps-3.8.0.tar.gz", hash =
"sha256:498026258f7ee1a8e447c2e28526c0bea9407f9a59c03260aee4bd6c04d681a3"},
]
+[[package]]
+name = "pyinstrument"
+version = "5.0.2"
+description = "Call stack profiler for Python. Shows you why your code is
slow!"
+optional = false
+python-versions = ">=3.8"
+groups = ["test"]
+files = [
+ {file = "pyinstrument-5.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash
= "sha256:1aeaf6b39ad40b3f03bea5fa3a9bd453a92aeb721dde29c1597f842ed9c8566a"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash =
"sha256:d734bd236d00e0e7f950019c689eaba1c9dd15e355867d8926c8b18b6077b221"},
+ {file =
"pyinstrument-5.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",
hash =
"sha256:520208a9b6c3985473aa9c3f30875ae5e78e77a81081df1d8aeb4fd8b4caf197"},
+ {file =
"pyinstrument-5.0.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",
hash =
"sha256:75e115b759288b8d65a0bf31a34a542ae102c58ef407e0614a43e0c39d261875"},
+ {file =
"pyinstrument-5.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl",
hash =
"sha256:091f93e6787c485a7ddf670608c00448e858a056677fc25ce349f8e44d6a9e54"},
+ {file =
"pyinstrument-5.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",
hash =
"sha256:28b07971afa2652cb4f2bdcffaef11aefa32b5384c0cfb32acf9955e96dd8df8"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash =
"sha256:e1bcb28a21b80eea5986eb5cb3180689b1d489b7c6fddf34e1f4df1f95d467ad"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash =
"sha256:80d28162070ff40c6d2ac7dc15b933ba20ef49e891a2e650cd2b91d30cd262b2"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash =
"sha256:c75e52a9bf76f084ba074323835cba4927ab3e572adfc96439698b097e523780"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash =
"sha256:ccefdd7dd938548ada43c95b24c42ec57e258ac7994a5ec7e4cc934fa4f1743b"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-win32.whl", hash =
"sha256:6b617fb024c244738aa2f6b8c2a25853eac765360ac91062578bbbcc8e22ebfe"},
+ {file = "pyinstrument-5.0.2-cp310-cp310-win_amd64.whl", hash =
"sha256:6788c8f93c1a6e0ad8d0ccde1631d17eca3839945d0fa4d506cf5d4bd7a26b77"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash
= "sha256:0eec7a263cc1ccfb101594e13256115366338fee2a156be4172fe5315f71ec45"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash =
"sha256:ddd5effefb470d7f1886dc16467501b866e3b5883cf74773f13179e718b28393"},
+ {file =
"pyinstrument-5.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",
hash =
"sha256:6e7458a6aa4048c1703354fc8a4a3c8b59d27b1409aafb707cf339d3c0bc794c"},
+ {file =
"pyinstrument-5.0.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",
hash =
"sha256:2373dd699711463011ec14e4918427a777f7ab73b31ae374d960725dbd5d5a28"},
+ {file =
"pyinstrument-5.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl",
hash =
"sha256:38ef498fbe71c2bbd11247b71e722290da93a367d88a5a8e0f66f6cc764c2b60"},
+ {file =
"pyinstrument-5.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",
hash =
"sha256:0a58a8a50f0cb3ee1c2e43ffec51bf48f48945e141feed7ccd9194917b97fe5b"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash =
"sha256:ad2a97c79ecf0e610df292abb5c46d01a4f99778598881d6e918650fa39801b6"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash =
"sha256:57ec0277042ee198eb749b76a975fe60f006cd51ea0c7ce3054c937577d19315"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash =
"sha256:73d34047266f27acb67218e331288c0241cf0080fe4b87dfad5596236c71abd7"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash =
"sha256:cfdc23284a8e2f27637b357c226a15d52b96608d9dde187b68dfe33a947f4908"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-win32.whl", hash =
"sha256:3e6fa135aee6af2c608e912d8d07906bbac3c5e564d94f92721831a957297c26"},
+ {file = "pyinstrument-5.0.2-cp311-cp311-win_amd64.whl", hash =
"sha256:6317df42a98a8074ccd25af5482312ec59a1f27c05dab408eb3c7b2081242733"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash
= "sha256:d0b680ef269b528d8dcd8151362fba9683b0ac22ffe74cc8161c33b53c65b899"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash =
"sha256:1c70b50ec90ae793b74733a6fc992723c6ee27c0fcb7d99848239316ded61189"},
+ {file =
"pyinstrument-5.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",
hash =
"sha256:3aae5f4f78515009f72393fdb271a15861534a586401383785f823cf8f60aa02"},
+ {file =
"pyinstrument-5.0.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",
hash =
"sha256:3aec8bc3d1c064ff849ca3568d6b0a7cfa0162d590a9d4d250c7118d09518b22"},
+ {file =
"pyinstrument-5.0.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl",
hash =
"sha256:28d87fac2bc0fed802b14a26982440f36c85dc53f303530ff7665a6e470315bb"},
+ {file =
"pyinstrument-5.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",
hash =
"sha256:3b9caac53c7eda8187ed122d4f7fcc6e3392f04c583d6d70b373351cede2b829"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash =
"sha256:8124419e8731a7bdbb9f7f885a8956806a4e9ab9dd19294f8a99e74c0bbdd327"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash =
"sha256:9990d9bd05fbb4fa83f24f0a62989b8e0a3ac15ff0fa19b49348c8ef5f9db50a"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash =
"sha256:1dc35f3d200866a43d4bc7570799a405f001591c8f19a30eb7a983a717c1e1f7"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash =
"sha256:a335a40d0ba1fe3658ef1a5ff2fc7a6870905828014645cb19dab5c1de379447"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-win32.whl", hash =
"sha256:29e565ce85e03d2541330a8174124c1ecdb073d945962a8eb738d3b1c806ac83"},
+ {file = "pyinstrument-5.0.2-cp312-cp312-win_amd64.whl", hash =
"sha256:300b0cc453ffe7661d5f3ceb94cdd98996fd9118f5ff1182b5336489c7d4e45c"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash
= "sha256:8141a5f78b927a88de46fb2bbb17e710e41d16e161fca99991635ff7196dbd5d"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash =
"sha256:12a0095ae408dbbdd429501fd4c6a3ab51d1aeff5f31be36cc3eedc8c4870ede"},
+ {file =
"pyinstrument-5.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",
hash =
"sha256:eca651d840e8e75ae5330abfc5c90f6ea4af3f78f9f0269231328305a5f9c667"},
+ {file =
"pyinstrument-5.0.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",
hash =
"sha256:89d6ffc5459b19f1c85d4433bb9bbc8925ec04a8d7caf2694218b1f557555f23"},
+ {file =
"pyinstrument-5.0.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl",
hash =
"sha256:4c84845ccc5318072708dc5535b6bedd54494e92a68e282e6b97b53c1db65331"},
+ {file =
"pyinstrument-5.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",
hash =
"sha256:6511092384b5729bbbf4b35534120d2969c5fdfd4f39080badedd973676b8725"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash =
"sha256:73f08cff7a8d9714be15440046289ab1a70cbc429e09967a3a106ac61538773e"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash =
"sha256:3905b510cdab1a8255a23fbdedcba4685245cbf814fd80f5b2005b472161d16e"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash =
"sha256:cd693a616166679da529168037c294ff25746c7ae5e8b547811fb25bb26439f5"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash =
"sha256:83a1659a3bc4123c81fcddfcc86608f37bd6a951da9692766c2251500a77ac06"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-win32.whl", hash =
"sha256:386d047db6c043dcc86bac592873234a89eaa258460e1ad8f47a11fcc7b024d5"},
+ {file = "pyinstrument-5.0.2-cp313-cp313-win_amd64.whl", hash =
"sha256:971c974c061019fa6177a021882255e639399bc15bf71b0a17979830702ad8d3"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash =
"sha256:21527abac55b8a6361b7bea5a3f1753fa58e21118eec1d2a713fb55a17879a7a"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash =
"sha256:228a2fcc02e75ce349b172159eecae46aa41ddf32e4e130ac305c6ff77b26af4"},
+ {file =
"pyinstrument-5.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",
hash =
"sha256:e44c822ae32cdcbf86c38bad2c9ffd3f640bd768c2374a489c75ae15007053c3"},
+ {file =
"pyinstrument-5.0.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",
hash =
"sha256:e976593cb78af7471c535bce70cd0aed87d6b3d3b43a83e5dbd181180858cf4a"},
+ {file =
"pyinstrument-5.0.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash
= "sha256:056605197b52bd4bf8679aaf2612b3c299d67b7b78b6b604a8204faccb24ca2d"},
+ {file =
"pyinstrument-5.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",
hash =
"sha256:8efa70b75baf2bce47d187d9700556a44726f31d74c31bcafc2029163ecad2fc"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash =
"sha256:1cd597c2094bd51e2a2e335b96217d3437b64c241086a7aeaf7835db97310876"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-musllinux_1_2_armv7l.whl", hash =
"sha256:c2264d6a0872933a30b9c0742eba02aa2fe25d598d51f3de1c41c086c459946a"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-musllinux_1_2_i686.whl", hash =
"sha256:b0847845a60c4ec345829c6efe41310f3b14de89db8f1d06f2a983794c57d49b"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash =
"sha256:0a0b24b81ff1da3e6ea930826b4dd7125d46c637e5bf3632e69044ec7f578c61"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-win32.whl", hash =
"sha256:abd45e1b15b3b5abeb752fd9aa1f4b2e1aa001012bdfb39ba044dc531147c31b"},
+ {file = "pyinstrument-5.0.2-cp38-cp38-win_amd64.whl", hash =
"sha256:e39e3d80fef659664e60e83834959dfc98bebf23aa5aece1a045ff87de9c22fd"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash =
"sha256:39cc70da2bf101e650bfcfbfea8270e308de47d785965203b42547f8af222d9e"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash =
"sha256:230f6d4b5f41136a4f316fee2c9a9e7934ef89f498417d88aff5cbc8cf805b80"},
+ {file =
"pyinstrument-5.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",
hash =
"sha256:034245ff794d0f3d5d81ca87642df0b16e3a2fcbced374e28ad8a1fe34758672"},
+ {file =
"pyinstrument-5.0.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",
hash =
"sha256:953abfbae7bcefd496831451c33f15957b485df7f51a7108dc8e857bb8c4b5cf"},
+ {file =
"pyinstrument-5.0.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash
= "sha256:eceadce147d9e7332c26c3beecdd25bb054d531d6ed75ab804e15310aa93e0fd"},
+ {file =
"pyinstrument-5.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",
hash =
"sha256:52bfaff4bad998163f9884a70418270389835a775164b4886357818b5736068c"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash =
"sha256:3daeb12ca1af4ded4acc017991e646c5ad6db4c8c81195917603756674e6a5d0"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash =
"sha256:7ee5c9dc172879b7baa8884d38ef0ab4484fae382aee12a016a10aea8a67118e"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash =
"sha256:4c990d675b7d9cb13af4abbb356c5bc65a4719befaf9deb6779aa9b194761654"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash =
"sha256:640db54338ff66cb28c119f3c0ea0e158e112eb8477a12b94e85a19504a37235"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-win32.whl", hash =
"sha256:7e611c9bffa0c446d694f40e56b2ab266ca97f0d093b16c360c1318625f0173b"},
+ {file = "pyinstrument-5.0.2-cp39-cp39-win_amd64.whl", hash =
"sha256:cdec7ff308930f349904fdc1cb45491a157900303975572ee2dfb55feba79405"},
+ {file = "pyinstrument-5.0.2.tar.gz", hash =
"sha256:e466033ead16a48ffa8bedbd633b90d416fa772b3b22f61226882ace0371f5f3"},
+]
+
+[package.extras]
+bin = ["click", "nox"]
+docs = ["furo (==2024.7.18)", "myst-parser (==3.0.1)", "sphinx (==7.4.7)",
"sphinx-autobuild (==2024.4.16)", "sphinxcontrib-programoutput (==0.17)"]
+examples = ["django", "litestar", "numpy"]
+test = ["cffi (>=1.17.0)", "flaky", "greenlet (>=3)", "ipython", "pytest",
"pytest-asyncio (==0.23.8)", "trio"]
+types = ["typing_extensions"]
+
[[package]]
name = "pyright"
version = "1.1.401"
@@ -3088,4 +3178,4 @@ propcache = ">=0.2.1"
[metadata]
lock-version = "2.1"
python-versions = "~=3.13"
-content-hash =
"2915c8c242f599b301912a5b4c5f8f4f6d82156a3e69b27404de109618d935ea"
+content-hash =
"b79174b297f7612b074db005aa6399eace902e49302d56e0168540befd71a8f1"
diff --git a/pyproject.toml b/pyproject.toml
index cf1a6bf..19f5ef1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -66,6 +66,7 @@ netifaces = "^0.11.0"
playwright = "^1.51.0"
pytest = ">=8.0"
pytest-asyncio = ">=0.24"
+pyinstrument = "^5.0.2"
[tool.poetry.group.dev.dependencies]
decouple-types = ">=1.0"
diff --git a/scripts/keys_import.py b/scripts/keys_import.py
new file mode 100644
index 0000000..3a4d4da
--- /dev/null
+++ b/scripts/keys_import.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+# Usage: poetry run python3 scripts/keys_import.py
+
+import asyncio
+import sys
+
+import httpx
+import pyinstrument
+
+sys.path.append(".")
+
+import atr.db as db
+import atr.db.interaction as interaction
+
+
+async def amain():
+ # This runs in serial, and takes several minutes
+ # We add about 5 keys per second, and there are around 2500 keys
+ # Therefore we expect it to take about 500 seconds, which is just over 8
minutes
+ profiler = pyinstrument.Profiler()
+ profiler = None
+ if profiler is not None:
+ profiler.start()
+ await db.init_database_for_worker()
+ async with db.session() as data:
+ committees = await data.committee().all()
+ committees = list(committees)
+ committees.sort(key=lambda c: c.name.lower())
+ limit = 10
+ for i, committee in enumerate(committees):
+ if (profiler is not None) and (i >= limit):
+ break
+ async with httpx.AsyncClient() as client:
+ response = await
client.get(f"https://downloads.apache.org/{committee.name}/KEYS")
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError:
+ print(committee.name + ": no KEYS file")
+ continue
+ keys_data = await response.aread()
+ keys_text = keys_data.decode("utf-8", errors="replace")
+ try:
+ _result, yes, no, _committees = await interaction.upload_keys(
+ [committee.name], keys_text, [committee.name]
+ )
+ except interaction.InteractionError as e:
+ print(committee.name + ":", e)
+ continue
+ print(f"{committee.name}: {yes} successes, {no} failures")
+ if profiler is not None:
+ profiler.stop()
+ print(profiler.output_text(show_all=True, color=True))
+
+
+def main():
+ asyncio.run(amain())
+
+
+if __name__ == "__main__":
+ main()
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]