This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git
The following commit(s) were added to refs/heads/main by this push:
new e6f922c Perform checks on .asc files separately
e6f922c is described below
commit e6f922ca8873a52843cd2100644c543aad205640
Author: Sean B. Palmer <[email protected]>
AuthorDate: Tue Mar 25 16:34:06 2025 +0200
Perform checks on .asc files separately
---
atr/routes/files.py | 42 +++++++++++++++++++++---------------------
atr/tasks/__init__.py | 51 +++++++++++++++++++++++++++++++--------------------
atr/tasks/rsync.py | 13 ++++++-------
3 files changed, 58 insertions(+), 48 deletions(-)
diff --git a/atr/routes/files.py b/atr/routes/files.py
index 63afced..dcb5846 100644
--- a/atr/routes/files.py
+++ b/atr/routes/files.py
@@ -124,6 +124,27 @@ def _authentication_failed() -> NoReturn:
raise base.ASFQuartException("Not authenticated", errorcode=401)
+async def _get_recent_tasks_by_type(
+ data: db.Session, release_name: str, file_path: str, modified: int
+) -> dict[str, models.Task]:
+ """Get the most recent task for each task type for a specific file."""
+ tasks = await data.task(
+ release_name=release_name,
+ path=str(file_path),
+ modified=modified,
+ ).all()
+
+ # Group by task_type and keep the most recent one
+ # We use the highest id to determine the most recent task
+ recent_tasks: dict[str, models.Task] = {}
+ for task in tasks:
+ # If we haven't seen this task type before or if this task is newer
+        if (task.task_type not in recent_tasks) or (task.id > recent_tasks[task.task_type].id):
+ recent_tasks[task.task_type] = task
+
+ return recent_tasks
+
+
async def _number_of_release_files(release: models.Release) -> int:
"""Return the number of files in the release."""
path_project = release.project.name
@@ -332,27 +353,6 @@ async def root_files_add_project(
)
-async def _get_recent_tasks_by_type(
- data: db.Session, release_name: str, file_path: str, modified: int
-) -> dict[str, models.Task]:
- """Get the most recent task for each task type for a specific file."""
- tasks = await data.task(
- release_name=release_name,
- path=str(file_path),
- modified=modified,
- ).all()
-
- # Group by task_type and keep the most recent one
- # We use the highest id to determine the most recent task
- recent_tasks: dict[str, models.Task] = {}
- for task in tasks:
- # If we haven't seen this task type before or if this task is newer
-        if (task.task_type not in recent_tasks) or (task.id > recent_tasks[task.task_type].id):
- recent_tasks[task.task_type] = task
-
- return recent_tasks
-
-
@committer_route("/files/list/<project_name>/<version_name>")
async def root_files_list(session: CommitterSession, project_name: str, version_name: str) -> str:
"""Show all the files in the rsync upload directory for a release."""
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index bd52b1d..441ace6 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -24,15 +24,42 @@ import atr.tasks.archive as archive
import atr.util as util
+async def asc_checks(release: models.Release, signature_path: str) -> list[models.Task]:
+ tasks = []
+
+    draft_dir = util.get_candidate_draft_dir() / release.project.name / release.version
+ full_signature_path = str(draft_dir / signature_path)
+ modified = int(await aiofiles.os.path.getmtime(full_signature_path))
+
+ artifact_path = signature_path.removesuffix(".asc")
+ full_artifact_path = str(draft_dir / artifact_path)
+ if not (await aiofiles.os.path.exists(full_artifact_path)):
+ raise RuntimeError(f"Artifact {full_artifact_path} does not exist")
+
+ if release.committee:
+ tasks.append(
+ models.Task(
+ status=models.TaskStatus.QUEUED,
+ task_type="verify_signature",
+ task_args=[
+ release.committee.name,
+ full_artifact_path,
+ full_signature_path,
+ ],
+ release_name=release.name,
+ path=signature_path,
+ modified=modified,
+ ),
+ )
+
+ return tasks
+
+
async def tar_gz_checks(release: models.Release, path: str, signature_path: str | None = None) -> list[models.Task]:
# TODO: We should probably use an enum for task_type
    full_path = str(util.get_candidate_draft_dir() / release.project.name / release.version / path)
filename = os.path.basename(path)
modified = int(await aiofiles.os.path.getmtime(full_path))
- if signature_path is None:
- signature_path = path + ".asc"
- if not (await aiofiles.os.path.exists(signature_path)):
- signature_path = None
tasks = [
models.Task(
@@ -85,20 +112,4 @@ async def tar_gz_checks(release: models.Release, path: str, signature_path: str
),
]
- if signature_path and release.committee:
- tasks.append(
- models.Task(
- status=models.TaskStatus.QUEUED,
- task_type="verify_signature",
- task_args=[
- release.committee.name,
- full_path,
- signature_path,
- ],
- release_name=release.name,
- path=path,
- modified=modified,
- ),
- )
-
return tasks
diff --git a/atr/tasks/rsync.py b/atr/tasks/rsync.py
index 80e18a9..e30701f 100644
--- a/atr/tasks/rsync.py
+++ b/atr/tasks/rsync.py
@@ -61,12 +61,11 @@ async def _analyse_core(asf_uid: str, project_name: str, release_version: str) -
    release = await data.release(name=release_name, _committee=True).demand(RuntimeError("Release not found"))
for path in paths:
# Add new tasks for each path
- # We could use the SHA3 in input and output
- # Or, less securely, we could use path and mtime instead
- if not path.name.endswith(".tar.gz"):
- continue
- _LOGGER.info(f"Analyse {release_name} {path} {path!s}")
- for task in await tasks.tar_gz_checks(release, str(path)):
- data.add(task)
+ if path.name.endswith(".asc"):
+ for task in await tasks.asc_checks(release, str(path)):
+ data.add(task)
+ elif path.name.endswith(".tar.gz"):
+ for task in await tasks.tar_gz_checks(release, str(path)):
+ data.add(task)
await data.commit()
return {"paths": [str(path) for path in paths]}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]