This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git

The following commit(s) were added to refs/heads/main by this push:
     new dbef026  Move path information code to the storage interface
dbef026 is described below

commit dbef0269e768ac3fa56a123c623069823992370b
Author: Sean B. Palmer <s...@miscoranda.com>
AuthorDate: Wed Jul 30 14:55:56 2025 +0100

    Move path information code to the storage interface
---
 atr/db/interaction.py           | 64 ----------------------------
 atr/routes/compose.py           |  5 ++-
 atr/storage/__init__.py         |  6 +++
 atr/storage/readers/__init__.py |  3 +-
 atr/storage/readers/releases.py | 92 +++++++++++++++++++++++++++++++++++++++++
 atr/storage/types.py            |  9 ++++
 6 files changed, 113 insertions(+), 66 deletions(-)

diff --git a/atr/db/interaction.py b/atr/db/interaction.py
index 337b1dc..a50e127 100644
--- a/atr/db/interaction.py
+++ b/atr/db/interaction.py
@@ -17,7 +17,6 @@

 import contextlib
 import pathlib
-import re
 from collections.abc import AsyncGenerator, Sequence

 import aiofiles.os
@@ -27,10 +26,8 @@ import quart
 import sqlalchemy
 import sqlmodel

-import atr.analysis as analysis
 import atr.db as db
 import atr.log as log
-import atr.models.schema as schema
 import atr.models.sql as sql
 import atr.user as user
 import atr.util as util
@@ -51,14 +48,6 @@ class PublicKeyError(RuntimeError):
     pass


-class PathInfo(schema.Strict):
-    artifacts: set[pathlib.Path] = schema.factory(set)
-    errors: dict[pathlib.Path, list[sql.CheckResult]] = schema.factory(dict)
-    metadata: set[pathlib.Path] = schema.factory(set)
-    successes: dict[pathlib.Path, list[sql.CheckResult]] = schema.factory(dict)
-    warnings: dict[pathlib.Path, list[sql.CheckResult]] = schema.factory(dict)
-
-
 async def candidate_drafts(project: sql.Project) -> list[sql.Release]:
     """Get the candidate drafts for the project."""
     return await releases_by_phase(project, sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT)
@@ -103,24 +92,6 @@ async def latest_revision(release: sql.Release) -> sql.Revision | None:
         return await data.revision(release_name=release.name, number=release.latest_revision_number).get()


-async def path_info(release: sql.Release, paths: list[pathlib.Path]) -> PathInfo | None:
-    info = PathInfo()
-    latest_revision_number = release.latest_revision_number
-    if latest_revision_number is None:
-        return None
-    async with db.session() as data:
-        await _successes_errors_warnings(data, release, latest_revision_number, info)
-    for path in paths:
-        # Get artifacts and metadata
-        search = re.search(analysis.extension_pattern(), str(path))
-        if search:
-            if search.group("artifact"):
-                info.artifacts.add(path)
-            elif search.group("metadata"):
-                info.metadata.add(path)
-    return info
-
-
 async def previews(project: sql.Project) -> list[sql.Release]:
     """Get the preview releases for the project."""
     return await releases_by_phase(project, sql.ReleasePhase.RELEASE_PREVIEW)
@@ -338,38 +309,3 @@ async def _delete_release_data_filesystem(release_dir: pathlib.Path, release_nam
             f"Database records for '{release_name}' deleted, but failed to delete filesystem directory: {e!s}",
             "warning",
         )
-
-
-async def _successes_errors_warnings(
-    data: db.Session, release: sql.Release, latest_revision_number: str, info: PathInfo
-) -> None:
-    # Get successes, warnings, and errors
-    successes = await data.check_result(
-        release_name=release.name,
-        revision_number=latest_revision_number,
-        member_rel_path=None,
-        status=sql.CheckResultStatus.SUCCESS,
-    ).all()
-    for success in successes:
-        if primary_rel_path := success.primary_rel_path:
-            info.successes.setdefault(pathlib.Path(primary_rel_path), []).append(success)
-
-    warnings = await data.check_result(
-        release_name=release.name,
-        revision_number=latest_revision_number,
-        member_rel_path=None,
-        status=sql.CheckResultStatus.WARNING,
-    ).all()
-    for warning in warnings:
-        if primary_rel_path := warning.primary_rel_path:
-            info.warnings.setdefault(pathlib.Path(primary_rel_path), []).append(warning)
-
-    errors = await data.check_result(
-        release_name=release.name,
-        revision_number=latest_revision_number,
-        member_rel_path=None,
-        status=sql.CheckResultStatus.FAILURE,
-    ).all()
-    for error in errors:
-        if primary_rel_path := error.primary_rel_path:
-            info.errors.setdefault(pathlib.Path(primary_rel_path), []).append(error)
diff --git a/atr/routes/compose.py b/atr/routes/compose.py
index e5c3432..54e1017 100644
--- a/atr/routes/compose.py
+++ b/atr/routes/compose.py
@@ -27,6 +27,7 @@ import atr.models.sql as sql
 import atr.revision as revision
 import atr.routes as routes
 import atr.routes.draft as draft
+import atr.storage as storage
 import atr.template as template
 import atr.util as util

@@ -50,7 +51,9 @@ async def check(
     paths = [path async for path in util.paths_recursive(base_path)]
     paths.sort()

-    info = await interaction.path_info(release, paths)
+    async with storage.read() as read:
+        ragp = read.as_general_public()
+        info = await ragp.releases.path_info(release, paths)

     user_ssh_keys: Sequence[sql.SSHKey] = []
     async with db.session() as data:
diff --git a/atr/storage/__init__.py b/atr/storage/__init__.py
index 66f9e93..1f44d19 100644
--- a/atr/storage/__init__.py
+++ b/atr/storage/__init__.py
@@ -77,6 +77,12 @@ class ReadAsGeneralPublic(AccessCredentialsRead):
             self.__data,
             self.__asf_uid,
         )
+        self.releases = readers.releases.GeneralPublic(
+            self,
+            self.__read,
+            self.__data,
+            self.__asf_uid,
+        )

     @property
     def authenticated(self) -> bool:
diff --git a/atr/storage/readers/__init__.py b/atr/storage/readers/__init__.py
index a23781f..6993e42 100644
--- a/atr/storage/readers/__init__.py
+++ b/atr/storage/readers/__init__.py
@@ -16,5 +16,6 @@
 # under the License.

 import atr.storage.readers.checks as checks
+import atr.storage.readers.releases as releases

-__all__ = ["checks"]
+__all__ = ["checks", "releases"]
diff --git a/atr/storage/readers/releases.py b/atr/storage/readers/releases.py
new file mode 100644
index 0000000..fff6e7a
--- /dev/null
+++ b/atr/storage/readers/releases.py
@@ -0,0 +1,92 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Removing this will cause circular imports
+from __future__ import annotations
+
+import pathlib
+import re
+
+import atr.analysis as analysis
+import atr.db as db
+import atr.models.sql as sql
+import atr.storage as storage
+import atr.storage.types as types
+
+
+class GeneralPublic:
+    def __init__(
+        self,
+        credentials: storage.ReadAsGeneralPublic,
+        read: storage.Read,
+        data: db.Session,
+        asf_uid: str | None = None,
+    ):
+        self.__credentials = credentials
+        self.__read = read
+        self.__data = data
+        self.__asf_uid = asf_uid
+
+    async def path_info(self, release: sql.Release, paths: list[pathlib.Path]) -> types.PathInfo | None:
+        info = types.PathInfo()
+        latest_revision_number = release.latest_revision_number
+        if latest_revision_number is None:
+            return None
+        await self.__successes_errors_warnings(release, latest_revision_number, info)
+        for path in paths:
+            # Get artifacts and metadata
+            search = re.search(analysis.extension_pattern(), str(path))
+            if search:
+                if search.group("artifact"):
+                    info.artifacts.add(path)
+                elif search.group("metadata"):
+                    info.metadata.add(path)
+        return info
+
+    async def __successes_errors_warnings(
+        self, release: sql.Release, latest_revision_number: str, info: types.PathInfo
+    ) -> None:
+        # Get successes, warnings, and errors
+        successes = await self.__data.check_result(
+            release_name=release.name,
+            revision_number=latest_revision_number,
+            member_rel_path=None,
+            status=sql.CheckResultStatus.SUCCESS,
+        ).all()
+        for success in successes:
+            if primary_rel_path := success.primary_rel_path:
+                info.successes.setdefault(pathlib.Path(primary_rel_path), []).append(success)
+
+        warnings = await self.__data.check_result(
+            release_name=release.name,
+            revision_number=latest_revision_number,
+            member_rel_path=None,
+            status=sql.CheckResultStatus.WARNING,
+        ).all()
+        for warning in warnings:
+            if primary_rel_path := warning.primary_rel_path:
+                info.warnings.setdefault(pathlib.Path(primary_rel_path), []).append(warning)
+
+        errors = await self.__data.check_result(
+            release_name=release.name,
+            revision_number=latest_revision_number,
+            member_rel_path=None,
+            status=sql.CheckResultStatus.FAILURE,
+        ).all()
+        for error in errors:
+            if primary_rel_path := error.primary_rel_path:
+                info.errors.setdefault(pathlib.Path(primary_rel_path), []).append(error)
diff --git a/atr/storage/types.py b/atr/storage/types.py
index c250671..ef05592 100644
--- a/atr/storage/types.py
+++ b/atr/storage/types.py
@@ -17,6 +17,7 @@

 import dataclasses
 import enum
+import pathlib
 from collections.abc import Callable, Sequence
 from typing import NoReturn, TypeVar

@@ -250,6 +251,14 @@ class LinkedCommittee:
     autogenerated_keys_file: Outcome[str]


+class PathInfo(schema.Strict):
+    artifacts: set[pathlib.Path] = schema.factory(set)
+    errors: dict[pathlib.Path, list[sql.CheckResult]] = schema.factory(dict)
+    metadata: set[pathlib.Path] = schema.factory(set)
+    successes: dict[pathlib.Path, list[sql.CheckResult]] = schema.factory(dict)
+    warnings: dict[pathlib.Path, list[sql.CheckResult]] = schema.factory(dict)
+
+
 class PublicKeyError(Exception):
     def __init__(self, key: Key, original_error: Exception):
         self.__key = key

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@tooling.apache.org
For additional commands, e-mail: commits-h...@tooling.apache.org
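
Postscript on usage: after this change, callers reach path_info through the storage facade rather than atr.db.interaction, as the atr/routes/compose.py hunk above shows. The sketch below mirrors that committed call pattern; the gather_path_info wrapper itself, and the assumption that the release object and draft directory come from a surrounding route handler, are illustrative and not part of this commit.

    import pathlib

    import atr.models.sql as sql
    import atr.storage as storage
    import atr.storage.types as types
    import atr.util as util


    async def gather_path_info(release: sql.Release, base_path: pathlib.Path) -> types.PathInfo | None:
        # Collect the draft's file paths, as compose.check does before calling path_info.
        paths = [path async for path in util.paths_recursive(base_path)]
        paths.sort()
        # Open a read-only storage session and narrow it to the general public
        # reader, which now owns the path classification logic.
        async with storage.read() as read:
            ragp = read.as_general_public()
            # Returns a types.PathInfo (artifacts, metadata, and check successes,
            # warnings, and errors keyed by path), or None when the release has
            # no revision yet.
            return await ragp.releases.path_info(release, paths)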