This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git


The following commit(s) were added to refs/heads/main by this push:
     new 844effc  Move generation task creation to a new storage interface writer
844effc is described below

commit 844effca13058cdd9e1b8e9f196dc09c71bd616d
Author: Sean B. Palmer <[email protected]>
AuthorDate: Thu Sep 11 14:12:48 2025 +0100

    Move generation task creation to a new storage interface writer
---
 atr/routes/draft.py                        | 33 ++---------
 atr/storage/__init__.py                    |  4 ++
 atr/storage/writers/__init__.py            |  2 +
 atr/storage/writers/{tokens.py => sbom.py} | 92 +++++++++++++++++-------------
 atr/storage/writers/tokens.py              |  5 +-
 5 files changed, 67 insertions(+), 69 deletions(-)

diff --git a/atr/routes/draft.py b/atr/routes/draft.py
index 73cf541..fe35546 100644
--- a/atr/routes/draft.py
+++ b/atr/routes/draft.py
@@ -19,7 +19,6 @@
 
 from __future__ import annotations
 
-import asyncio
 import datetime
 import hashlib
 import pathlib
@@ -42,7 +41,6 @@ import atr.routes.compose as compose
 import atr.routes.root as root
 import atr.routes.upload as upload
 import atr.storage as storage
-import atr.tasks.sbom as sbom
 import atr.template as template
 import atr.util as util
 
@@ -299,33 +297,12 @@ async def sbomgen(
             raise routes.FlashError("Internal error: New revision not found")
 
         # Create and queue the task, using paths within the new revision
-        # TODO: Move this to the storage interface
-        async with db.session() as data:
-            # We still need release.name for the task metadata
-            sbom_task = sql.Task(
-                task_type=sql.TaskType.SBOM_GENERATE_CYCLONEDX,
-                task_args=sbom.GenerateCycloneDX(
-                    artifact_path=str(path_in_new_revision.resolve()),
-                    output_path=str(sbom_path_in_new_revision.resolve()),
-                ).model_dump(),
-                asf_uid=util.unwrap(session.uid),
-                added=datetime.datetime.now(datetime.UTC),
-                status=sql.TaskStatus.QUEUED,
-                project_name=project_name,
-                version_name=version_name,
-                revision_number=creating.new.number,
+        async with storage.write(session.uid) as write:
+            wacp = await write.as_project_committee_member(project_name)
+            sbom_task = await wacp.sbom.generate_cyclonedx(
+                project_name, version_name, creating.new.number, path_in_new_revision, sbom_path_in_new_revision
             )
-            data.add(sbom_task)
-            await data.commit()
-
-            # We must wait until the sbom_task is complete before we can queue checks
-            # Maximum wait time is 60 * 100ms = 6000ms
-            for _attempt in range(60):
-                await data.refresh(sbom_task)
-                if sbom_task.status != sql.TaskStatus.QUEUED:
-                    break
-                # Wait 100ms before checking again
-                await asyncio.sleep(0.1)
+            await wacp.sbom.generate_cyclonedx_wait(sbom_task)
 
     except Exception as e:
         log.exception("Error generating SBOM:")
diff --git a/atr/storage/__init__.py b/atr/storage/__init__.py
index 1009410..7cd4140 100644
--- a/atr/storage/__init__.py
+++ b/atr/storage/__init__.py
@@ -139,6 +139,7 @@ class WriteAsGeneralPublic(WriteAs):
         self.checks = writers.checks.GeneralPublic(write, self, data)
         self.keys = writers.keys.GeneralPublic(write, self, data)
         self.release = writers.release.GeneralPublic(write, self, data)
+        self.sbom = writers.sbom.GeneralPublic(write, self, data)
         self.ssh = writers.ssh.GeneralPublic(write, self, data)
         self.tokens = writers.tokens.GeneralPublic(write, self, data)
         self.vote = writers.vote.GeneralPublic(write, self, data)
@@ -152,6 +153,7 @@ class WriteAsFoundationCommitter(WriteAsGeneralPublic):
         self.checks = writers.checks.FoundationCommitter(write, self, data)
         self.keys = writers.keys.FoundationCommitter(write, self, data)
         self.release = writers.release.FoundationCommitter(write, self, data)
+        self.sbom = writers.sbom.FoundationCommitter(write, self, data)
         self.ssh = writers.ssh.FoundationCommitter(write, self, data)
         self.tokens = writers.tokens.FoundationCommitter(write, self, data)
         self.vote = writers.vote.FoundationCommitter(write, self, data)
@@ -171,6 +173,7 @@ class WriteAsCommitteeParticipant(WriteAsFoundationCommitter):
         self.checks = writers.checks.CommitteeParticipant(write, self, data, committee_name)
         self.keys = writers.keys.CommitteeParticipant(write, self, data, committee_name)
         self.release = writers.release.CommitteeParticipant(write, self, data, committee_name)
+        self.sbom = writers.sbom.CommitteeParticipant(write, self, data, committee_name)
         self.ssh = writers.ssh.CommitteeParticipant(write, self, data, committee_name)
         self.tokens = writers.tokens.CommitteeParticipant(write, self, data, committee_name)
         self.vote = writers.vote.CommitteeParticipant(write, self, data, committee_name)
@@ -195,6 +198,7 @@ class WriteAsCommitteeMember(WriteAsCommitteeParticipant):
         self.distributions = writers.distributions.CommitteeMember(write, self, data, committee_name)
         self.keys = writers.keys.CommitteeMember(write, self, data, committee_name)
         self.release = writers.release.CommitteeMember(write, self, data, committee_name)
+        self.sbom = writers.sbom.CommitteeMember(write, self, data, committee_name)
         self.ssh = writers.ssh.CommitteeMember(write, self, data, committee_name)
         self.tokens = writers.tokens.CommitteeMember(write, self, data, committee_name)
         self.vote = writers.vote.CommitteeMember(write, self, data, committee_name)
diff --git a/atr/storage/writers/__init__.py b/atr/storage/writers/__init__.py
index 0d8b5b1..6e9daff 100644
--- a/atr/storage/writers/__init__.py
+++ b/atr/storage/writers/__init__.py
@@ -20,6 +20,7 @@ import atr.storage.writers.checks as checks
 import atr.storage.writers.distributions as distributions
 import atr.storage.writers.keys as keys
 import atr.storage.writers.release as release
+import atr.storage.writers.sbom as sbom
 import atr.storage.writers.ssh as ssh
 import atr.storage.writers.tokens as tokens
 import atr.storage.writers.vote as vote
@@ -30,6 +31,7 @@ __all__ = [
     "distributions",
     "keys",
     "release",
+    "sbom",
     "ssh",
     "tokens",
     "vote",
diff --git a/atr/storage/writers/tokens.py b/atr/storage/writers/sbom.py
similarity index 59%
copy from atr/storage/writers/tokens.py
copy to atr/storage/writers/sbom.py
index c96618e..25323f0 100644
--- a/atr/storage/writers/tokens.py
+++ b/atr/storage/writers/sbom.py
@@ -18,15 +18,18 @@
 # Removing this will cause circular imports
 from __future__ import annotations
 
+import asyncio
 import datetime
-import hashlib
-
-import sqlmodel
+from typing import TYPE_CHECKING
 
 import atr.db as db
-import atr.jwtoken as jwtoken
 import atr.models.sql as sql
 import atr.storage as storage
+import atr.tasks.sbom as sbom
+import atr.util as util
+
+if TYPE_CHECKING:
+    import pathlib
 
 
 class GeneralPublic:
@@ -53,41 +56,6 @@ class FoundationCommitter(GeneralPublic):
             raise storage.AccessError("No ASF UID")
         self.__asf_uid = asf_uid
 
-    async def add_token(
-        self, uid: str, token_hash: str, created: datetime.datetime, expires: datetime.datetime, label: str | None
-    ) -> sql.PersonalAccessToken:
-        pat = sql.PersonalAccessToken(
-            asfuid=uid,
-            token_hash=token_hash,
-            created=created,
-            expires=expires,
-            label=label,
-        )
-        self.__data.add(pat)
-        await self.__data.commit()
-        return pat
-
-    async def issue_jwt(self, pat_text: str) -> str:
-        pat_hash = hashlib.sha3_256(pat_text.encode()).hexdigest()
-        pat = await self.__data.query_one_or_none(
-            sqlmodel.select(sql.PersonalAccessToken).where(
-                sql.PersonalAccessToken.asfuid == self.__asf_uid,
-                sql.PersonalAccessToken.token_hash == pat_hash,
-            )
-        )
-        if pat is None:
-            raise storage.AccessError("Invalid PAT")
-        if pat.expires < datetime.datetime.now(datetime.UTC):
-            raise storage.AccessError("Expired PAT")
-        issued_jwt = jwtoken.issue(self.__asf_uid)
-        pat.last_used = datetime.datetime.now(datetime.UTC)
-        await self.__data.commit()
-        self.__write_as.append_to_audit_log(
-            asf_uid=self.__asf_uid,
-            pat_hash=pat_hash,
-        )
-        return issued_jwt
-
 
 class CommitteeParticipant(FoundationCommitter):
     def __init__(
@@ -101,9 +69,53 @@ class CommitteeParticipant(FoundationCommitter):
         self.__write = write
         self.__write_as = write_as
         self.__data = data
-        self.__asf_uid = write.authorisation.asf_uid
+        asf_uid = write.authorisation.asf_uid
+        if asf_uid is None:
+            raise storage.AccessError("No ASF UID")
+        self.__asf_uid = asf_uid
         self.__committee_name = committee_name
 
+    async def generate_cyclonedx(
+        self,
+        project_name: str,
+        version_name: str,
+        revision_number: str,
+        path_in_new_revision: pathlib.Path,
+        sbom_path_in_new_revision: pathlib.Path,
+    ) -> sql.Task:
+        # Create and queue the task, using paths within the new revision
+        # TODO: Move this to the storage interface
+        # We still need release.name for the task metadata
+        sbom_task = sql.Task(
+            task_type=sql.TaskType.SBOM_GENERATE_CYCLONEDX,
+            task_args=sbom.GenerateCycloneDX(
+                artifact_path=str(path_in_new_revision.resolve()),
+                output_path=str(sbom_path_in_new_revision.resolve()),
+            ).model_dump(),
+            asf_uid=util.unwrap(self.__asf_uid),
+            added=datetime.datetime.now(datetime.UTC),
+            status=sql.TaskStatus.QUEUED,
+            project_name=project_name,
+            version_name=version_name,
+            revision_number=revision_number,
+        )
+        self.__data.add(sbom_task)
+        await self.__data.commit()
+        await self.__data.refresh(sbom_task)
+        return sbom_task
+
+    # TODO: This is not a writer
+    # Move this to the readers
+    async def generate_cyclonedx_wait(self, sbom_task: sql.Task) -> None:
+        # We must wait until the sbom_task is complete before we can queue checks
+        # Maximum wait time is 60 * 100ms = 6000ms
+        for _attempt in range(60):
+            await self.__data.refresh(sbom_task)
+            if sbom_task.status != sql.TaskStatus.QUEUED:
+                break
+            # Wait 100ms before checking again
+            await asyncio.sleep(0.1)
+
 
 class CommitteeMember(CommitteeParticipant):
     def __init__(
diff --git a/atr/storage/writers/tokens.py b/atr/storage/writers/tokens.py
index c96618e..6250743 100644
--- a/atr/storage/writers/tokens.py
+++ b/atr/storage/writers/tokens.py
@@ -101,7 +101,10 @@ class CommitteeParticipant(FoundationCommitter):
         self.__write = write
         self.__write_as = write_as
         self.__data = data
-        self.__asf_uid = write.authorisation.asf_uid
+        asf_uid = write.authorisation.asf_uid
+        if asf_uid is None:
+            raise storage.AccessError("No ASF UID")
+        self.__asf_uid = asf_uid
         self.__committee_name = committee_name
 
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to