This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git


The following commit(s) were added to refs/heads/main by this push:
     new 225cc87  Fix the atomicity of adding revisions
225cc87 is described below

commit 225cc87397e45c6e71a0bd697ea8abca18161e77
Author: Sean B. Palmer <[email protected]>
AuthorDate: Wed May 21 19:39:59 2025 +0100

    Fix the atomicity of adding revisions
---
 atr/db/interaction.py   |   7 +++
 atr/revision.py         | 138 ++++++++++++++++++++----------------------------
 atr/routes/draft.py     | 116 +++++++++++++++++++---------------------
 atr/routes/finish.py    |  13 +++--
 atr/routes/keys.py      |   9 ++--
 atr/routes/resolve.py   |   7 ++-
 atr/routes/revisions.py |  14 +++--
 atr/routes/start.py     |   5 +-
 atr/routes/upload.py    |   7 +--
 atr/ssh.py              |  28 +++++-----
 atr/tasks/svn.py        |  25 +++++----
 atr/util.py             |   7 ++-
 12 files changed, 174 insertions(+), 202 deletions(-)
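
Note: for callers, the net effect of this change is that
revision.create_and_manage now yields a single Creating dataclass instead
of a (new_revision_dir, new_revision_number) tuple. A minimal sketch of
the new calling pattern, pieced together from the route changes below
(the function and its arguments are placeholders):

    import atr.revision as revision

    async def example(project_name: str, version_name: str, uid: str) -> None:
        async with revision.create_and_manage(
            project_name, version_name, uid, description="Example change"
        ) as creating:
            # Stage changes under the private interim directory; nothing
            # is visible to other readers until the block exits cleanly
            target = creating.interim_path / "KEYS"
            ...
        # Only after the block does creating.new hold the committed revision
        if creating.new is not None:
            print(creating.new.number)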

diff --git a/atr/db/interaction.py b/atr/db/interaction.py
index 678c024..1e5aa3d 100644
--- a/atr/db/interaction.py
+++ b/atr/db/interaction.py
@@ -199,6 +199,13 @@ async def key_user_session_add(
     }
 
 
+async def latest_revision(release: models.Release) -> models.Revision | None:
+    if release.latest_revision_number is None:
+        return None
+    async with db.session() as data:
+        return await data.revision(release_name=release.name, number=release.latest_revision_number).get()
+
+
 async def path_info(release: models.Release, paths: list[pathlib.Path]) -> PathInfo | None:
     info = PathInfo()
     latest_revision_number = release.latest_revision_number
diff --git a/atr/revision.py b/atr/revision.py
index 54ac770..37e60ad 100644
--- a/atr/revision.py
+++ b/atr/revision.py
@@ -15,118 +15,96 @@
 # specific language governing permissions and limitations
 # under the License.
 
+import asyncio
 import contextlib
+import dataclasses
 import datetime
-import logging
 import pathlib
+import tempfile
 from collections.abc import AsyncGenerator
-from typing import Final
 
 import aiofiles.os
+import aioshutil
 
 import atr.db as db
+import atr.db.interaction as interaction
 import atr.db.models as models
 import atr.tasks as tasks
 import atr.util as util
 
-_LOGGER: Final = logging.getLogger(__name__)
 
[email protected]
+class Creating:
+    old: models.Revision | None
+    interim_path: pathlib.Path
+    new: models.Revision | None
 
+
+# NOTE: The create_directory parameter is not used anymore
+# The temporary directory will always be created
 @contextlib.asynccontextmanager
 async def create_and_manage(
     project_name: str,
     version_name: str,
     asf_uid: str,
-    create_directory: bool = True,
     description: str | None = None,
-) -> AsyncGenerator[tuple[pathlib.Path, str]]:
+) -> AsyncGenerator[Creating]:
     """Manage the creation and symlinking of a mutable release revision."""
-    base_dir = util.get_unfinished_dir()
-    base_release_dir = base_dir / project_name / version_name
-
-    # Ensure that the base directory for the release exists
-    await aiofiles.os.makedirs(base_release_dir, exist_ok=True)
-
+    # Get the release
     release_name = models.release_name(project_name, version_name)
-    # Create and commit the new Revision
     async with db.session() as data:
-        release_one = await data.release(name=release_name, _project=True).demand(
+        release = await data.release(name=release_name).demand(
             RuntimeError("Release does not exist for new revision creation")
         )
+        old_revision = await interaction.latest_revision(release)
+    # Create a temporary directory
+    # Ensure that it's removed on any exception
+    temp_dir: str = await asyncio.to_thread(tempfile.mkdtemp)
+    temp_dir_path = pathlib.Path(temp_dir)
+    try:
+        # The directory was created by mkdtemp, but it's empty
+        if old_revision is not None:
+            # If this is not the first revision, hard link the previous revision
+            old_release_dir = util.release_directory(release)
+            await util.create_hard_link_clone(old_release_dir, temp_dir_path, do_not_create_dest_dir=True)
+        # The directory is either empty or its files are hard linked to the previous revision
+        creating = Creating(old=old_revision, interim_path=temp_dir_path, new=None)
+        yield creating
+    except Exception:
+        await aioshutil.rmtree(temp_dir)  # type: ignore[call-arg]
+        raise
 
+    # Create a revision row, but hold the write lock
+    async with db.session() as data, data.begin():
         new_revision = models.Revision(
-            # name is automatically computed in an event listener
-            release_name=release_one.name,
-            release=release_one,
-            # seq is automatically computed in an event listener
-            # number is automatically computed in an event listener
+            release_name=release_name,
+            release=release,
             asfuid=asf_uid,
             created=datetime.datetime.now(datetime.UTC),
-            phase=release_one.phase,
-            # parent_name is automatically computed in an event listener
-            # parent is automatically computed in an event listener
-            child=None,
+            phase=release.phase,
             description=description,
         )
         data.add(new_revision)
-        # TODO: Add a retry loop here in case of simultaneous creation of revisions?
-        await data.commit()
-
-        # After commit, new_revision has its .name, .seq, and .number populated by the listener
-        new_revision_name = new_revision.name
-        new_revision_number = new_revision.number
-
-    if not (new_revision_name and new_revision_number):
-        raise RuntimeError("Failed to obtain the name and number of the newly 
committed revision.")
-
-    # Details needed for directory structure and yield
-    parent_revision_dir: pathlib.Path | None = None
-
-    # Get details of the committed revision
-    async with db.session() as data:
-        new_revision_with_parent = await data.revision(name=new_revision_name, _parent=True).demand(
-            RuntimeError("Committed revision not found or parent could not be loaded")
-        )
-        if new_revision_with_parent.parent:
-            parent_revision_dir = base_release_dir / new_revision_with_parent.parent.number
-
-    new_revision_dir = base_release_dir / new_revision_number
-
-    try:
-        # Create the new revision directory
-        if parent_revision_dir:
-            _LOGGER.info(f"Creating new revision {new_revision_number} by hard 
linking from {parent_revision_dir.name}")
-            await util.create_hard_link_clone(parent_revision_dir, 
new_revision_dir)
-        elif create_directory:
-            _LOGGER.info(f"Creating new empty revision directory 
{new_revision_number}")
-            await aiofiles.os.makedirs(new_revision_dir)
-        else:
-            _LOGGER.info(f"Creating new empty revision with no directory for 
{new_revision_number}")
-
-        # Yield control to the block within "async with"
-        yield new_revision_dir, new_revision_number
-
-        # If the release is in the DRAFT phase, schedule the checks to be run
-        # The caller may have modified release_one, so we must get it again
-        async with db.session() as data:
-            release_two = await data.release(name=release_name).demand(
-                RuntimeError("Release not found for task scheduling")
-            )
-            if release_two.phase is models.ReleasePhase.RELEASE_CANDIDATE_DRAFT:
-                _LOGGER.warning(f"Scheduling checks for {project_name} {version_name} {new_revision_number}")
-                # TODO: Passing data=data here breaks the database session
-                # Should figure out why that happens
-                await tasks.draft_checks(project_name, version_name, new_revision_number)
-            else:
-                _LOGGER.warning(
-                    f"Skipping checks for {project_name} {version_name}"
-                    f" {new_revision_number} because release is not in DRAFT phase"
-                )
-
-    except Exception:
-        _LOGGER.exception(f"Error during revision management for 
{new_revision_number}")
-        # Consider adding cleanup for new_revision_dir if it was created 
before an error
-        raise
+        # Flush but do not commit the row to get its name and number
+        await data.flush()
+        # The row is still invisible to other sessions
+        creating.new = new_revision
+        # The caller will now have the details about the new revision
+
+        # Rename the directory to the new revision number
+        await data.refresh(release)
+        new_revision_dir = util.release_directory(release)
+        # Ensure that the parent directory exists
+        await aiofiles.os.makedirs(new_revision_dir.parent, exist_ok=True)
+        # Rename the temporary interim directory to the new revision number
+        await aiofiles.os.rename(temp_dir, new_revision_dir)
+        # creating.interim_path = None
+
+        # Run checks if in DRAFT phase
+        if release.phase == models.ReleasePhase.RELEASE_CANDIDATE_DRAFT:
+            # Must use caller_data here because we acquired the write lock
+            await tasks.draft_checks(project_name, version_name, new_revision.number, caller_data=data)
+        # Commit by leaving the data.begin() context manager
 
 
 async def latest_info(project_name: str, version_name: str) -> tuple[str, str, datetime.datetime] | None:
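
Note: the new flow stages all writes in a tempfile.mkdtemp directory,
optionally seeded with hard links to the previous revision, and publishes
it with a single rename once the database transaction is ready to commit.
A standalone sketch of that staging pattern, assuming the temporary and
final directories are on the same filesystem so the rename is atomic (in
the real code the final path comes from util.release_directory):

    import asyncio
    import contextlib
    import pathlib
    import tempfile
    from collections.abc import AsyncGenerator

    import aiofiles.os
    import aioshutil

    @contextlib.asynccontextmanager
    async def staged(final_dir: pathlib.Path) -> AsyncGenerator[pathlib.Path]:
        # Stage all writes in a private temporary directory
        temp_dir = pathlib.Path(await asyncio.to_thread(tempfile.mkdtemp))
        try:
            yield temp_dir
        except Exception:
            # Nothing was published, so discard the staging area
            await aioshutil.rmtree(temp_dir)  # type: ignore[call-arg]
            raise
        await aiofiles.os.makedirs(final_dir.parent, exist_ok=True)
        # One rename makes the whole tree appear at once
        await aiofiles.os.rename(temp_dir, final_dir)

Readers therefore see either the old revision directory or the complete
new one, never a half-written mixture.
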
diff --git a/atr/routes/draft.py b/atr/routes/draft.py
index 9f09c99..de196f2 100644
--- a/atr/routes/draft.py
+++ b/atr/routes/draft.py
@@ -24,7 +24,7 @@ import datetime
 import hashlib
 import logging
 import pathlib
-from typing import TYPE_CHECKING, Protocol, TypeVar
+from typing import TYPE_CHECKING, Final, Protocol, TypeVar
 
 import aiofiles.os
 import aioshutil
@@ -48,7 +48,7 @@ if TYPE_CHECKING:
     import werkzeug.wrappers.response as response
 
 # _CONFIG: Final = config.get()
-# _LOGGER: Final = logging.getLogger(__name__)
+_LOGGER: Final = logging.getLogger(__name__)
 
 
 T = TypeVar("T")
@@ -141,21 +141,17 @@ async def delete_file(session: routes.CommitterSession, project_name: str, versi
 
     try:
         description = "File deletion through web interface"
-        async with revision.create_and_manage(project_name, version_name, session.uid, description=description) as (
-            new_revision_dir,
-            new_revision_number,
-        ):
+        async with revision.create_and_manage(
+            project_name, version_name, session.uid, description=description
+        ) as creating:
             # Uses new_revision_number for logging only
             # Path to delete within the new revision directory
-            path_in_new_revision = new_revision_dir / rel_path_to_delete
+            path_in_new_revision = creating.interim_path / rel_path_to_delete
 
             # Check that the file exists in the new revision
             if not await aiofiles.os.path.exists(path_in_new_revision):
                # This indicates a potential severe issue with hard linking or logic
-                logging.error(
-                    f"SEVERE ERROR! File {rel_path_to_delete} not found in new revision"
-                    f" {new_revision_number} before deletion"
-                )
+                logging.error(f"SEVERE ERROR! File {rel_path_to_delete} not found in new revision before deletion")
                raise routes.FlashError("File to delete was not found in the new revision")
 
             # Check whether the file is an artifact
@@ -163,7 +159,7 @@ async def delete_file(session: routes.CommitterSession, project_name: str, versi
                # If so, delete all associated metadata files in the new revision
                async for p in util.paths_recursive(path_in_new_revision.parent):
                     # Construct full path within the new revision
-                    metadata_path_obj = new_revision_dir / p
+                    metadata_path_obj = creating.interim_path / p
                     if p.name.startswith(rel_path_to_delete.name + "."):
                         await aiofiles.os.remove(metadata_path_obj)
                         metadata_files_deleted += 1
@@ -198,11 +194,10 @@ async def fresh(session: routes.CommitterSession, project_name: str, version_nam
     # This doesn't make sense unless the checks themselves have been updated
     # Therefore we only show the button for this to admins
     description = "Empty revision to restart all checks for the whole release 
candidate draft"
-    async with revision.create_and_manage(project_name, version_name, 
session.uid, description=description) as (
-        _new_revision_dir,
-        _new_revision_number,
-    ):
-        ...
+    async with revision.create_and_manage(
+        project_name, version_name, session.uid, description=description
+    ) as _creating:
+        pass
 
     return await session.redirect(
         compose.selected,
@@ -231,20 +226,17 @@ async def hashgen(
 
     try:
         description = "Hash generation through web interface"
-        async with revision.create_and_manage(project_name, version_name, session.uid, description=description) as (
-            new_revision_dir,
-            new_revision_number,
-        ):
+        async with revision.create_and_manage(
+            project_name, version_name, session.uid, description=description
+        ) as creating:
             # Uses new_revision_number for logging only
-            path_in_new_revision = new_revision_dir / rel_path
+            path_in_new_revision = creating.interim_path / rel_path
             hash_path_rel = rel_path.name + f".{hash_type}"
-            hash_path_in_new_revision = new_revision_dir / rel_path.parent / hash_path_rel
+            hash_path_in_new_revision = creating.interim_path / rel_path.parent / hash_path_rel
 
             # Check that the source file exists in the new revision
             if not await aiofiles.os.path.exists(path_in_new_revision):
-                logging.error(
-                    f"Source file {rel_path} not found in new revision 
{new_revision_number} for hash generation."
-                )
+                logging.error(f"Source file {rel_path} not found in new 
revision for hash generation.")
                 raise routes.FlashError("Source file not found in the new 
revision.")
 
            # Check that the hash file does not already exist in the new revision
@@ -291,53 +283,53 @@ async def sbomgen(
 
     try:
         description = "SBOM generation through web interface"
-        async with revision.create_and_manage(project_name, version_name, session.uid, description=description) as (
-            new_revision_dir,
-            new_revision_number,
-        ):
+        async with revision.create_and_manage(
+            project_name, version_name, session.uid, description=description
+        ) as creating:
             # Uses new_revision_number in a functional way
-            path_in_new_revision = new_revision_dir / rel_path
+            path_in_new_revision = creating.interim_path / rel_path
            sbom_path_rel = rel_path.with_suffix(rel_path.suffix + ".cdx.json").name
-            sbom_path_in_new_revision = new_revision_dir / rel_path.parent / sbom_path_rel
+            sbom_path_in_new_revision = creating.interim_path / rel_path.parent / sbom_path_rel
 
             # Check that the source file exists in the new revision
             if not await aiofiles.os.path.exists(path_in_new_revision):
-                logging.error(
-                    f"Source file {rel_path} not found in new revision 
{new_revision_number} for SBOM generation."
-                )
+                logging.error(f"Source file {rel_path} not found in new 
revision for SBOM generation.")
                 raise routes.FlashError("Source artifact file not found in the 
new revision.")
 
            # Check that the SBOM file does not already exist in the new revision
            if await aiofiles.os.path.exists(sbom_path_in_new_revision):
                raise base.ASFQuartException("SBOM file already exists", errorcode=400)
 
-            # Create and queue the task, using paths within the new revision
-            async with db.session() as data:
-                # We still need release.name for the task metadata
-                release = await session.release(project_name, version_name, data=data)
-
-                sbom_task = models.Task(
-                    task_type=models.TaskType.SBOM_GENERATE_CYCLONEDX,
-                    task_args=sbom.GenerateCycloneDX(
-                        artifact_path=str(path_in_new_revision.resolve()),
-                        output_path=str(sbom_path_in_new_revision.resolve()),
-                    ).model_dump(),
-                    added=datetime.datetime.now(datetime.UTC),
-                    status=models.TaskStatus.QUEUED,
-                    release_name=release.name,
-                    revision_number=new_revision_number,
-                )
-                data.add(sbom_task)
-                await data.commit()
-
-                # We must wait until the sbom_task is complete before we can queue checks
-                # Maximum wait time is 60 * 100ms = 6000ms
-                for _attempt in range(60):
-                    await data.refresh(sbom_task)
-                    if sbom_task.status != models.TaskStatus.QUEUED:
-                        break
-                    # Wait 100ms before checking again
-                    await asyncio.sleep(0.1)
+        if creating.new is None:
+            raise routes.FlashError("Internal error: New revision not found")
+
+        # Create and queue the task, using paths within the new revision
+        async with db.session() as data:
+            # We still need release.name for the task metadata
+            release = await session.release(project_name, version_name, data=data)
+
+            sbom_task = models.Task(
+                task_type=models.TaskType.SBOM_GENERATE_CYCLONEDX,
+                task_args=sbom.GenerateCycloneDX(
+                    artifact_path=str(path_in_new_revision.resolve()),
+                    output_path=str(sbom_path_in_new_revision.resolve()),
+                ).model_dump(),
+                added=datetime.datetime.now(datetime.UTC),
+                status=models.TaskStatus.QUEUED,
+                release_name=release.name,
+                revision_number=creating.new.number,
+            )
+            data.add(sbom_task)
+            await data.commit()
+
+            # We must wait until the sbom_task is complete before we can queue checks
+            # Maximum wait time is 60 * 100ms = 6000ms
+            for _attempt in range(60):
+                await data.refresh(sbom_task)
+                if sbom_task.status != models.TaskStatus.QUEUED:
+                    break
+                # Wait 100ms before checking again
+                await asyncio.sleep(0.1)
 
     except Exception as e:
         logging.exception("Error generating SBOM:")
diff --git a/atr/routes/finish.py b/atr/routes/finish.py
index 4c6b5ba..6c632dc 100644
--- a/atr/routes/finish.py
+++ b/atr/routes/finish.py
@@ -149,14 +149,13 @@ async def _move_file_to_revision(
 ) -> tuple[quart_response.Response, int] | response.Response | None:
     try:
         description = "File move through web interface"
-        async with revision.create_and_manage(project_name, version_name, session.uid, description=description) as (
-            new_revision_dir,
-            _new_revision_number,
-        ):
+        async with revision.create_and_manage(
+            project_name, version_name, session.uid, description=description
+        ) as creating:
             related_files = _related_files(source_file_rel)
-            bundle = [f for f in related_files if await aiofiles.os.path.exists(new_revision_dir / f)]
+            bundle = [f for f in related_files if await aiofiles.os.path.exists(creating.interim_path / f)]
            collisions = [
-                f.name for f in bundle if await aiofiles.os.path.exists(new_revision_dir / target_dir_rel / f.name)
+                f.name for f in bundle if await aiofiles.os.path.exists(creating.interim_path / target_dir_rel / f.name)
             ]
             if collisions:
                 msg = f"Files already exist in '{target_dir_rel}': {', 
'.join(collisions)}"
@@ -166,7 +165,7 @@ async def _move_file_to_revision(
                return await session.redirect(selected, project_name=project_name, version_name=version_name)
 
             for f in bundle:
-                await aiofiles.os.rename(new_revision_dir / f, new_revision_dir / target_dir_rel / f.name)
+                await aiofiles.os.rename(creating.interim_path / f, creating.interim_path / target_dir_rel / f.name)

        await quart.flash(f"Moved {', '.join(f.name for f in bundle)}", "success")
        return await session.redirect(selected, project_name=project_name, version_name=version_name)
diff --git a/atr/routes/keys.py b/atr/routes/keys.py
index e30678e..8503da5 100644
--- a/atr/routes/keys.py
+++ b/atr/routes/keys.py
@@ -192,11 +192,10 @@ async def import_selected_revision(
     # Remove the KEYS file if 100% imported
     if (success_count > 0) and (error_count == 0):
         description = "Removed KEYS file after successful import through web 
interface"
-        async with revision.create_and_manage(project_name, version_name, 
session.uid, description=description) as (
-            new_revision_dir,
-            _new_revision_number,
-        ):
-            path_in_new_revision = new_revision_dir / "KEYS"
+        async with revision.create_and_manage(
+            project_name, version_name, session.uid, description=description
+        ) as creating:
+            path_in_new_revision = creating.interim_path / "KEYS"
             await aiofiles.os.remove(path_in_new_revision)
     return await session.redirect(
         compose.selected,
diff --git a/atr/routes/resolve.py b/atr/routes/resolve.py
index d23e2e5..7a7e72b 100644
--- a/atr/routes/resolve.py
+++ b/atr/routes/resolve.py
@@ -124,10 +124,9 @@ async def selected_post(
                 destination = compose.selected
 
     description = "Create a preview revision from the last candidate draft"
-    async with revision.create_and_manage(project_name, release.version, session.uid, description=description) as (
-        _new_revision_dir,
-        _new_revision_number,
-    ):
+    async with revision.create_and_manage(
+        project_name, release.version, session.uid, description=description
+    ) as _creating:
         pass
 
    error_message = await _send_resolution(session, release, vote_result, resolution_body)
diff --git a/atr/routes/revisions.py b/atr/routes/revisions.py
index 16a5dce..892046b 100644
--- a/atr/routes/revisions.py
+++ b/atr/routes/revisions.py
@@ -116,18 +116,16 @@ async def selected_post(session: routes.CommitterSession, project_name: str, ver
             )
 
     description = f"Copy of revision {selected_revision_number} through web 
interface"
-    async with revision.create_and_manage(project_name, version_name, 
session.uid, description=description) as (
-        new_revision_dir,
-        new_revision_number,
-    ):
-        # Uses new_revision_number after this block
+    async with revision.create_and_manage(project_name, version_name, session.uid, description=description) as creating:
         # TODO: Stop create_and_manage from hard linking the parent first
-        await aioshutil.rmtree(new_revision_dir)  # type: ignore[call-arg]
-        await util.create_hard_link_clone(selected_revision_dir, new_revision_dir)
+        await aioshutil.rmtree(creating.interim_path)  # type: ignore[call-arg]
+        await util.create_hard_link_clone(selected_revision_dir, creating.interim_path)
 
+    if creating.new is None:
+        raise base.ASFQuartException("Internal error: New revision not found", 
errorcode=500)
     return await session.redirect(
         selected,
-        success=f"Copied revision {selected_revision_number} to new latest 
revision, {new_revision_number}",
+        success=f"Copied revision {selected_revision_number} to new latest 
revision, {creating.new.number}",
         project_name=project_name,
         version_name=version_name,
     )
diff --git a/atr/routes/start.py b/atr/routes/start.py
index 94ea80e..dfd38e7 100644
--- a/atr/routes/start.py
+++ b/atr/routes/start.py
@@ -92,10 +92,7 @@ async def create_release_draft(project_name: str, version: str, asf_uid: str) ->
         await data.refresh(release)
 
     description = "Creation of empty release candidate draft through web 
interface"
-    async with revision.create_and_manage(project_name, version, asf_uid, 
description=description) as (
-        _new_revision_dir,
-        _new_revision_number,
-    ):
+    async with revision.create_and_manage(project_name, version, asf_uid, description=description) as _creating:
         pass
     return release, project
 
diff --git a/atr/routes/upload.py b/atr/routes/upload.py
index cdbd7c2..5e08150 100644
--- a/atr/routes/upload.py
+++ b/atr/routes/upload.py
@@ -137,10 +137,7 @@ async def _upload_files(
     """Process and save the uploaded files into a new draft revision."""
     number_of_files = len(files)
     description = f"Upload of {number_of_files} file{'' if number_of_files == 
1 else 's'} through web interface"
-    async with revision.create_and_manage(project_name, version_name, asf_uid, 
description=description) as (
-        new_revision_dir,
-        _new_revision_number,
-    ):
+    async with revision.create_and_manage(project_name, version_name, asf_uid, description=description) as creating:
 
         def get_target_path(file: datastructures.FileStorage) -> pathlib.Path:
             # Determine the target path within the new revision directory
@@ -156,7 +153,7 @@ async def _upload_files(
                 relative_file_path = file_name.relative_to(file_name.anchor)
 
             # Construct path inside the new revision directory
-            target_path = new_revision_dir / relative_file_path
+            target_path = creating.interim_path / relative_file_path
             return target_path
 
         # Save each uploaded file to the new revision directory
diff --git a/atr/ssh.py b/atr/ssh.py
index 63629e9..01005e1 100644
--- a/atr/ssh.py
+++ b/atr/ssh.py
@@ -484,29 +484,33 @@ async def _step_07b_process_validated_rsync_write(
 
         # Create the draft revision directory structure
         description = "File synchronisation through ssh, using rsync"
-        async with revision.create_and_manage(project_name, version_name, asf_uid, description=description) as (
-            new_revision_dir,
-            new_revision_number,
-        ):
+        async with revision.create_and_manage(project_name, version_name, asf_uid, description=description) as creating:
             # Uses new_revision_number for logging only
-            _LOGGER.info(f"Created draft revision directory: 
{new_revision_dir} ({new_revision_number})")
+            if creating.old is not None:
+                _LOGGER.info(f"Using old revision {creating.old.number} and 
interim path {creating.interim_path}")
             # Update the rsync command path to the new revision directory
-            argv[path_index] = str(new_revision_dir)
+            argv[path_index] = str(creating.interim_path)
 
             ###################################################
             ### Calls _step_08_execute_rsync_upload_command ###
             ###################################################
             exit_status = await _step_08_execute_rsync(process, argv)
             if exit_status != 0:
+                if creating.old is not None:
+                    for_revision = f"successor of revision 
{creating.old.number}"
+                else:
+                    for_revision = f"initial revision for release 
{project_name}-{version_name}"
                 _LOGGER.error(
-                    f"rsync upload failed with exit status {exit_status} for 
revision {new_revision_number}. "
+                    f"rsync upload failed with exit status {exit_status} for 
{for_revision}. "
                     f"Command: {process.command} (run as {' '.join(argv)})"
                 )
-
-            _LOGGER.info(f"rsync upload successful for revision 
{new_revision_number}")
-            # Close the connection unconditionally
-            # If we use "if not process.is_closing():" then it fails
-            process.exit(exit_status)
+        if creating.new is not None:
+            _LOGGER.info(f"rsync upload successful for revision 
{creating.new.number}")
+        else:
+            _LOGGER.info(f"rsync upload successful for release 
{project_name}-{version_name}")
+        # Close the connection unconditionally
+        # If we use "if not process.is_closing():" then it fails
+        process.exit(exit_status)
 
     except Exception as e:
         _LOGGER.exception(f"Error during draft revision processing for 
{project_name}-{version_name}")
diff --git a/atr/tasks/svn.py b/atr/tasks/svn.py
index 31e6911..c09f003 100644
--- a/atr/tasks/svn.py
+++ b/atr/tasks/svn.py
@@ -72,24 +72,21 @@ async def _import_files_core(args: SvnImport) -> str:
     description = "Import of files from subversion"
     async with revision.create_and_manage(
        args.project_name, args.version_name, args.asf_uid, description=description
-    ) as (
-        new_revision_dir,
-        new_revision_number,
-    ):
-        # Uses new_revision_number after this block
-        _LOGGER.debug(f"Created revision directory: {new_revision_dir}")
-
-        final_target_path = new_revision_dir
+    ) as creating:
+        # Uses creating.new after this block
+        _LOGGER.debug(f"Created revision directory: {creating.interim_path}")
+
+        final_target_path = creating.interim_path
         if args.target_subdirectory:
-            final_target_path = new_revision_dir / args.target_subdirectory
+            final_target_path = creating.interim_path / args.target_subdirectory
            # Validate that final_target_path is a subdirectory of new_revision_dir
-            if not final_target_path.is_relative_to(new_revision_dir):
+            if not final_target_path.is_relative_to(creating.interim_path):
                raise SvnImportError(
-                    f"Target subdirectory {args.target_subdirectory} is not a subdirectory of {new_revision_dir}"
+                    f"Target subdirectory {args.target_subdirectory} is not a subdirectory of {creating.interim_path}"
                 )
             await aiofiles.os.makedirs(final_target_path, exist_ok=True)
 
-        temp_export_path = new_revision_dir / temp_export_dir_name
+        temp_export_path = creating.interim_path / temp_export_dir_name
 
         svn_command = [
             "svn",
@@ -121,7 +118,9 @@ async def _import_files_core(args: SvnImport) -> str:
         await aiofiles.os.rmdir(temp_export_path)
         _LOGGER.info(f"Removed temporary export directory: {temp_export_path}")
 
-    return f"Successfully imported files from SVN into revision 
{new_revision_number}"
+    if creating.new is None:
+        raise SvnImportError("Internal error: New revision not found")
+    return f"Successfully imported files from SVN into revision 
{creating.new.number}"
 
 
 async def _import_files_core_run_svn_export(svn_command: list[str], temp_export_path: pathlib.Path) -> None:
diff --git a/atr/util.py b/atr/util.py
index 27f8d20..e219175 100644
--- a/atr/util.py
+++ b/atr/util.py
@@ -182,7 +182,9 @@ async def content_list(
         )
 
 
-async def create_hard_link_clone(source_dir: pathlib.Path, dest_dir: pathlib.Path) -> None:
+async def create_hard_link_clone(
+    source_dir: pathlib.Path, dest_dir: pathlib.Path, do_not_create_dest_dir: bool = False
+) -> None:
     """Recursively create a clone of source_dir in dest_dir using hard links for files."""
     # TODO: We're currently using cp -al instead
     # Ensure source exists and is a directory
@@ -190,7 +192,8 @@ async def create_hard_link_clone(source_dir: pathlib.Path, dest_dir: pathlib.Pat
        raise ValueError(f"Source path is not a directory or does not exist: {source_dir}")
 
     # Create destination directory
-    await aiofiles.os.makedirs(dest_dir, exist_ok=False)
+    if do_not_create_dest_dir is False:
+        await aiofiles.os.makedirs(dest_dir, exist_ok=False)
 
    async def _clone_recursive(current_source: pathlib.Path, current_dest: pathlib.Path) -> None:
         for entry in await aiofiles.os.scandir(current_source):
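
Note: the database half of the fix is the flush-without-commit pattern in
atr/revision.py above: the Revision row is added and flushed inside
data.begin(), so its event listeners populate name and number, but the row
stays invisible to other sessions until the directory rename has happened
and the transaction commits. A throwaway sketch of that pattern, using
plain SQLAlchemy and SQLite rather than the project's own db helpers:

    import asyncio

    import sqlalchemy.ext.asyncio as sa_async
    import sqlalchemy.orm as orm

    class Base(orm.DeclarativeBase):
        pass

    class Revision(Base):
        __tablename__ = "revision"
        id: orm.Mapped[int] = orm.mapped_column(primary_key=True)
        release_name: orm.Mapped[str]

    async def main() -> None:
        engine = sa_async.create_async_engine("sqlite+aiosqlite://")
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)
        make_session = sa_async.async_sessionmaker(engine)
        async with make_session() as session, session.begin():
            row = Revision(release_name="example-1.0")
            session.add(row)
            # Flush assigns defaults and fires ORM events without
            # committing, so the row is usable here but still invisible
            # to other sessions
            await session.flush()
            print(row.id)
            # Filesystem work, such as the directory rename, goes here
        # Leaving session.begin() commits, publishing the row and the
        # files together

    asyncio.run(main())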


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
