This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch sbp
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/sbp by this push:
     new c777bb4  Add a merge module to automatically resolve conflicts between 
revisions
c777bb4 is described below

commit c777bb480a18c23db304a4cd28bc06ace23ad88b
Author: Sean B. Palmer <[email protected]>
AuthorDate: Tue Feb 10 20:36:48 2026 +0000

    Add a merge module to automatically resolve conflicts between revisions
---
 atr/merge.py             | 255 +++++++++++++++++++++++++++++
 atr/util.py              |   3 +-
 tests/unit/test_merge.py | 410 +++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 667 insertions(+), 1 deletion(-)

diff --git a/atr/merge.py b/atr/merge.py
new file mode 100644
index 0000000..5ea0fe6
--- /dev/null
+++ b/atr/merge.py
@@ -0,0 +1,255 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import asyncio
+import os
+from typing import TYPE_CHECKING
+
+import aiofiles.os
+
+import atr.attestable as attestable
+import atr.util as util
+
+if TYPE_CHECKING:
+    import pathlib
+
+
async def merge(
    base_inodes: dict[str, int],
    base_hashes: dict[str, str],
    prior_dir: pathlib.Path,
    project_name: str,
    version_name: str,
    prior_revision_number: str,
    temp_dir: pathlib.Path,
    n_inodes: dict[str, int],
    n_hashes: dict[str, str],
    n_sizes: dict[str, int],
) -> None:
    """Three-way merge of the prior revision into the new (N) tree at temp_dir.

    Each path is compared across three revisions: base (common ancestor),
    prior, and new.  Files are compared first by inode (hard-link identity)
    and only then by content hash.  The numbered "Case" comments refer to a
    merge case table — presumably documented elsewhere in the project; confirm
    against the module's design notes.

    Mutates n_hashes and n_sizes in place and creates/removes files under
    temp_dir.
    """
    # Note: the present function modifies n_hashes and n_sizes in place
    # This happens in the _add_from_prior and _replace_with_prior calls somewhat below
    prior_inodes = await asyncio.to_thread(util.paths_to_inodes, prior_dir)
    # Loaded lazily on first need; the helpers return it so later iterations reuse it.
    prior_hashes: dict[str, str] | None = None

    # Collect implicit directory paths from new (N) files for type conflict detection
    n_dirs: set[str] = set()
    for file_path in n_inodes:
        parts = file_path.split("/")
        for i in range(1, len(parts)):
            n_dirs.add("/".join(parts[:i]))

    all_paths = base_inodes.keys() | prior_inodes.keys() | n_inodes.keys()

    for path in sorted(all_paths):
        b_ino = base_inodes.get(path)
        p_ino = prior_inodes.get(path)
        n_ino = n_inodes.get(path)

        # Case 9: only the prior revision introduced this path
        if (b_ino is None) and (p_ino is not None) and (n_ino is None):
            # Skip when the path (or an ancestor of it) collides with an
            # entry of the opposite type in the new tree.
            if _has_type_conflict(path, n_inodes, n_dirs):
                continue
            # An existing directory in the new tree also blocks the add.
            if await aiofiles.os.path.isdir(temp_dir / path):
                continue
            prior_hashes = await _add_from_prior(
                prior_dir,
                temp_dir,
                path,
                n_hashes,
                n_sizes,
                prior_hashes,
                project_name,
                version_name,
                prior_revision_number,
            )
            continue

        # The prior revision deleted a path that both base and new have
        if (b_ino is not None) and (p_ino is None) and (n_ino is not None):
            if _content_matches(b_ino, n_ino, base_hashes[path], n_hashes[path]):
                # Case 10: new still has the base content so the deletion applies
                await aiofiles.os.remove(temp_dir / path)
                # Update n_hashes and n_sizes in place
                n_hashes.pop(path, None)
                n_sizes.pop(path, None)
            # Case 13: new has different content so new wins
            continue

        # Cases 4, 5, 6, 8, 11, and 15: all three revisions have this path
        if (b_ino is not None) and (p_ino is not None) and (n_ino is not None):
            prior_hashes = await _merge_all_present(
                base_inodes,
                base_hashes,
                prior_dir,
                temp_dir,
                path,
                b_ino,
                p_ino,
                n_ino,
                n_hashes,
                n_sizes,
                prior_hashes,
                project_name,
                version_name,
                prior_revision_number,
            )
+
+
async def _add_from_prior(
    prior_dir: pathlib.Path,
    temp_dir: pathlib.Path,
    path: str,
    n_hashes: dict[str, str],
    n_sizes: dict[str, int],
    prior_hashes: dict[str, str] | None,
    project_name: str,
    version_name: str,
    prior_revision_number: str,
) -> dict[str, str] | None:
    """Hard-link *path* from the prior revision tree into the merge tree.

    Mutates n_hashes and n_sizes in place, preferring the recorded prior hash
    over recomputing one.  Returns the (possibly freshly loaded) prior hash
    map so the caller can cache it across calls.
    """
    destination = temp_dir / path
    # Ensure the parent chain exists with normalised permissions, then link.
    await asyncio.to_thread(_makedirs_with_permissions, destination.parent, temp_dir)
    await aiofiles.os.link(prior_dir / path, destination)

    # Lazily fetch the attestable hash record on first use.
    if prior_hashes is None:
        prior_hashes = await attestable.load_paths(project_name, version_name, prior_revision_number)

    # Update n_hashes and n_sizes in place.
    recorded = prior_hashes is not None and path in prior_hashes
    if recorded:
        n_hashes[path] = prior_hashes[path]
    else:
        # No attestable record for this path: hash the linked file directly.
        n_hashes[path] = await attestable.compute_file_hash(destination)
    n_sizes[path] = (await aiofiles.os.stat(destination)).st_size
    return prior_hashes
+
+
+def _content_matches(
+    b_ino: int,
+    n_ino: int,
+    b_hash: str,
+    n_hash: str,
+) -> bool:
+    if b_ino == n_ino:
+        return True
+    return b_hash == n_hash
+
+
+def _has_type_conflict(path: str, n_inodes: dict[str, int], n_dirs: set[str]) 
-> bool:
+    if path in n_dirs:
+        return True
+    parts = path.split("/")
+    return any("/".join(parts[:i]) in n_inodes for i in range(1, len(parts)))
+
+
def _makedirs_with_permissions(target_parent: pathlib.Path, root: pathlib.Path) -> None:
    """Create *target_parent* (and intermediates) and normalise permissions.

    Every directory from *target_parent* up to, but excluding, *root* is
    chmodded to util.DIRECTORY_PERMISSIONS.  Assumes target_parent lies
    beneath root; defensively stops at the filesystem root rather than
    looping forever if it does not.
    """
    os.makedirs(target_parent, exist_ok=True)
    current = target_parent
    while current != root:
        os.chmod(current, util.DIRECTORY_PERMISSIONS)
        parent = current.parent
        if parent == current:
            # Reached the filesystem root without meeting *root* (e.g. the
            # two paths are spelled differently or root is not an ancestor).
            # Path("/").parent == Path("/"), so without this guard the loop
            # would spin forever chmodding the root directory.
            break
        current = parent
+
+
async def _merge_all_present(
    _base_inodes: dict[str, int],
    base_hashes: dict[str, str],
    prior_dir: pathlib.Path,
    temp_dir: pathlib.Path,
    path: str,
    b_ino: int,
    p_ino: int,
    n_ino: int,
    n_hashes: dict[str, str],
    n_sizes: dict[str, int],
    prior_hashes: dict[str, str] | None,
    project_name: str,
    version_name: str,
    prior_revision_number: str,
) -> dict[str, str] | None:
    """Resolve a path that exists in base, prior, and new.

    Inode equality (hard-link identity) is used as a cheap first test; content
    hashes are consulted only when all three inodes differ.  Returns the prior
    hash map (loaded lazily here if needed) so the caller can cache it.
    """
    # Cases 6, 8: prior and new share an inode so they already agree
    if p_ino == n_ino:
        return prior_hashes

    # Cases 4, 5: base and prior share an inode so there was no intervening change
    if b_ino == p_ino:
        return prior_hashes

    # Case 11 via inode: base and new share an inode so prior wins
    if b_ino == n_ino:
        return await _replace_with_prior(
            prior_dir,
            temp_dir,
            path,
            n_hashes,
            n_sizes,
            prior_hashes,
            project_name,
            version_name,
            prior_revision_number,
        )

    # Cases 4, 5, 8, 11, 15: all inodes differ, so use hash to distinguish
    b_hash = base_hashes[path]
    n_hash = n_hashes[path]
    if b_hash == n_hash:
        # Lazily fetch the attestable record; fall back to hashing the prior
        # file directly when the record does not cover this path.
        if prior_hashes is None:
            prior_hashes = await attestable.load_paths(project_name, version_name, prior_revision_number)
        if (prior_hashes is not None) and (path in prior_hashes):
            p_hash = prior_hashes[path]
        else:
            p_hash = await attestable.compute_file_hash(prior_dir / path)
        if p_hash != b_hash:
            # Case 11 via hash: base and new have the same content but prior differs
            return await _replace_with_prior(
                prior_dir,
                temp_dir,
                path,
                n_hashes,
                n_sizes,
                prior_hashes,
                project_name,
                version_name,
                prior_revision_number,
            )

    # Cases 4, 5, 8, 15: no merge action needed so new wins
    return prior_hashes
+
+
async def _replace_with_prior(
    prior_dir: pathlib.Path,
    temp_dir: pathlib.Path,
    path: str,
    n_hashes: dict[str, str],
    n_sizes: dict[str, int],
    prior_hashes: dict[str, str] | None,
    project_name: str,
    version_name: str,
    prior_revision_number: str,
) -> dict[str, str] | None:
    """Swap the merge tree's copy of *path* for the prior revision's file.

    Removes the existing entry and hard-links the prior file in its place.
    Mutates n_hashes and n_sizes in place, preferring the recorded prior hash
    over recomputing one.  Returns the (possibly freshly loaded) prior hash
    map so the caller can cache it.
    """
    destination = temp_dir / path
    # Replace the entry: unlink, then hard-link the prior revision's file.
    await aiofiles.os.remove(destination)
    await aiofiles.os.link(prior_dir / path, destination)

    # Lazily fetch the attestable hash record on first use.
    if prior_hashes is None:
        prior_hashes = await attestable.load_paths(project_name, version_name, prior_revision_number)

    # Update n_hashes and n_sizes in place.
    recorded = prior_hashes is not None and path in prior_hashes
    if recorded:
        n_hashes[path] = prior_hashes[path]
    else:
        # No attestable record for this path: hash the linked file directly.
        n_hashes[path] = await attestable.compute_file_hash(destination)
    n_sizes[path] = (await aiofiles.os.stat(destination)).st_size
    return prior_hashes
diff --git a/atr/util.py b/atr/util.py
index 9c58a7f..13d9480 100644
--- a/atr/util.py
+++ b/atr/util.py
@@ -59,6 +59,7 @@ import atr.user as user
 T = TypeVar("T")
 
 ARCHIVE_ROOT_SUFFIXES: Final[tuple[str, ...]] = ("-source", "-src")
+DIRECTORY_PERMISSIONS: Final[int] = 0o755
 DEV_TEST_MID: Final[str] = 
"CAH5JyZo8QnWmg9CwRSwWY=givhxw4nilyenjo71fkdk81j5...@mail.gmail.com"
 DEV_THREAD_URLS: Final[dict[str, str]] = {
     "CAH5JyZo8QnWmg9CwRSwWY=givhxw4nilyenjo71fkdk81j5...@mail.gmail.com": 
"https://lists.apache.org/thread/z0o7xnjnyw2o886rxvvq2ql4rdfn754w";,
@@ -205,7 +206,7 @@ async def atomic_write_file(file_path: pathlib.Path, 
content: str, encoding: str
         raise
 
 
-def chmod_directories(path: pathlib.Path, permissions: int = 0o755) -> None:
+def chmod_directories(path: pathlib.Path, permissions: int = 
DIRECTORY_PERMISSIONS) -> None:
     # codeql[py/overly-permissive-file]
     os.chmod(path, permissions)
     for dir_path in path.rglob("*"):
diff --git a/tests/unit/test_merge.py b/tests/unit/test_merge.py
new file mode 100644
index 0000000..48c5d22
--- /dev/null
+++ b/tests/unit/test_merge.py
@@ -0,0 +1,410 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import pathlib
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+import atr.merge as merge
+import atr.util as util
+
+
@pytest.mark.asyncio
async def test_case_09_prior_adds_file(tmp_path: pathlib.Path):
    """Case 9: a file only the prior revision added is linked into the new tree."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (prior_dir / "added.txt").write_text("new content")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes: dict[str, str] = {}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes: dict[str, str] = {}
    n_sizes: dict[str, int] = {}

    mock_prior_hashes = {"added.txt": "blake3:abc123"}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value=mock_prior_hashes):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    # The hash must come from the mocked attestable record, not a recomputation.
    assert (temp_dir / "added.txt").exists()
    assert (temp_dir / "added.txt").read_text() == "new content"
    assert n_hashes["added.txt"] == "blake3:abc123"
    assert n_sizes["added.txt"] == len("new content")
+
+
@pytest.mark.asyncio
async def test_case_09_prior_adds_file_in_subdirectory(tmp_path: pathlib.Path):
    """Case 9: adding a prior-only file creates missing parent directories."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (prior_dir / "apple").mkdir()
    (prior_dir / "apple" / "banana.txt").write_text("nested")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes: dict[str, str] = {}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes: dict[str, str] = {}
    n_sizes: dict[str, int] = {}

    mock_prior_hashes = {"apple/banana.txt": "blake3:xyz890"}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value=mock_prior_hashes):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    assert (temp_dir / "apple" / "banana.txt").exists()
    assert n_hashes["apple/banana.txt"] == "blake3:xyz890"
+
+
@pytest.mark.asyncio
async def test_case_10_prior_deletion_via_hash(tmp_path: pathlib.Path):
    """Case 10: prior deleted a file; new matches base by hash, so the deletion applies."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    # Same bytes but separate files, so the inodes differ and only the
    # hash comparison can establish that new still holds base content.
    (base_dir / "removed.txt").write_text("same content")
    (temp_dir / "removed.txt").write_text("same content")

    base_inodes = util.paths_to_inodes(base_dir)
    n_inodes = util.paths_to_inodes(temp_dir)

    assert base_inodes["removed.txt"] != n_inodes["removed.txt"]

    base_hashes = {"removed.txt": "blake3:matching"}
    n_hashes = {"removed.txt": "blake3:matching"}
    n_sizes = {"removed.txt": len("same content")}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={}):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    # The file and its bookkeeping entries are all gone.
    assert not (temp_dir / "removed.txt").exists()
    assert "removed.txt" not in n_hashes
    assert "removed.txt" not in n_sizes
+
+
@pytest.mark.asyncio
async def test_case_10_prior_deletion_via_inode(tmp_path: pathlib.Path):
    """Case 10: prior deleted a file; new shares base's inode, so the deletion applies."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    # Hard link gives base and new the same inode: the cheap identity check
    # establishes the match without consulting hashes.
    (base_dir / "removed.txt").write_text("to be deleted")
    os.link(base_dir / "removed.txt", temp_dir / "removed.txt")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes = {"removed.txt": "blake3:aaa"}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes = {"removed.txt": "blake3:aaa"}
    n_sizes = {"removed.txt": len("to be deleted")}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={}):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    assert not (temp_dir / "removed.txt").exists()
    assert "removed.txt" not in n_hashes
    assert "removed.txt" not in n_sizes
+
+
@pytest.mark.asyncio
async def test_case_11_prior_replacement_via_hash(tmp_path: pathlib.Path):
    """Case 11 (hash path): base and new agree by hash but prior differs, so prior wins."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (base_dir / "shared.txt").write_text("original")
    (temp_dir / "shared.txt").write_text("original")
    (prior_dir / "shared.txt").write_text("updated by prior")

    base_inodes = util.paths_to_inodes(base_dir)
    n_inodes = util.paths_to_inodes(temp_dir)
    prior_inodes = util.paths_to_inodes(prior_dir)

    # All three inodes must differ so the merge is forced onto the hash path.
    assert base_inodes["shared.txt"] != n_inodes["shared.txt"]
    assert base_inodes["shared.txt"] != prior_inodes["shared.txt"]
    assert n_inodes["shared.txt"] != prior_inodes["shared.txt"]

    base_hashes = {"shared.txt": "blake3:original"}
    n_hashes = {"shared.txt": "blake3:original"}
    n_sizes = {"shared.txt": len("original")}

    mock_prior_hashes = {"shared.txt": "blake3:updated"}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value=mock_prior_hashes):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    assert (temp_dir / "shared.txt").read_text() == "updated by prior"
    assert n_hashes["shared.txt"] == "blake3:updated"
    assert n_sizes["shared.txt"] == len("updated by prior")
+
+
@pytest.mark.asyncio
async def test_case_11_prior_replacement_via_inode(tmp_path: pathlib.Path):
    """Case 11 (inode path): base and new share an inode but prior differs, so prior wins."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (base_dir / "shared.txt").write_text("original")
    # Hard link makes base and new identical by inode.
    os.link(base_dir / "shared.txt", temp_dir / "shared.txt")
    (prior_dir / "shared.txt").write_text("updated by prior")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes = {"shared.txt": "blake3:original"}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes = {"shared.txt": "blake3:original"}
    n_sizes = {"shared.txt": len("original")}

    mock_prior_hashes = {"shared.txt": "blake3:updated"}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value=mock_prior_hashes):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    assert (temp_dir / "shared.txt").read_text() == "updated by prior"
    assert n_hashes["shared.txt"] == "blake3:updated"
    assert n_sizes["shared.txt"] == len("updated by prior")
+
+
@pytest.mark.asyncio
async def test_case_13_new_wins_when_prior_deletes(tmp_path: pathlib.Path):
    """Case 13: prior deleted a file new has since modified, so new's content survives."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (base_dir / "modified.txt").write_text("original")
    (temp_dir / "modified.txt").write_text("new content")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes = {"modified.txt": "blake3:original"}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes = {"modified.txt": "blake3:new"}
    n_sizes = {"modified.txt": len("new content")}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={}):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    assert (temp_dir / "modified.txt").exists()
    assert (temp_dir / "modified.txt").read_text() == "new content"
    assert n_hashes["modified.txt"] == "blake3:new"
    assert n_sizes["modified.txt"] == len("new content")
+
+
@pytest.mark.asyncio
async def test_noop_when_base_and_prior_agree(tmp_path: pathlib.Path):
    """When base and prior share an inode there is no prior change to apply;
    new's modification survives and the attestable record is never loaded."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (base_dir / "unchanged.txt").write_text("same")
    # Hard link makes base and prior identical by inode.
    os.link(base_dir / "unchanged.txt", prior_dir / "unchanged.txt")
    (temp_dir / "unchanged.txt").write_text("modified by new")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes = {"unchanged.txt": "blake3:same"}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes = {"unchanged.txt": "blake3:modified"}
    n_sizes = {"unchanged.txt": len("modified by new")}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={}) as mock_load:
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )
        # The inode short-circuit means the hash record is never needed.
        mock_load.assert_not_awaited()

    assert (temp_dir / "unchanged.txt").read_text() == "modified by new"
    assert n_hashes["unchanged.txt"] == "blake3:modified"
+
+
@pytest.mark.asyncio
async def test_type_conflict_prior_file_vs_new_directory(tmp_path: pathlib.Path):
    """A prior file whose name is a (non-empty) directory in new is skipped."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (prior_dir / "docs").write_text("a file in prior")
    (temp_dir / "docs").mkdir()
    (temp_dir / "docs" / "guide.txt").write_text("a file under a directory in new")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes: dict[str, str] = {}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes = {"docs/guide.txt": "blake3:guide"}
    n_sizes = {"docs/guide.txt": len("a file under a directory in new")}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={"docs": "blake3:docs"}):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    # The new tree's directory and its contents are untouched.
    assert (temp_dir / "docs").is_dir()
    assert (temp_dir / "docs" / "guide.txt").read_text() == "a file under a directory in new"
    assert "docs" not in n_hashes
    assert n_hashes["docs/guide.txt"] == "blake3:guide"
+
+
@pytest.mark.asyncio
async def test_type_conflict_prior_file_vs_new_empty_directory(tmp_path: pathlib.Path):
    """A prior file whose name is an empty directory in new is also skipped.

    An empty directory yields no implicit directory entries, so this exercises
    the on-disk isdir check rather than the n_dirs type-conflict check.
    """
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (prior_dir / "empty").write_text("a file in prior")
    (temp_dir / "empty").mkdir()

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes: dict[str, str] = {}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes: dict[str, str] = {}
    n_sizes: dict[str, int] = {}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={"empty": "blake3:empty"}):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    assert (temp_dir / "empty").is_dir()
    assert "empty" not in n_hashes
+
+
@pytest.mark.asyncio
async def test_type_conflict_prior_subdir_vs_new_file(tmp_path: pathlib.Path):
    """A prior file nested under a name that is a plain file in new is skipped."""
    base_dir, prior_dir, temp_dir = _setup_dirs(tmp_path)

    (prior_dir / "docs").mkdir()
    (prior_dir / "docs" / "guide.txt").write_text("a file under a directory in prior")
    (temp_dir / "docs").write_text("a file in new")

    base_inodes = util.paths_to_inodes(base_dir)
    base_hashes: dict[str, str] = {}
    n_inodes = util.paths_to_inodes(temp_dir)
    n_hashes = {"docs": "blake3:docs"}
    n_sizes = {"docs": len("a file in new")}

    with patch("atr.attestable.load_paths", new_callable=AsyncMock, return_value={"docs/guide.txt": "blake3:guide"}):
        await merge.merge(
            base_inodes,
            base_hashes,
            prior_dir,
            "proj",
            "ver",
            "00002",
            temp_dir,
            n_inodes,
            n_hashes,
            n_sizes,
        )

    # New's file keeps its place; the nested prior file is never linked in.
    assert (temp_dir / "docs").is_file()
    assert (temp_dir / "docs").read_text() == "a file in new"
    assert n_hashes["docs"] == "blake3:docs"
    assert "docs/guide.txt" not in n_hashes
+
+
+def _setup_dirs(tmp_path: pathlib.Path) -> tuple[pathlib.Path, pathlib.Path, 
pathlib.Path]:
+    base_dir = tmp_path / "base"
+    prior_dir = tmp_path / "prior"
+    temp_dir = tmp_path / "new"
+    base_dir.mkdir()
+    prior_dir.mkdir()
+    temp_dir.mkdir()
+    return base_dir, prior_dir, temp_dir


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to