This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch sbp
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/sbp by this push:
     new e4ae4625 Rename interfaces and variables related to extracted archives
e4ae4625 is described below

commit e4ae4625192c5b2921aeadbdb2175670661664ff
Author: Sean B. Palmer <[email protected]>
AuthorDate: Thu Mar 12 19:58:41 2026 +0000

    Rename interfaces and variables related to extracted archives
---
 atr/hashes.py                            |  2 +-
 atr/tasks/checks/__init__.py             | 12 +++---
 atr/tasks/checks/compare.py              | 10 ++---
 atr/tasks/checks/license.py              | 34 ++++++++--------
 atr/tasks/checks/rat.py                  | 50 ++++++++++++------------
 atr/tasks/checks/targz.py                | 14 +++----
 atr/tasks/checks/zipformat.py            | 18 ++++-----
 atr/tasks/quarantine.py                  | 66 ++++++++++++++++----------------
 tests/unit/test_archive_root_variants.py | 24 ++++++------
 tests/unit/test_checks_compare.py        | 18 ++++-----
 tests/unit/test_quarantine_backfill.py   |  8 ++--
 tests/unit/test_quarantine_task.py       | 28 +++++++-------
 12 files changed, 141 insertions(+), 143 deletions(-)

diff --git a/atr/hashes.py b/atr/hashes.py
index 5b6cac71..8f3ef46e 100644
--- a/atr/hashes.py
+++ b/atr/hashes.py
@@ -74,5 +74,5 @@ async def file_sha3(path: str) -> str:
     return sha3.hexdigest()
 
 
-def filesystem_cache_archives_key(content_hash: str) -> str:
+def filesystem_archives_key(content_hash: str) -> str:
     return content_hash.replace(":", "_")
diff --git a/atr/tasks/checks/__init__.py b/atr/tasks/checks/__init__.py
index 26912a0d..842eca10 100644
--- a/atr/tasks/checks/__init__.py
+++ b/atr/tasks/checks/__init__.py
@@ -312,8 +312,8 @@ def function_key(func: Callable[..., Any] | str) -> str:
     return func.__module__ + "." + func.__name__ if callable(func) else func
 
 
-async def resolve_cache_dir(args: FunctionArguments) -> pathlib.Path | None:
-    """Resolve the quarantine extraction cache directory for the primary 
archive."""
+async def resolve_archive_dir(args: FunctionArguments) -> pathlib.Path | None:
+    """Resolve the extracted archive directory for the primary archive."""
     if args.primary_rel_path is None:
         return None
     paths_data = await attestable.load_paths(args.project_name, 
args.version_name, args.revision_number)
@@ -322,10 +322,10 @@ async def resolve_cache_dir(args: FunctionArguments) -> 
pathlib.Path | None:
     content_hash = paths_data.get(args.primary_rel_path)
     if content_hash is None:
         return None
-    cache_key = hashes.filesystem_cache_archives_key(content_hash)
-    cache_dir = file_paths.get_archives_dir() / str(args.project_name) / 
str(args.version_name) / cache_key
-    if await aiofiles.os.path.isdir(cache_dir):
-        return cache_dir
+    archive_key = hashes.filesystem_archives_key(content_hash)
+    archive_dir = file_paths.get_archives_dir() / str(args.project_name) / 
str(args.version_name) / archive_key
+    if await aiofiles.os.path.isdir(archive_dir):
+        return archive_dir
     return None
 
 
diff --git a/atr/tasks/checks/compare.py b/atr/tasks/checks/compare.py
index e131d190..dec520a3 100644
--- a/atr/tasks/checks/compare.py
+++ b/atr/tasks/checks/compare.py
@@ -101,8 +101,8 @@ async def source_trees(args: checks.FunctionArguments) -> 
results.Results | None
     if payload is not None:
         if not (primary_abs_path := await recorder.abs_path()):
             return None
-        cache_dir = await checks.resolve_cache_dir(args)
-        if cache_dir is None:
+        extracted_dir = await checks.resolve_archive_dir(args)
+        if extracted_dir is None:
             await recorder.failure(
                 "Extracted archive tree is not available",
                 {"rel_path": args.primary_rel_path},
@@ -120,11 +120,11 @@ async def source_trees(args: checks.FunctionArguments) -> 
results.Results | None
                     {"repo_url": 
f"https://github.com/{payload.repository}.git";, "sha": payload.sha},
                 )
                 return None
-            archive_root_result = await _find_archive_root(primary_abs_path, 
cache_dir)
+            archive_root_result = await _find_archive_root(primary_abs_path, 
extracted_dir)
             if archive_root_result.root is None:
                 await recorder.failure(
                     "Could not determine archive root directory for 
comparison",
-                    {"archive_path": str(primary_abs_path), "extract_dir": 
str(cache_dir)},
+                    {"archive_path": str(primary_abs_path), "extract_dir": 
str(extracted_dir)},
                 )
                 return None
             if archive_root_result.extra_entries:
@@ -137,7 +137,7 @@ async def source_trees(args: checks.FunctionArguments) -> 
results.Results | None
                     },
                 )
                 return None
-            archive_content_dir = cache_dir / archive_root_result.root
+            archive_content_dir = extracted_dir / archive_root_result.root
             archive_dir = str(archive_content_dir)
             try:
                 comparison = await _compare_trees(github_dir, 
archive_content_dir)
diff --git a/atr/tasks/checks/license.py b/atr/tasks/checks/license.py
index ff28087e..722e160f 100644
--- a/atr/tasks/checks/license.py
+++ b/atr/tasks/checks/license.py
@@ -143,8 +143,8 @@ async def files(args: checks.FunctionArguments) -> 
results.Results | None:
 
     is_podling = args.extra_args.get("is_podling", False)
 
-    cache_dir = await checks.resolve_cache_dir(args)
-    if cache_dir is None:
+    archive_dir = await checks.resolve_archive_dir(args)
+    if archive_dir is None:
         await recorder.failure(
             "Extracted archive tree is not available",
             {"rel_path": args.primary_rel_path},
@@ -154,7 +154,7 @@ async def files(args: checks.FunctionArguments) -> 
results.Results | None:
     log.info(f"Checking license files for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
     try:
-        for result in await asyncio.to_thread(_files_check_core_logic, 
cache_dir, is_podling):
+        for result in await asyncio.to_thread(_files_check_core_logic, 
archive_dir, is_podling):
             match result:
                 case ArtifactResult():
                     await _record_artifact(recorder, result)
@@ -186,8 +186,8 @@ async def headers(args: checks.FunctionArguments) -> 
results.Results | None:
     #     log.info(f"Using cached license headers result for 
{artifact_abs_path} (rel: {args.primary_rel_path})")
     #     return None
 
-    cache_dir = await checks.resolve_cache_dir(args)
-    if cache_dir is None:
+    archive_dir = await checks.resolve_archive_dir(args)
+    if archive_dir is None:
         await recorder.failure(
             "Extracted archive tree is not available",
             {"rel_path": args.primary_rel_path},
@@ -208,7 +208,7 @@ async def headers(args: checks.FunctionArguments) -> 
results.Results | None:
         excludes_source = "none"
 
     artifact_basename = os.path.basename(str(artifact_abs_path))
-    return await _headers_core(recorder, cache_dir, artifact_basename, 
ignore_lines, excludes_source)
+    return await _headers_core(recorder, archive_dir, artifact_basename, 
ignore_lines, excludes_source)
 
 
 def headers_validate(content: bytes, _filename: str) -> tuple[bool, str | 
None]:
@@ -239,13 +239,13 @@ def headers_validate(content: bytes, _filename: str) -> 
tuple[bool, str | None]:
     return False, "Could not find Apache License header"
 
 
-def _files_check_core_logic(cache_dir: pathlib.Path, is_podling: bool) -> 
Iterator[Result]:
+def _files_check_core_logic(archive_dir: pathlib.Path, is_podling: bool) -> 
Iterator[Result]:
     """Verify that LICENSE and NOTICE files exist and are placed and formatted 
correctly."""
     license_results: dict[str, str | None] = {}
     notice_results: dict[str, tuple[bool, list[str], str]] = {}
     disclaimer_found = False
 
-    if not cache_dir.is_dir():
+    if not archive_dir.is_dir():
         # Already protected by the caller
         # We add it here again to make unit testing cleaner
         yield ArtifactResult(
@@ -256,8 +256,8 @@ def _files_check_core_logic(cache_dir: pathlib.Path, 
is_podling: bool) -> Iterat
         return
 
     # Check for license files in the root directory
-    top_entries = sorted(e for e in os.listdir(cache_dir) if not 
e.startswith("._"))
-    root_dirs = [e for e in top_entries if (cache_dir / e).is_dir()]
+    top_entries = sorted(e for e in os.listdir(archive_dir) if not 
e.startswith("._"))
+    root_dirs = [e for e in top_entries if (archive_dir / e).is_dir()]
     if len(root_dirs) != 1:
         yield ArtifactResult(
             status=sql.CheckResultStatus.FAILURE,
@@ -265,7 +265,7 @@ def _files_check_core_logic(cache_dir: pathlib.Path, 
is_podling: bool) -> Iterat
             data=None,
         )
         return
-    root_path = cache_dir / root_dirs[0]
+    root_path = archive_dir / root_dirs[0]
 
     for entry in sorted(os.listdir(root_path)):
         if entry.startswith("._"):
@@ -368,7 +368,7 @@ def _get_file_extension(filename: str) -> str | None:
 
 
 def _headers_check_core_logic(  # noqa: C901
-    cache_dir: pathlib.Path, artifact_basename: str, ignore_lines: list[str], 
excludes_source: str
+    archive_dir: pathlib.Path, artifact_basename: str, ignore_lines: 
list[str], excludes_source: str
 ) -> Iterator[Result]:
     """Verify Apache License headers in source files within an extracted cache 
tree."""
     # We could modify @Lucas-C/pre-commit-hooks instead for this
@@ -386,7 +386,7 @@ def _headers_check_core_logic(  # noqa: C901
     #         data=None,
     #     )
 
-    if not cache_dir.is_dir():
+    if not archive_dir.is_dir():
         yield ArtifactResult(
             status=sql.CheckResultStatus.FAILURE,
             message="Cache directory is not available",
@@ -396,7 +396,7 @@ def _headers_check_core_logic(  # noqa: C901
 
     # log.info(f"Ignore lines: {ignore_lines}")
 
-    for dirpath, dirnames, filenames in os.walk(cache_dir):
+    for dirpath, dirnames, filenames in os.walk(archive_dir):
         dirnames.sort()
         for filename in sorted(filenames):
             if filename.startswith("._"):
@@ -404,7 +404,7 @@ def _headers_check_core_logic(  # noqa: C901
                 continue
 
             file_path = pathlib.Path(dirpath) / filename
-            rel_path = str(file_path.relative_to(cache_dir))
+            rel_path = str(file_path.relative_to(archive_dir))
 
             ignore_path = "/" + artifact_basename + "/" + rel_path
             matcher = util.create_path_matcher(ignore_lines, 
pathlib.Path(ignore_path), pathlib.Path("/"))
@@ -508,14 +508,14 @@ def _headers_check_core_logic_should_check(filepath: str) 
-> bool:
 
 async def _headers_core(
     recorder: checks.Recorder,
-    cache_dir: pathlib.Path,
+    archive_dir: pathlib.Path,
     artifact_basename: str,
     ignore_lines: list[str],
     excludes_source: str,
 ) -> None:
     try:
         for result in await asyncio.to_thread(
-            _headers_check_core_logic, cache_dir, artifact_basename, 
ignore_lines, excludes_source
+            _headers_check_core_logic, archive_dir, artifact_basename, 
ignore_lines, excludes_source
         ):
             match result:
                 case ArtifactResult():
diff --git a/atr/tasks/checks/rat.py b/atr/tasks/checks/rat.py
index 16df15b3..acb1d474 100644
--- a/atr/tasks/checks/rat.py
+++ b/atr/tasks/checks/rat.py
@@ -89,8 +89,8 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
         log.info(f"Skipping RAT check for {artifact_abs_path} (mode is 
LIGHTWEIGHT)")
         return None
 
-    cache_dir = await checks.resolve_cache_dir(args)
-    if cache_dir is None:
+    archive_dir = await checks.resolve_archive_dir(args)
+    if archive_dir is None:
         await recorder.failure(
             "Extracted archive tree is not available",
             {"rel_path": args.primary_rel_path},
@@ -103,7 +103,7 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
     policy_excludes = project.policy_source_excludes_rat if is_source else []
 
     try:
-        await _check_core(args, recorder, cache_dir, policy_excludes)
+        await _check_core(args, recorder, archive_dir, policy_excludes)
     except Exception as e:
         # TODO: Or bubble for task failure?
         await recorder.failure("Error running Apache RAT check", {"error": 
str(e)})
@@ -161,12 +161,12 @@ def _build_rat_command(
 async def _check_core(
     args: checks.FunctionArguments,
     recorder: checks.Recorder,
-    cache_dir: pathlib.Path,
+    archive_dir: pathlib.Path,
     policy_excludes: list[str],
 ) -> None:
     result = await asyncio.to_thread(
         _synchronous,
-        cache_dir=str(cache_dir),
+        archive_dir=str(archive_dir),
         policy_excludes=policy_excludes,
         rat_jar_path=args.extra_args.get("rat_jar_path", 
_CONFIG.APACHE_RAT_JAR_PATH),
     )
@@ -347,12 +347,12 @@ def _summary_message(valid: bool, unapproved_licenses: 
int, unknown_licenses: in
 
 
 def _synchronous(
-    cache_dir: str,
+    archive_dir: str,
     policy_excludes: list[str],
     rat_jar_path: str = _CONFIG.APACHE_RAT_JAR_PATH,
 ) -> checkdata.Rat:
     """Verify license headers using Apache RAT."""
-    log.info(f"Verifying licenses with Apache RAT for {cache_dir}")
+    log.info(f"Verifying licenses with Apache RAT for {archive_dir}")
     log.info(f"PATH environment variable: {os.environ.get('PATH', 'PATH not 
found')}")
 
     java_check = _synchronous_check_java_installed()
@@ -367,7 +367,7 @@ def _synchronous(
     try:
         with tempfile.TemporaryDirectory(prefix="rat_scratch_") as scratch_dir:
             log.info(f"Created scratch directory: {scratch_dir}")
-            return _synchronous_core(cache_dir, scratch_dir, policy_excludes, 
rat_jar_path)
+            return _synchronous_core(archive_dir, scratch_dir, 
policy_excludes, rat_jar_path)
     except Exception as e:
         import traceback
 
@@ -459,16 +459,16 @@ def _synchronous_check_java_installed() -> checkdata.Rat 
| None:
 
 
 def _synchronous_core(  # noqa: C901
-    cache_dir: str,
+    archive_dir: str,
     scratch_dir: str,
     policy_excludes: list[str],
     rat_jar_path: str,
 ) -> checkdata.Rat:
     exclude_file_paths: list[str] = []
-    for dirpath, _dirnames, filenames in os.walk(cache_dir):
+    for dirpath, _dirnames, filenames in os.walk(archive_dir):
         for filename in filenames:
             if filename == _RAT_EXCLUDES_FILENAME:
-                
exclude_file_paths.append(os.path.relpath(os.path.join(dirpath, filename), 
cache_dir))
+                
exclude_file_paths.append(os.path.relpath(os.path.join(dirpath, filename), 
archive_dir))
     log.info(f"Found {len(exclude_file_paths)} {_RAT_EXCLUDES_FILENAME} 
file(s): {exclude_file_paths}")
 
     # Validate that we found at most one exclusion file
@@ -483,11 +483,11 @@ def _synchronous_core(  # noqa: C901
     archive_excludes_path: str | None = exclude_file_paths[0] if 
exclude_file_paths else None
 
     excludes_source, effective_excludes_path = 
_synchronous_core_excludes_source(
-        archive_excludes_path, policy_excludes, cache_dir, scratch_dir
+        archive_excludes_path, policy_excludes, archive_dir, scratch_dir
     )
 
     try:
-        scan_root = _synchronous_core_scan_root(archive_excludes_path, 
cache_dir)
+        scan_root = _synchronous_core_scan_root(archive_excludes_path, 
archive_dir)
     except RatError as e:
         return checkdata.Rat(
             message=f"Failed to determine scan root: {e}",
@@ -522,7 +522,7 @@ def _synchronous_core(  # noqa: C901
 
     # The unknown_license_files and unapproved_files contain FileEntry objects
     # The path is relative to scan_root, so we prepend the scan_root relative 
path
-    scan_root_rel = os.path.relpath(scan_root, cache_dir)
+    scan_root_rel = os.path.relpath(scan_root, archive_dir)
     result.directory = scan_root_rel
     if scan_root_rel != ".":
         for file in result.unknown_license_files:
@@ -537,7 +537,7 @@ def _synchronous_core(  # noqa: C901
 
 
 def _synchronous_core_excludes_source(
-    archive_excludes_path: str | None, policy_excludes: list[str], cache_dir: 
str, scratch_dir: str
+    archive_excludes_path: str | None, policy_excludes: list[str], 
archive_dir: str, scratch_dir: str
 ) -> tuple[str, str | None]:
     # Determine excludes_source and effective excludes file
     excludes_source: str
@@ -545,7 +545,7 @@ def _synchronous_core_excludes_source(
 
     if archive_excludes_path is not None:
         excludes_source = "archive"
-        effective_excludes_path = os.path.join(cache_dir, 
archive_excludes_path)
+        effective_excludes_path = os.path.join(archive_dir, 
archive_excludes_path)
         log.info(f"Using archive {_RAT_EXCLUDES_FILENAME}: 
{archive_excludes_path}")
     elif policy_excludes:
         excludes_source = "policy"
@@ -642,27 +642,27 @@ def _synchronous_core_parse_output_core(xml_file: str, 
base_dir: str) -> checkda
     )
 
 
-def _synchronous_core_scan_root(archive_excludes_path: str | None, cache_dir: 
str) -> str:
+def _synchronous_core_scan_root(archive_excludes_path: str | None, 
archive_dir: str) -> str:
     # Determine scan root based on archive .rat-excludes location
     if archive_excludes_path is not None:
-        scan_root = os.path.dirname(os.path.join(cache_dir, 
archive_excludes_path))
+        scan_root = os.path.dirname(os.path.join(archive_dir, 
archive_excludes_path))
 
-        # Verify that scan_root is inside cache_dir
+        # Verify that scan_root is inside archive_dir
         abs_scan_root = os.path.abspath(scan_root)
-        abs_cache_dir = os.path.abspath(cache_dir)
-        scan_root_is_inside = (abs_scan_root == abs_cache_dir) or 
abs_scan_root.startswith(abs_cache_dir + os.sep)
+        abs_archive_dir = os.path.abspath(archive_dir)
+        scan_root_is_inside = (abs_scan_root == abs_archive_dir) or 
abs_scan_root.startswith(abs_archive_dir + os.sep)
         if not scan_root_is_inside:
-            log.error(f"Scan root {scan_root} is outside cache_dir 
{cache_dir}")
+            log.error(f"Scan root {scan_root} is outside archive_dir 
{archive_dir}")
             raise RatError("Invalid archive structure: exclusion file path 
escapes extraction directory")
 
         log.info(f"Using {_RAT_EXCLUDES_FILENAME} directory as scan root: 
{scan_root}")
 
-        untracked_count = _count_files_outside_directory(cache_dir, scan_root)
+        untracked_count = _count_files_outside_directory(archive_dir, 
scan_root)
         if untracked_count > 0:
             log.error(f"Found {untracked_count} file(s) outside 
{_RAT_EXCLUDES_FILENAME} directory")
             raise RatError(f"Files exist outside {_RAT_EXCLUDES_FILENAME} 
directory ({untracked_count} found)")
     else:
-        scan_root = cache_dir
-        log.info(f"No archive {_RAT_EXCLUDES_FILENAME} found, using cache_dir 
as scan root: {scan_root}")
+        scan_root = archive_dir
+        log.info(f"No archive {_RAT_EXCLUDES_FILENAME} found, using 
archive_dir as scan root: {scan_root}")
 
     return scan_root
diff --git a/atr/tasks/checks/targz.py b/atr/tasks/checks/targz.py
index c4d76bc4..dee2988d 100644
--- a/atr/tasks/checks/targz.py
+++ b/atr/tasks/checks/targz.py
@@ -39,10 +39,10 @@ class RootDirectoryError(Exception):
     ...
 
 
-def root_directory(cache_dir: pathlib.Path) -> tuple[str, bytes | None]:
+def root_directory(archive_dir: pathlib.Path) -> tuple[str, bytes | None]:
     """Find root directory and read package/package.json from the extracted 
tree."""
     # The ._ prefix is a metadata convention
-    entries = sorted(e for e in os.listdir(cache_dir) if not 
e.startswith("._"))
+    entries = sorted(e for e in os.listdir(archive_dir) if not 
e.startswith("._"))
 
     if not entries:
         raise RootDirectoryError("No root directory found in archive")
@@ -50,7 +50,7 @@ def root_directory(cache_dir: pathlib.Path) -> tuple[str, 
bytes | None]:
         raise RootDirectoryError(f"Multiple root directories found: 
{entries[0]}, {entries[1]}")
 
     root = entries[0]
-    root_path = cache_dir / root
+    root_path = archive_dir / root
     try:
         root_stat = root_path.lstat()
     except OSError as e:
@@ -61,7 +61,7 @@ def root_directory(cache_dir: pathlib.Path) -> tuple[str, 
bytes | None]:
     package_json: bytes | None = None
 
     if root == "package":
-        package_json_path = cache_dir / "package" / "package.json"
+        package_json_path = archive_dir / "package" / "package.json"
         with contextlib.suppress(FileNotFoundError, OSError):
             package_json_stat = package_json_path.lstat()
             # We do this to avoid allowing package.json to be a symlink
@@ -81,8 +81,8 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     if await recorder.primary_path_is_binary():
         return None
 
-    cache_dir = await checks.resolve_cache_dir(args)
-    if cache_dir is None:
+    archive_dir = await checks.resolve_archive_dir(args)
+    if archive_dir is None:
         await recorder.failure(
             "Extracted archive tree is not available",
             {"rel_path": args.primary_rel_path},
@@ -101,7 +101,7 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     )
 
     try:
-        root, package_json = await asyncio.to_thread(root_directory, cache_dir)
+        root, package_json = await asyncio.to_thread(root_directory, 
archive_dir)
         data: dict[str, object] = {
             "root": root,
             "basename_from_filename": basename_from_filename,
diff --git a/atr/tasks/checks/zipformat.py b/atr/tasks/checks/zipformat.py
index d991d98c..defe2dbc 100644
--- a/atr/tasks/checks/zipformat.py
+++ b/atr/tasks/checks/zipformat.py
@@ -44,8 +44,8 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     if await recorder.primary_path_is_binary():
         return None
 
-    cache_dir = await checks.resolve_cache_dir(args)
-    if cache_dir is None:
+    archive_dir = await checks.resolve_archive_dir(args)
+    if archive_dir is None:
         await recorder.failure(
             "Extracted archive tree is not available",
             {"rel_path": args.primary_rel_path},
@@ -55,7 +55,7 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     log.info(f"Checking zip structure for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
     try:
-        result_data = await asyncio.to_thread(_structure_check_core_logic, 
cache_dir, str(artifact_abs_path))
+        result_data = await asyncio.to_thread(_structure_check_core_logic, 
archive_dir, str(artifact_abs_path))
 
         if result_data.get("error"):
             await recorder.failure(result_data["error"], result_data)
@@ -67,10 +67,10 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     return None
 
 
-def _structure_check_core_logic(cache_dir: pathlib.Path, artifact_path: str) 
-> dict[str, Any]:
+def _structure_check_core_logic(archive_dir: pathlib.Path, artifact_path: str) 
-> dict[str, Any]:
     """Verify the internal structure of the zip archive."""
     # The ._ prefix is a metadata convention
-    entries = sorted(e for e in os.listdir(cache_dir) if not 
e.startswith("._"))
+    entries = sorted(e for e in os.listdir(archive_dir) if not 
e.startswith("._"))
     if not entries:
         return {"error": "Archive is empty"}
 
@@ -81,7 +81,7 @@ def _structure_check_core_logic(cache_dir: pathlib.Path, 
artifact_path: str) ->
     root_dirs: list[str] = []
     non_rooted_entries: list[str] = []
     for entry in entries:
-        entry_path = cache_dir / entry
+        entry_path = archive_dir / entry
         try:
             entry_stat = entry_path.lstat()
         except OSError as e:
@@ -105,7 +105,7 @@ def _structure_check_core_logic(cache_dir: pathlib.Path, 
artifact_path: str) ->
         return {"root_dir": actual_root, "expected_roots": expected_roots}
 
     if (actual_root == "package") and (
-        npm_result := _structure_npm_result(cache_dir, basename_from_filename, 
actual_root, expected_roots)
+        npm_result := _structure_npm_result(archive_dir, 
basename_from_filename, actual_root, expected_roots)
     ):
         return npm_result
 
@@ -117,9 +117,9 @@ def _structure_check_core_logic(cache_dir: pathlib.Path, 
artifact_path: str) ->
 
 
 def _structure_npm_result(
-    cache_dir: pathlib.Path, basename_from_filename: str, actual_root: str, 
expected_roots: list[str]
+    archive_dir: pathlib.Path, basename_from_filename: str, actual_root: str, 
expected_roots: list[str]
 ) -> dict[str, Any] | None:
-    package_json_path = cache_dir / "package" / "package.json"
+    package_json_path = archive_dir / "package" / "package.json"
     try:
         package_json_stat = package_json_path.lstat()
     except (FileNotFoundError, OSError):
diff --git a/atr/tasks/quarantine.py b/atr/tasks/quarantine.py
index cebf48a9..643e1af8 100644
--- a/atr/tasks/quarantine.py
+++ b/atr/tasks/quarantine.py
@@ -68,11 +68,11 @@ def backfill_archive_cache() -> list[tuple[str, 
pathlib.Path, float]]:
         done_file.touch()
         return []
 
-    cache_archives_dir = paths.get_archives_dir()
+    archives_dir = paths.get_archives_dir()
     staging_base = paths.get_tmp_dir()
     staging_base.mkdir(parents=True, exist_ok=True)
     extraction_cfg = _extraction_config()
-    seen_cache_keys: set[str] = set()
+    seen_archive_keys: set[str] = set()
     results_list: list[tuple[str, pathlib.Path, float]] = []
 
     for project_dir in sorted(unfinished_dir.iterdir()):
@@ -88,10 +88,10 @@ def backfill_archive_cache() -> list[tuple[str, 
pathlib.Path, float]]:
                     revision_dir,
                     project_dir.name,
                     version_dir.name,
-                    cache_archives_dir,
+                    archives_dir,
                     staging_base,
                     extraction_cfg,
-                    seen_cache_keys,
+                    seen_archive_keys,
                     results_list,
                 )
 
@@ -130,11 +130,9 @@ async def validate(args: QuarantineValidate) -> 
results.Results | None:
         return None
 
     try:
-        await _extract_archives_to_cache(
-            args.archives, quarantine_dir, str(project_name), 
str(version_name), file_entries
-        )
+        await _extract_archives(args.archives, quarantine_dir, 
str(project_name), str(version_name), file_entries)
     except Exception as exc:
-        await _mark_failed(quarantined, file_entries, f"Archive extraction to 
cache failed: {exc}")
+        await _mark_failed(quarantined, file_entries, f"Archive extraction 
failed: {exc}")
         await aioshutil.rmtree(quarantine_dir)
         return None
 
@@ -148,14 +146,14 @@ def _backfill_done_file() -> pathlib.Path:
 
 def _backfill_extract_archive(
     archive_path: pathlib.Path,
-    cache_dir: pathlib.Path,
+    archive_dir: pathlib.Path,
     staging_base: pathlib.Path,
     extraction_cfg: exarch.SecurityConfig,
     results_list: list[tuple[str, pathlib.Path, float]],
 ) -> None:
     try:
-        elapsed = _extract_archive_to_cache_dir(archive_path, cache_dir, 
staging_base, extraction_cfg)
-        results_list.append((str(archive_path), cache_dir, elapsed))
+        elapsed = _extract_archive_to_dir(archive_path, archive_dir, 
staging_base, extraction_cfg)
+        results_list.append((str(archive_path), archive_dir, elapsed))
     except Exception as exc:
         log.warning(f"Backfill: failed to extract {archive_path}: {exc}")
 
@@ -164,33 +162,33 @@ def _backfill_revision(
     revision_dir: pathlib.Path,
     project_name: str,
     version_name: str,
-    cache_archives_dir: pathlib.Path,
+    archives_dir: pathlib.Path,
     staging_base: pathlib.Path,
     extraction_cfg: exarch.SecurityConfig,
-    seen_cache_keys: set[str],
+    seen_archive_keys: set[str],
     results_list: list[tuple[str, pathlib.Path, float]],
 ) -> None:
-    cache_base = cache_archives_dir / project_name / version_name
+    archives_base = archives_dir / project_name / version_name
     for archive_path in sorted(revision_dir.rglob("*")):
         if not archive_path.is_file():
             continue
         if not _is_archive_suffix(archive_path.name):
             continue
         content_hash = hashes.compute_file_hash_sync(archive_path)
-        cache_key = hashes.filesystem_cache_archives_key(content_hash)
-        dedupe_key = f"{project_name}/{version_name}/{cache_key}"
-        if dedupe_key in seen_cache_keys:
+        archive_key = hashes.filesystem_archives_key(content_hash)
+        dedupe_key = f"{project_name}/{version_name}/{archive_key}"
+        if dedupe_key in seen_archive_keys:
             continue
-        seen_cache_keys.add(dedupe_key)
-        cache_dir = cache_base / cache_key
-        if cache_dir.is_dir():
+        seen_archive_keys.add(dedupe_key)
+        archive_dir = archives_base / archive_key
+        if archive_dir.is_dir():
             continue
-        _backfill_extract_archive(archive_path, cache_dir, staging_base, 
extraction_cfg, results_list)
+        _backfill_extract_archive(archive_path, archive_dir, staging_base, 
extraction_cfg, results_list)
 
 
-def _extract_archive_to_cache_dir(
+def _extract_archive_to_dir(
     archive_path: pathlib.Path,
-    cache_dir: pathlib.Path,
+    archive_dir: pathlib.Path,
     staging_base: pathlib.Path,
     extraction_cfg: exarch.SecurityConfig,
 ) -> float:
@@ -199,50 +197,50 @@ def _extract_archive_to_cache_dir(
         staging_dir.mkdir(parents=False, exist_ok=False)
         start = time.monotonic()
         exarch.extract_archive(str(archive_path), str(staging_dir), 
extraction_cfg)
-        cache_dir.parent.mkdir(parents=True, exist_ok=True)
+        archive_dir.parent.mkdir(parents=True, exist_ok=True)
         try:
-            os.rename(staging_dir, cache_dir)
+            os.rename(staging_dir, archive_dir)
         except OSError as err:
             if isinstance(err, FileExistsError) or err.errno in {errno.EEXIST, 
errno.ENOTEMPTY}:
                 shutil.rmtree(staging_dir, ignore_errors=True)
             else:
                 raise
-        _set_archive_permissions(cache_dir)
+        _set_archive_permissions(archive_dir)
         return time.monotonic() - start
     except Exception:
         shutil.rmtree(staging_dir, ignore_errors=True)
         raise
 
 
-async def _extract_archives_to_cache(
+async def _extract_archives(
     archives: list[QuarantineArchiveEntry],
     quarantine_dir: pathlib.Path,
     project_name: str,
     version_name: str,
     file_entries: list[sql.QuarantineFileEntryV1],
 ) -> None:
-    cache_base = paths.get_archives_dir() / project_name / version_name
+    archives_base = paths.get_archives_dir() / project_name / version_name
     staging_base = paths.get_tmp_dir()
-    await aiofiles.os.makedirs(cache_base, exist_ok=True)
+    await aiofiles.os.makedirs(archives_base, exist_ok=True)
     await aiofiles.os.makedirs(staging_base, exist_ok=True)
 
     extraction_config = _extraction_config()
 
     for archive in archives:
-        cache_dir = cache_base / 
hashes.filesystem_cache_archives_key(archive.content_hash)
-        if await aiofiles.os.path.isdir(cache_dir):
+        archive_dir = archives_base / 
hashes.filesystem_archives_key(archive.content_hash)
+        if await aiofiles.os.path.isdir(archive_dir):
             continue
         archive_path = quarantine_dir / archive.rel_path
         try:
             await asyncio.to_thread(
-                _extract_archive_to_cache_dir,
+                _extract_archive_to_dir,
                 archive_path,
-                cache_dir,
+                archive_dir,
                 staging_base,
                 extraction_config,
             )
         except Exception as exc:
-            log.exception(f"Failed to extract archive {archive.rel_path} to 
cache")
+            log.exception(f"Failed to extract archive {archive.rel_path}")
             for entry in file_entries:
                 if entry.rel_path == archive.rel_path:
                     entry.errors.append(f"Extraction failed: {exc}")
diff --git a/tests/unit/test_archive_root_variants.py 
b/tests/unit/test_archive_root_variants.py
index 07457a3d..cea22295 100644
--- a/tests/unit/test_archive_root_variants.py
+++ b/tests/unit/test_archive_root_variants.py
@@ -39,7 +39,7 @@ async def 
test_targz_structure_accepts_npm_pack_root(tmp_path: pathlib.Path) ->
     )
     recorder, args = await _targz_structure_args(tmp_path, "example-1.2.3.tgz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.SUCCESS.value for status, _, _ 
in recorder.messages)
@@ -51,7 +51,7 @@ async def 
test_targz_structure_accepts_source_suffix_variant(tmp_path: pathlib.P
     cache_dir = _make_cache_tree(tmp_path, ["apache-example-1.2.3/README.txt"])
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3-source.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.SUCCESS.value for status, _, _ 
in recorder.messages)
@@ -62,7 +62,7 @@ async def 
test_targz_structure_accepts_src_suffix_variant(tmp_path: pathlib.Path
     cache_dir = _make_cache_tree(tmp_path, ["apache-example-1.2.3/README.txt"])
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3-src.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.SUCCESS.value for status, _, _ 
in recorder.messages)
@@ -72,7 +72,7 @@ async def 
test_targz_structure_accepts_src_suffix_variant(tmp_path: pathlib.Path
 async def test_targz_structure_fails_when_cache_unavailable(tmp_path: 
pathlib.Path) -> None:
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=None)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=None)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -90,7 +90,7 @@ async def 
test_targz_structure_rejects_npm_pack_filename_mismatch(tmp_path: path
     )
     recorder, args = await _targz_structure_args(tmp_path, "example-1.2.3.tgz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -107,7 +107,7 @@ async def 
test_targz_structure_rejects_package_root_without_package_json(tmp_pat
     )
     recorder, args = await _targz_structure_args(tmp_path, "example-1.2.3.tgz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -118,7 +118,7 @@ async def 
test_targz_structure_rejects_source_root_when_filename_has_no_suffix(t
     cache_dir = _make_cache_tree(tmp_path, 
["apache-example-1.2.3-source/README.txt"])
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -129,7 +129,7 @@ async def 
test_targz_structure_rejects_source_root_when_filename_has_src_suffix(
     cache_dir = _make_cache_tree(tmp_path, 
["apache-example-1.2.3-source/README.txt"])
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3-src.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -140,7 +140,7 @@ async def 
test_targz_structure_rejects_src_root_when_filename_has_no_suffix(tmp_
     cache_dir = _make_cache_tree(tmp_path, 
["apache-example-1.2.3-src/README.txt"])
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -151,7 +151,7 @@ async def 
test_targz_structure_rejects_src_root_when_filename_has_source_suffix(
     cache_dir = _make_cache_tree(tmp_path, 
["apache-example-1.2.3-src/README.txt"])
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3-source.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -164,7 +164,7 @@ async def 
test_targz_structure_rejects_symlink_root(tmp_path: pathlib.Path) -> N
     (cache_dir / "apache-example-1.2.3").symlink_to(cache_dir / 
"missing-target")
     recorder, args = await _targz_structure_args(tmp_path, 
"apache-example-1.2.3.tar.gz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
@@ -182,7 +182,7 @@ async def 
test_targz_structure_rejects_symlinked_package_json(tmp_path: pathlib.
     (cache_dir / "package" / "package.json").symlink_to(cache_dir / 
"metadata.json")
     recorder, args = await _targz_structure_args(tmp_path, "example-1.2.3.tgz")
 
-    with mock.patch.object(checks, "resolve_cache_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
+    with mock.patch.object(checks, "resolve_archive_dir", 
new=mock.AsyncMock(return_value=cache_dir)):
         await targz.structure(args)
 
     assert any(status == sql.CheckResultStatus.FAILURE.value for status, _, _ 
in recorder.messages)
diff --git a/tests/unit/test_checks_compare.py 
b/tests/unit/test_checks_compare.py
index b608241d..52159c16 100644
--- a/tests/unit/test_checks_compare.py
+++ b/tests/unit/test_checks_compare.py
@@ -79,7 +79,7 @@ class CompareRecorder:
         return 
atr.tasks.checks.compare.TreeComparisonResult(set(self.invalid), 
set(self.repo_only))
 
 
-class CacheDirResolver:
+class ArchiveDirResolver:
     def __init__(self, cache_dir: pathlib.Path | None) -> None:
         self.cache_dir = cache_dir
 
@@ -622,7 +622,7 @@ async def 
test_source_trees_creates_temp_workspace_and_cleans_up(
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
     monkeypatch.setattr(atr.tasks.checks.compare, "_checkout_github_source", 
checkout)
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", CacheDirResolver(cache_dir))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", ArchiveDirResolver(cache_dir))
     monkeypatch.setattr(atr.tasks.checks.compare, "_find_archive_root", 
find_root)
     monkeypatch.setattr(atr.tasks.checks.compare, "_compare_trees", compare)
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
ReturnValue(tmp_root))
@@ -655,7 +655,7 @@ async def 
test_source_trees_payload_none_skips_temp_workspace(monkeypatch: pytes
         "_checkout_github_source",
         RaiseAsync("_checkout_github_source should not be called"),
     )
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
RaiseAsync("resolve_cache_dir should not be called"))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
RaiseAsync("resolve_archive_dir should not be called"))
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
RaiseSync("get_tmp_dir should not be called"))
 
     await atr.tasks.checks.compare.source_trees(args)
@@ -678,7 +678,7 @@ async def 
test_source_trees_permits_pkg_info_when_pyproject_toml_exists(
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
     monkeypatch.setattr(atr.tasks.checks.compare, "_checkout_github_source", 
checkout)
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
CacheDirResolver(cache_dir))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
ArchiveDirResolver(cache_dir))
     monkeypatch.setattr(atr.tasks.checks.compare, "_find_archive_root", 
find_root)
     monkeypatch.setattr(atr.tasks.checks.compare, "_compare_trees", compare)
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
ReturnValue(tmp_root))
@@ -705,7 +705,7 @@ async def 
test_source_trees_records_failure_when_archive_has_invalid_files(
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
     monkeypatch.setattr(atr.tasks.checks.compare, "_checkout_github_source", 
checkout)
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
CacheDirResolver(cache_dir))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
ArchiveDirResolver(cache_dir))
     monkeypatch.setattr(atr.tasks.checks.compare, "_find_archive_root", 
find_root)
     monkeypatch.setattr(atr.tasks.checks.compare, "_compare_trees", compare)
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
ReturnValue(tmp_root))
@@ -736,7 +736,7 @@ async def 
test_source_trees_records_failure_when_archive_root_not_found(
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
     monkeypatch.setattr(atr.tasks.checks.compare, "_checkout_github_source", 
checkout)
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
CacheDirResolver(cache_dir))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
ArchiveDirResolver(cache_dir))
     monkeypatch.setattr(atr.tasks.checks.compare, "_find_archive_root", 
find_root)
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
ReturnValue(tmp_root))
 
@@ -757,7 +757,7 @@ async def 
test_source_trees_records_failure_when_cache_dir_unavailable(
     payload = _make_payload()
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
CacheDirResolver(None))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
ArchiveDirResolver(None))
     monkeypatch.setattr(
         atr.tasks.checks.compare,
         "_checkout_github_source",
@@ -788,7 +788,7 @@ async def 
test_source_trees_records_failure_when_extra_entries_in_archive(
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
     monkeypatch.setattr(atr.tasks.checks.compare, "_checkout_github_source", 
checkout)
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
CacheDirResolver(cache_dir))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
ArchiveDirResolver(cache_dir))
     monkeypatch.setattr(atr.tasks.checks.compare, "_find_archive_root", 
find_root)
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
ReturnValue(tmp_root))
 
@@ -819,7 +819,7 @@ async def 
test_source_trees_reports_repo_only_sample_limited_to_five(
 
     monkeypatch.setattr(atr.tasks.checks.compare, "_load_tp_payload", 
PayloadLoader(payload))
     monkeypatch.setattr(atr.tasks.checks.compare, "_checkout_github_source", 
checkout)
-    monkeypatch.setattr(atr.tasks.checks, "resolve_cache_dir", 
CacheDirResolver(cache_dir))
+    monkeypatch.setattr(atr.tasks.checks, "resolve_archive_dir", 
ArchiveDirResolver(cache_dir))
     monkeypatch.setattr(atr.tasks.checks.compare, "_find_archive_root", 
find_root)
     monkeypatch.setattr(atr.tasks.checks.compare, "_compare_trees", compare)
     monkeypatch.setattr(atr.tasks.checks.compare.paths, "get_tmp_dir", 
ReturnValue(tmp_root))
diff --git a/tests/unit/test_quarantine_backfill.py 
b/tests/unit/test_quarantine_backfill.py
index 5b29b801..fe02d03f 100644
--- a/tests/unit/test_quarantine_backfill.py
+++ b/tests/unit/test_quarantine_backfill.py
@@ -36,7 +36,7 @@ def test_backfill_already_cached(monkeypatch: 
pytest.MonkeyPatch, tmp_path: path
     _create_tar_gz(archive_path)
 
     content_hash = hashes.compute_file_hash_sync(archive_path)
-    cache_key = hashes.filesystem_cache_archives_key(content_hash)
+    cache_key = hashes.filesystem_archives_key(content_hash)
     existing_cache = cache_dir / "proj" / "1.0" / cache_key
     existing_cache.mkdir(parents=True)
 
@@ -60,11 +60,11 @@ def 
test_backfill_continues_after_extraction_failure(monkeypatch: pytest.MonkeyP
     assert "good.tar.gz" in result[0][0]
 
     good_hash = hashes.compute_file_hash_sync(revision_dir / "good.tar.gz")
-    good_cache = cache_dir / "proj" / "1.0" / hashes.filesystem_cache_archives_key(good_hash)
+    good_cache = cache_dir / "proj" / "1.0" / hashes.filesystem_archives_key(good_hash)
     assert good_cache.is_dir()
 
     bad_hash = hashes.compute_file_hash_sync(revision_dir / "bad.tar.gz")
-    bad_cache = cache_dir / "proj" / "1.0" / hashes.filesystem_cache_archives_key(bad_hash)
+    bad_cache = cache_dir / "proj" / "1.0" / hashes.filesystem_archives_key(bad_hash)
     assert not bad_cache.exists()
 
 
@@ -114,7 +114,7 @@ def 
test_backfill_extracts_same_content_into_different_namespaces(
     assert len(result) == 2
 
     content_hash = hashes.compute_file_hash_sync(revision_a / 
"artifact.tar.gz")
-    cache_key = hashes.filesystem_cache_archives_key(content_hash)
+    cache_key = hashes.filesystem_archives_key(content_hash)
     assert (cache_dir / "projA" / "1.0" / cache_key).is_dir()
     assert (cache_dir / "projB" / "2.0" / cache_key).is_dir()
 
diff --git a/tests/unit/test_quarantine_task.py 
b/tests/unit/test_quarantine_task.py
index 2979c911..74b0f8d8 100644
--- a/tests/unit/test_quarantine_task.py
+++ b/tests/unit/test_quarantine_task.py
@@ -66,7 +66,7 @@ async def 
test_clear_quarantine_transitions_failed_to_acknowledged():
 
 
 @pytest.mark.asyncio
-async def test_extract_archives_to_cache_discards_staging_dir_on_enotempty_collision(
+async def test_extract_archives_discards_staging_dir_on_enotempty_collision(
     monkeypatch: pytest.MonkeyPatch, tmp_path: pathlib.Path
 ) -> None:
     quarantine_dir = tmp_path / "quarantine"
@@ -95,7 +95,7 @@ async def 
test_extract_archives_to_cache_discards_staging_dir_on_enotempty_colli
 
     entries = [sql.QuarantineFileEntryV1(rel_path=archive_rel_path, 
size_bytes=7, content_hash="blake3:ghi", errors=[])]
 
-    await quarantine._extract_archives_to_cache(
+    await quarantine._extract_archives(
         [quarantine.QuarantineArchiveEntry(rel_path=archive_rel_path, 
content_hash="blake3:ghi")],
         quarantine_dir,
         "proj",
@@ -103,7 +103,7 @@ async def 
test_extract_archives_to_cache_discards_staging_dir_on_enotempty_colli
         entries,
     )
 
-    cache_dir = cache_root / "proj" / "1.0" / quarantine.hashes.filesystem_cache_archives_key("blake3:ghi")
+    cache_dir = cache_root / "proj" / "1.0" / quarantine.hashes.filesystem_archives_key("blake3:ghi")
 
     assert cache_dir.is_dir()
     assert (cache_dir / "winner.txt").read_text() == "winner"
@@ -111,7 +111,7 @@ async def 
test_extract_archives_to_cache_discards_staging_dir_on_enotempty_colli
 
 
 @pytest.mark.asyncio
-async def test_extract_archives_to_cache_discards_staging_dir_when_other_worker_wins(
+async def test_extract_archives_discards_staging_dir_when_other_worker_wins(
     monkeypatch: pytest.MonkeyPatch, tmp_path: pathlib.Path
 ) -> None:
     quarantine_dir = tmp_path / "quarantine"
@@ -140,7 +140,7 @@ async def 
test_extract_archives_to_cache_discards_staging_dir_when_other_worker_
 
     entries = [sql.QuarantineFileEntryV1(rel_path=archive_rel_path, 
size_bytes=7, content_hash="blake3:def", errors=[])]
 
-    await quarantine._extract_archives_to_cache(
+    await quarantine._extract_archives(
         [quarantine.QuarantineArchiveEntry(rel_path=archive_rel_path, 
content_hash="blake3:def")],
         quarantine_dir,
         "proj",
@@ -148,7 +148,7 @@ async def 
test_extract_archives_to_cache_discards_staging_dir_when_other_worker_
         entries,
     )
 
-    cache_dir = cache_root / "proj" / "1.0" / 
quarantine.hashes.filesystem_cache_archives_key("blake3:def")
+    cache_dir = cache_root / "proj" / "1.0" / 
quarantine.hashes.filesystem_archives_key("blake3:def")
 
     assert cache_dir.is_dir()
     assert (cache_dir / "winner.txt").read_text() == "winner"
@@ -156,7 +156,7 @@ async def 
test_extract_archives_to_cache_discards_staging_dir_when_other_worker_
 
 
 @pytest.mark.asyncio
-async def test_extract_archives_to_cache_propagates_exarch_error_to_file_entry(
+async def test_extract_archives_propagates_exarch_error_to_file_entry(
     monkeypatch: pytest.MonkeyPatch, tmp_path: pathlib.Path
 ) -> None:
     quarantine_dir = tmp_path / "quarantine"
@@ -176,7 +176,7 @@ async def 
test_extract_archives_to_cache_propagates_exarch_error_to_file_entry(
     entries = [sql.QuarantineFileEntryV1(rel_path=archive_rel_path, 
size_bytes=7, content_hash="blake3:bad", errors=[])]
 
     with pytest.raises(RuntimeError, match="unsafe zip detected"):
-        await quarantine._extract_archives_to_cache(
+        await quarantine._extract_archives(
             [quarantine.QuarantineArchiveEntry(rel_path=archive_rel_path, 
content_hash="blake3:bad")],
             quarantine_dir,
             "proj",
@@ -189,7 +189,7 @@ async def 
test_extract_archives_to_cache_propagates_exarch_error_to_file_entry(
 
 
 @pytest.mark.asyncio
-async def test_extract_archives_to_cache_stages_in_temporary_then_promotes(
+async def test_extract_archives_stages_in_temporary_then_promotes(
     monkeypatch: pytest.MonkeyPatch, tmp_path: pathlib.Path
 ) -> None:
     quarantine_dir = tmp_path / "quarantine"
@@ -212,7 +212,7 @@ async def 
test_extract_archives_to_cache_stages_in_temporary_then_promotes(
 
     entries = [sql.QuarantineFileEntryV1(rel_path=archive_rel_path, 
size_bytes=7, content_hash="blake3:abc", errors=[])]
 
-    await quarantine._extract_archives_to_cache(
+    await quarantine._extract_archives(
         [quarantine.QuarantineArchiveEntry(rel_path=archive_rel_path, 
content_hash="blake3:abc")],
         quarantine_dir,
         "proj",
@@ -220,7 +220,7 @@ async def 
test_extract_archives_to_cache_stages_in_temporary_then_promotes(
         entries,
     )
 
-    cache_dir = cache_root / "proj" / "1.0" / 
quarantine.hashes.filesystem_cache_archives_key("blake3:abc")
+    cache_dir = cache_root / "proj" / "1.0" / 
quarantine.hashes.filesystem_archives_key("blake3:abc")
     staging_base = tmp_root
 
     assert recorded["archive_path"] == str(quarantine_dir / archive_rel_path)
@@ -377,7 +377,7 @@ async def 
test_validate_extraction_failure_marks_failed_and_deletes_dir(tmp_path
         ),
         mock.patch.object(
             quarantine,
-            "_extract_archives_to_cache",
+            "_extract_archives",
             new_callable=mock.AsyncMock,
             side_effect=RuntimeError("Extraction failure"),
         ),
@@ -389,7 +389,7 @@ async def 
test_validate_extraction_failure_marks_failed_and_deletes_dir(tmp_path
         )
 
     assert result is None
-    mock_mark.assert_awaited_once_with(row, ok_entries, "Archive extraction to cache failed: Extraction failure")
+    mock_mark.assert_awaited_once_with(row, ok_entries, "Archive extraction failed: Extraction failure")
     mock_rmtree.assert_awaited_once_with(quarantine_dir)
 
 
@@ -480,7 +480,7 @@ async def test_validate_success_calls_promote(tmp_path: 
pathlib.Path):
             new_callable=mock.AsyncMock,
             return_value=(ok_entries, False),
         ),
-        mock.patch.object(quarantine, "_extract_archives_to_cache", 
new_callable=mock.AsyncMock),
+        mock.patch.object(quarantine, "_extract_archives", 
new_callable=mock.AsyncMock),
         mock.patch.object(quarantine, "_promote", new_callable=mock.AsyncMock) 
as mock_promote,
         mock.patch.object(quarantine, "_mark_failed", 
new_callable=mock.AsyncMock) as mock_mark,
     ):


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to