This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/main by this push:
     new b01237c  Fix function ordering in some top level modules
b01237c is described below

commit b01237c6a3026b15334782b49129de81996a7c08
Author: Sean B. Palmer <[email protected]>
AuthorDate: Mon Dec 29 14:20:14 2025 +0000

    Fix function ordering in some top level modules
---
 atr/archives.py |  88 ++++++++++++++++++------------------
 atr/log.py      |   8 ++--
 atr/server.py   | 112 +++++++++++++++++++++++-----------------------
 atr/tabulate.py | 136 ++++++++++++++++++++++++++++----------------------------
 atr/validate.py |  18 ++++----
 5 files changed, 181 insertions(+), 181 deletions(-)

diff --git a/atr/archives.py b/atr/archives.py
index bea109f..b3bcbf4 100644
--- a/atr/archives.py
+++ b/atr/archives.py
@@ -82,50 +82,6 @@ def total_size(tgz_path: str, chunk_size: int = 4096) -> int:
     return total_size
 
 
-def _archive_extract_safe_process_file(
-    tf: tarfile.TarFile,
-    member: tarfile.TarInfo,
-    extract_dir: str,
-    total_extracted: int,
-    max_size: int,
-    chunk_size: int,
-) -> int:
-    """Process a single file member during safe archive extraction."""
-    target_path = os.path.join(extract_dir, member.name)
-    if not os.path.abspath(target_path).startswith(os.path.abspath(extract_dir)):
-        log.warning(f"Skipping potentially unsafe path: {member.name}")
-        return 0
-
-    os.makedirs(os.path.dirname(target_path), exist_ok=True)
-
-    source = tf.extractfile(member)
-    if source is None:
-        # Should not happen if member.isreg() is true
-        log.warning(f"Could not extract file object for member: {member.name}")
-        return 0
-
-    extracted_file_size = 0
-    try:
-        with open(target_path, "wb") as target:
-            while chunk := source.read(chunk_size):
-                target.write(chunk)
-                extracted_file_size += len(chunk)
-
-                # Check size limits during extraction
-                if (total_extracted + extracted_file_size) > max_size:
-                    # Clean up the partial file before raising
-                    target.close()
-                    os.unlink(target_path)
-                    raise ExtractionError(
-                        f"Extraction exceeded maximum size limit of {max_size} bytes",
-                        {"max_size": max_size, "current_size": total_extracted},
-                    )
-    finally:
-        source.close()
-
-    return extracted_file_size
-
-
 def _archive_extract_member(
     tf: tarfile.TarFile,
     member: tarfile.TarInfo,
@@ -179,6 +135,50 @@ def _archive_extract_member(
     return total_extracted, extracted_paths
 
 
+def _archive_extract_safe_process_file(
+    tf: tarfile.TarFile,
+    member: tarfile.TarInfo,
+    extract_dir: str,
+    total_extracted: int,
+    max_size: int,
+    chunk_size: int,
+) -> int:
+    """Process a single file member during safe archive extraction."""
+    target_path = os.path.join(extract_dir, member.name)
+    if not os.path.abspath(target_path).startswith(os.path.abspath(extract_dir)):
+        log.warning(f"Skipping potentially unsafe path: {member.name}")
+        return 0
+
+    os.makedirs(os.path.dirname(target_path), exist_ok=True)
+
+    source = tf.extractfile(member)
+    if source is None:
+        # Should not happen if member.isreg() is true
+        log.warning(f"Could not extract file object for member: {member.name}")
+        return 0
+
+    extracted_file_size = 0
+    try:
+        with open(target_path, "wb") as target:
+            while chunk := source.read(chunk_size):
+                target.write(chunk)
+                extracted_file_size += len(chunk)
+
+                # Check size limits during extraction
+                if (total_extracted + extracted_file_size) > max_size:
+                    # Clean up the partial file before raising
+                    target.close()
+                    os.unlink(target_path)
+                    raise ExtractionError(
+                        f"Extraction exceeded maximum size limit of {max_size} bytes",
+                        {"max_size": max_size, "current_size": total_extracted},
+                    )
+    finally:
+        source.close()
+
+    return extracted_file_size
+
+
+def _archive_extract_safe_process_hardlink(member: tarfile.TarInfo, extract_dir: str) -> None:
     """Safely create a hard link from the TarInfo entry."""
     target_path = _safe_path(extract_dir, member.name)
diff --git a/atr/log.py b/atr/log.py
index 13a68d7..6d91f1d 100644
--- a/atr/log.py
+++ b/atr/log.py
@@ -86,10 +86,6 @@ def log(level: int, msg: str) -> None:
     _event(level, msg)
 
 
-def python_repr(object_name: str) -> str:
-    return f"<{object_name}>"
-
-
 def performance(msg: str) -> None:
     if PERFORMANCE is not None:
         PERFORMANCE.info(msg)
@@ -100,6 +96,10 @@ def performance_init() -> None:
     PERFORMANCE = _performance_logger()
 
 
+def python_repr(object_name: str) -> str:
+    return f"<{object_name}>"
+
+
 def secret(msg: str, data: bytes) -> None:
     import base64
 
diff --git a/atr/server.py b/atr/server.py
index 2352660..25b9e1e 100644
--- a/atr/server.py
+++ b/atr/server.py
@@ -160,62 +160,6 @@ def _app_setup_context(app: base.QuartApp) -> None:
         }
 
 
-def _app_setup_security_headers(app: base.QuartApp) -> None:
-    """Setup security headers including a Content Security Policy."""
-
-    # Both object-src 'none' and base-uri 'none' are required by ASVS v5 3.4.3 (L2)
-    # The frame-ancestors 'none' directive is required by ASVS v5 3.4.6 (L2)
-    # Bootstrap uses data: URLs extensively, so we need to include that in img-src
-    csp_directives = [
-        "default-src 'self'",
-        "script-src 'self'",
-        "style-src 'self' 'unsafe-inline'",
-        "img-src 'self' https://apache.org https://incubator.apache.org https://www.apache.org data:",
-        "font-src 'self'",
-        "connect-src 'self'",
-        "frame-src 'none'",
-        "object-src 'none'",
-        "base-uri 'none'",
-        "form-action 'self'",
-        "frame-ancestors 'none'",
-    ]
-    csp_header = "; ".join(csp_directives)
-
-    permissions_policy = ", ".join(
-        [
-            "accelerometer=()",
-            "autoplay=()",
-            "camera=()",
-            "clipboard-read=()",
-            "clipboard-write=(self)",
-            "display-capture=()",
-            "geolocation=()",
-            "gyroscope=()",
-            "magnetometer=()",
-            "microphone=()",
-            "midi=()",
-            "payment=()",
-            "usb=()",
-            "xr-spatial-tracking=()",
-        ]
-    )
-
-    # X-Content-Type-Options: nosniff is required by ASVS v5 3.4.4 (L2)
-    # A strict Referrer-Policy is required by ASVS v5 3.4.5 (L2)
-    # ASVS does not specify exactly what is meant by strict
-    # We can't use Referrer-Policy: no-referrer because it breaks form redirection
-    # TODO: We could automatically include a form field noting the form action URL
-    @app.after_request
-    async def add_security_headers(response: quart.Response) -> quart.Response:
-        response.headers["Content-Security-Policy"] = csp_header
-        response.headers["Permissions-Policy"] = permissions_policy
-        response.headers["Referrer-Policy"] = "same-origin"
-        response.headers["X-Content-Type-Options"] = "nosniff"
-        response.headers["X-Frame-Options"] = "DENY"
-        response.headers["X-Permitted-Cross-Domain-Policies"] = "none"
-        return response
-
-
 def _app_setup_lifecycle(app: base.QuartApp) -> None:
     """Setup application lifecycle hooks."""
 
@@ -352,6 +296,62 @@ def _app_setup_logging(app: base.QuartApp, config_mode: config.Mode, app_config:
             log.info(f"STATE_DIR    = {app_config.STATE_DIR}")
 
 
+def _app_setup_security_headers(app: base.QuartApp) -> None:
+    """Setup security headers including a Content Security Policy."""
+
+    # Both object-src 'none' and base-uri 'none' are required by ASVS v5 3.4.3 (L2)
+    # The frame-ancestors 'none' directive is required by ASVS v5 3.4.6 (L2)
+    # Bootstrap uses data: URLs extensively, so we need to include that in img-src
+    csp_directives = [
+        "default-src 'self'",
+        "script-src 'self'",
+        "style-src 'self' 'unsafe-inline'",
+        "img-src 'self' https://apache.org https://incubator.apache.org https://www.apache.org data:",
+        "font-src 'self'",
+        "connect-src 'self'",
+        "frame-src 'none'",
+        "object-src 'none'",
+        "base-uri 'none'",
+        "form-action 'self'",
+        "frame-ancestors 'none'",
+    ]
+    csp_header = "; ".join(csp_directives)
+
+    permissions_policy = ", ".join(
+        [
+            "accelerometer=()",
+            "autoplay=()",
+            "camera=()",
+            "clipboard-read=()",
+            "clipboard-write=(self)",
+            "display-capture=()",
+            "geolocation=()",
+            "gyroscope=()",
+            "magnetometer=()",
+            "microphone=()",
+            "midi=()",
+            "payment=()",
+            "usb=()",
+            "xr-spatial-tracking=()",
+        ]
+    )
+
+    # X-Content-Type-Options: nosniff is required by ASVS v5 3.4.4 (L2)
+    # A strict Referrer-Policy is required by ASVS v5 3.4.5 (L2)
+    # ASVS does not specify exactly what is meant by strict
+    # We can't use Referrer-Policy: no-referrer because it breaks form redirection
+    # TODO: We could automatically include a form field noting the form action URL
+    @app.after_request
+    async def add_security_headers(response: quart.Response) -> quart.Response:
+        response.headers["Content-Security-Policy"] = csp_header
+        response.headers["Permissions-Policy"] = permissions_policy
+        response.headers["Referrer-Policy"] = "same-origin"
+        response.headers["X-Content-Type-Options"] = "nosniff"
+        response.headers["X-Frame-Options"] = "DENY"
+        response.headers["X-Permitted-Cross-Domain-Policies"] = "none"
+        return response
+
+
 def _create_app(app_config: type[config.AppConfig]) -> base.QuartApp:
     """Create and configure the application."""
     if os.sep != "/":
diff --git a/atr/tabulate.py b/atr/tabulate.py
index 895315a..b7ea3ce 100644
--- a/atr/tabulate.py
+++ b/atr/tabulate.py
@@ -25,74 +25,6 @@ import atr.models.sql as sql
 import atr.util as util
 
 
-async def votes(
-    committee: sql.Committee | None, thread_id: str
-) -> tuple[int | None, dict[str, models.tabulate.VoteEmail]]:
-    """Tabulate votes."""
-    start = time.perf_counter_ns()
-    email_to_uid = await util.email_to_uid_map()
-    end = time.perf_counter_ns()
-    log.info(f"LDAP search took {(end - start) / 1000000} ms")
-    log.info(f"Email addresses from LDAP: {len(email_to_uid)}")
-
-    start = time.perf_counter_ns()
-    tabulated_votes = {}
-    start_unixtime = None
-    async for _mid, msg in util.thread_messages(thread_id):
-        from_raw = msg.get("from_raw", "")
-        ok, from_email_lower, asf_uid = _vote_identity(from_raw, email_to_uid)
-        if not ok:
-            continue
-
-        if asf_uid is not None:
-            asf_uid_or_email = asf_uid
-            list_raw = msg.get("list_raw", "")
-            status = await _vote_status(asf_uid, list_raw, committee)
-        else:
-            asf_uid_or_email = from_email_lower
-            status = models.tabulate.VoteStatus.UNKNOWN
-
-        if start_unixtime is None:
-            epoch = msg.get("epoch", "")
-            if epoch:
-                start_unixtime = int(epoch)
-
-        subject = msg.get("subject", "")
-        if "[RESULT]" in subject:
-            break
-
-        body = msg.get("body", "")
-        if not body:
-            continue
-
-        castings = _vote_castings(body)
-        if not castings:
-            continue
-
-        if len(castings) == 1:
-            vote_cast = castings[0][0]
-        else:
-            vote_cast = models.tabulate.Vote.UNKNOWN
-        quotation = " // ".join([c[1] for c in castings])
-
-        vote_email = models.tabulate.VoteEmail(
-            asf_uid_or_email=asf_uid_or_email,
-            from_email=from_email_lower,
-            status=status,
-            asf_eid=msg.get("mid", ""),
-            iso_datetime=msg.get("date", ""),
-            vote=vote_cast,
-            quotation=quotation,
-            updated=asf_uid_or_email in tabulated_votes,
-        )
-        tabulated_votes[asf_uid_or_email] = vote_email
-    end = time.perf_counter_ns()
-    log.info(f"Tabulated votes: {len(tabulated_votes)}")
-    log.info(f"Tabulation took {(end - start) / 1000000} ms")
-
-    return start_unixtime, tabulated_votes
-
-
 async def vote_committee(thread_id: str, release: sql.Release) -> sql.Committee | None:
     committee = release.project.committee
     if util.is_dev_environment():
@@ -203,6 +135,74 @@ def vote_summary(tabulated_votes: dict[str, models.tabulate.VoteEmail]) -> dict[
     return result
 
 
+async def votes(
+    committee: sql.Committee | None, thread_id: str
+) -> tuple[int | None, dict[str, models.tabulate.VoteEmail]]:
+    """Tabulate votes."""
+    start = time.perf_counter_ns()
+    email_to_uid = await util.email_to_uid_map()
+    end = time.perf_counter_ns()
+    log.info(f"LDAP search took {(end - start) / 1000000} ms")
+    log.info(f"Email addresses from LDAP: {len(email_to_uid)}")
+
+    start = time.perf_counter_ns()
+    tabulated_votes = {}
+    start_unixtime = None
+    async for _mid, msg in util.thread_messages(thread_id):
+        from_raw = msg.get("from_raw", "")
+        ok, from_email_lower, asf_uid = _vote_identity(from_raw, email_to_uid)
+        if not ok:
+            continue
+
+        if asf_uid is not None:
+            asf_uid_or_email = asf_uid
+            list_raw = msg.get("list_raw", "")
+            status = await _vote_status(asf_uid, list_raw, committee)
+        else:
+            asf_uid_or_email = from_email_lower
+            status = models.tabulate.VoteStatus.UNKNOWN
+
+        if start_unixtime is None:
+            epoch = msg.get("epoch", "")
+            if epoch:
+                start_unixtime = int(epoch)
+
+        subject = msg.get("subject", "")
+        if "[RESULT]" in subject:
+            break
+
+        body = msg.get("body", "")
+        if not body:
+            continue
+
+        castings = _vote_castings(body)
+        if not castings:
+            continue
+
+        if len(castings) == 1:
+            vote_cast = castings[0][0]
+        else:
+            vote_cast = models.tabulate.Vote.UNKNOWN
+        quotation = " // ".join([c[1] for c in castings])
+
+        vote_email = models.tabulate.VoteEmail(
+            asf_uid_or_email=asf_uid_or_email,
+            from_email=from_email_lower,
+            status=status,
+            asf_eid=msg.get("mid", ""),
+            iso_datetime=msg.get("date", ""),
+            vote=vote_cast,
+            quotation=quotation,
+            updated=asf_uid_or_email in tabulated_votes,
+        )
+        tabulated_votes[asf_uid_or_email] = vote_email
+    end = time.perf_counter_ns()
+    log.info(f"Tabulated votes: {len(tabulated_votes)}")
+    log.info(f"Tabulation took {(end - start) / 1000000} ms")
+
+    return start_unixtime, tabulated_votes
+
+
 def _format_duration(duration_hours: float | int) -> str:
     hours = int(duration_hours)
     minutes = round((duration_hours - hours) * 60)
diff --git a/atr/validate.py b/atr/validate.py
index 1b03b20..575f2e5 100644
--- a/atr/validate.py
+++ b/atr/validate.py
@@ -77,6 +77,15 @@ def committee_components(
     return wrap
 
 
+@committee_components("Committee.child_committees")
+def committee_child_committees(c: sql.Committee) -> Divergences:
+    """Check that a committee has no child_committees."""
+
+    expected: list[object] = []
+    actual = c.child_committees
+    yield from divergences(expected, actual)
+
+
 @committee_components("Committee.full_name")
 def committee_full_name(c: sql.Committee) -> Divergences:
     """Validate the Committee.full_name value."""
@@ -111,15 +120,6 @@ def committee_full_name(c: sql.Committee) -> Divergences:
     )
 
 
-@committee_components("Committee.child_committees")
-def committee_child_committees(c: sql.Committee) -> Divergences:
-    """Check that a committee has no child_committees."""
-
-    expected: list[object] = []
-    actual = c.child_committees
-    yield from divergences(expected, actual)
-
-
 def committees(cs: Iterable[sql.Committee]) -> AnnotatedDivergences:
     """Validate multiple committees."""
     for c in cs:


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to