This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git
The following commit(s) were added to refs/heads/main by this push:
new 229c16d Add CI debugging
229c16d is described below
commit 229c16d1e2cace7d696d4c97174c65f72d112e7a
Author: Sean B. Palmer <[email protected]>
AuthorDate: Sun Jan 4 20:22:06 2026 +0000
Add CI debugging
---
atr/admin/__init__.py | 1 +
atr/log.py | 28 ++++++++++++++++++++++++++++
atr/server.py | 10 ++++++++++
atr/storage/readers/releases.py | 19 +++++++++++++------
atr/tasks/__init__.py | 4 ++++
atr/tasks/checks/__init__.py | 5 +++++
playwright/test.py | 27 +++++++++++++++++++++++++++
7 files changed, 88 insertions(+), 6 deletions(-)
diff --git a/atr/admin/__init__.py b/atr/admin/__init__.py
index 3b54161..9aee31a 100644
--- a/atr/admin/__init__.py
+++ b/atr/admin/__init__.py
@@ -1051,6 +1051,7 @@ async def _ongoing_tasks(
) -> web.QuartResponse:
try:
ongoing = await interaction.tasks_ongoing(project_name, version_name, revision)
+ log.info(f"DEBUG _ongoing_tasks: {project_name}/{version_name}/{revision} -> {ongoing} ongoing")
return web.TextResponse(str(ongoing))
except Exception:
log.exception(f"Error fetching ongoing task count for {project_name} {version_name} rev {revision}:")
diff --git a/atr/log.py b/atr/log.py
index 6d91f1d..f838a3a 100644
--- a/atr/log.py
+++ b/atr/log.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
+import collections
import inspect
import logging
import logging.handlers
@@ -23,6 +24,20 @@ from typing import Final
PERFORMANCE: logging.Logger | None = None
+# Ring buffer for recent log entries
+_RECENT_LOGS: collections.deque[str] = collections.deque(maxlen=500)
+
+
+class BufferingHandler(logging.Handler):
+ """Handler that stores formatted log records in a ring buffer."""
+
+ def emit(self, record: logging.LogRecord) -> None:
+ try:
+ msg = self.format(record)
+ _RECENT_LOGS.append(msg)
+ except Exception:
+ self.handleError(record)
+
def caller_name(depth: int = 1) -> str:
frame = inspect.currentframe()
@@ -73,10 +88,23 @@ def exception(msg: str) -> None:
_event(logging.ERROR, msg, exc_info=True)
+def get_recent_logs() -> list[str]:
+ """Return recent log entries for debugging."""
+ return list(_RECENT_LOGS)
+
+
def info(msg: str) -> None:
_event(logging.INFO, msg)
+def install_debug_handler() -> None:
+ """Install the buffering handler on the root logger."""
+ handler = BufferingHandler()
+ handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s"))
+ handler.setLevel(logging.DEBUG)
+ logging.getLogger().addHandler(handler)
+
+
def interface_name(depth: int = 1) -> str:
return caller_name(depth=depth)
diff --git a/atr/server.py b/atr/server.py
index 120e302..f5d175b 100644
--- a/atr/server.py
+++ b/atr/server.py
@@ -317,6 +317,16 @@ def _app_setup_logging(app: base.QuartApp, config_mode: config.Mode, app_config:
if config_mode == config.Mode.Debug:
logging.getLogger(atr.__name__).setLevel(logging.DEBUG)
+ # Install the debug log handler
+ if config_mode != config.Mode.Production:
+ log.install_debug_handler()
+
+ @app.route("/debug-logs")
+ async def debug_logs() -> str:
"""Return recent log entries for debugging (non-production only)."""
+ logs = log.get_recent_logs()
+ return "\n".join(logs)
+
# Only log in the worker process
@app.before_serving
async def log_debug_info() -> None:
diff --git a/atr/storage/readers/releases.py b/atr/storage/readers/releases.py
index 9d2a157..a52d517 100644
--- a/atr/storage/readers/releases.py
+++ b/atr/storage/readers/releases.py
@@ -24,6 +24,7 @@ import re
import atr.analysis as analysis
import atr.db as db
+import atr.log as log
import atr.models.sql as sql
import atr.storage as storage
import atr.storage.types as types
@@ -145,14 +146,20 @@ class GeneralPublic:
cs.info.errors.setdefault(pathlib.Path(primary_rel_path), []).append(error)
async def __successes(self, cs: types.ChecksSubset) -> None:
- successes = await self.__data.check_result(
- release_name=cs.release.name,
- revision_number=cs.latest_revision_number,
- member_rel_path=None,
- status=sql.CheckResultStatus.SUCCESS,
- ).all()
+ successes = list(
+ await self.__data.check_result(
+ release_name=cs.release.name,
+ revision_number=cs.latest_revision_number,
+ member_rel_path=None,
+ status=sql.CheckResultStatus.SUCCESS,
+ ).all()
+ )
+ log.info(
+ f"DEBUG __successes: Found {len(successes)} successes for {cs.release.name} rev {cs.latest_revision_number}"
+ )
for success in successes:
# Successes cannot be ignored
+ log.info(f"DEBUG __successes: {success.checker} -> {success.primary_rel_path}")
if primary_rel_path := success.primary_rel_path:
cs.info.successes.setdefault(pathlib.Path(primary_rel_path), []).append(success)
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index 963f765..bc26990 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -19,6 +19,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from typing import Any, Final
import atr.db as db
+import atr.log as log
import atr.models.results as results
import atr.models.sql as sql
import atr.tasks.checks.hashing as hashing
@@ -92,6 +93,9 @@ async def draft_checks(
for task in await task_function(asf_uid, release, revision_number, path_str):
task.revision_number = revision_number
data.add(task)
+ log.info(f"DEBUG draft_checks: Added task {task.task_type} for {path_str}")
+ else:
+ log.info(f"DEBUG draft_checks: No task function for {path_str}")
# TODO: Should we check .json files for their content?
# Ideally we would not have to do that
if path.name.endswith(".cdx.json"):
diff --git a/atr/tasks/checks/__init__.py b/atr/tasks/checks/__init__.py
index 6bcfb36..50032a3 100644
--- a/atr/tasks/checks/__init__.py
+++ b/atr/tasks/checks/__init__.py
@@ -34,6 +34,7 @@ if TYPE_CHECKING:
import atr.models.schema as schema
import atr.db as db
+import atr.log as log
import atr.models.sql as sql
import atr.util as util
@@ -147,6 +148,10 @@ class Recorder:
async with db.session() as session:
session.add(result)
await session.commit()
+ log.info(
+ f"DEBUG Recorder._add: {self.checker} {status.value}"
+ f" for {result.primary_rel_path} rev {result.revision_number}"
+ )
return result
async def abs_path(self, rel_path: str | None = None) -> pathlib.Path | None:
diff --git a/playwright/test.py b/playwright/test.py
index ebe6fc2..24f3999 100755
--- a/playwright/test.py
+++ b/playwright/test.py
@@ -622,6 +622,33 @@ def test_checks_02_license_files(page: Page, credentials: Credentials) -> None:
row_locator = page.locator(f"tr:has(:text('{filename_targz}'))")
evaluate_link_title = f"Show report for {filename_targz}"
evaluate_link_locator = row_locator.locator(f'a[title="{evaluate_link_title}"]')
+
+ if not evaluate_link_locator.is_visible():
+ logging.error("DEBUG: Link not visible. Dumping page diagnostics...")
+ all_rows = page.locator("tr").all()
+ logging.error(f"DEBUG: Found {len(all_rows)} table rows")
+ for i, row in enumerate(all_rows):
+ row_text = row.inner_text()
+ logging.error(f"DEBUG: Row {i}: {row_text[:200]!r}")
+ all_links = page.locator("a[title]").all()
+ logging.error(f"DEBUG: Found {len(all_links)} links with titles")
+ for link in all_links:
+ logging.error(f"DEBUG: Link title={link.get_attribute('title')!r}")
+ page_html = page.content()
+ logging.error(f"DEBUG: Page HTML length: {len(page_html)}")
+ logging.error(f"DEBUG: Page HTML (first 2000 chars): {page_html[:2000]}")
+
+ logging.error("DEBUG: Fetching server debug logs...")
+ try:
+ debug_logs_url = f"{ATR_BASE_URL}/debug-logs"
+ response = page.request.get(debug_logs_url)
+ if response.ok:
+ logging.error(f"DEBUG: Server logs:\n{response.text()}")
+ else:
+ logging.error(f"DEBUG: Failed to fetch server logs: {response.status}")
+ except Exception as e:
+ logging.error(f"DEBUG: Error fetching server logs: {e}")
+
expect(evaluate_link_locator).to_be_visible()
logging.info(f"Clicking 'Show report' link for {filename_targz}")
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]