This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-atr-experiments.git


The following commit(s) were added to refs/heads/main by this push:
     new c9cf64d  Convert to use aiosqlite throughout
c9cf64d is described below

commit c9cf64d0ccf79a751f62980b89f209b8e0f1352d
Author: Sean B. Palmer <s...@miscoranda.com>
AuthorDate: Fri Feb 14 19:10:25 2025 +0200

    Convert to use aiosqlite throughout
---
 .pre-commit-config.yaml         |   1 +
 atr/routes.py                   | 307 +++++++++++++++++++++-------------------
 atr/server.py                   |  33 +++--
 migrations/env.py               |  49 ++-----
 poetry.lock                     |  22 ++-
 pyproject.toml                  |   3 +
 scripts/poetry/sync-dev         |   4 +-
 typestubs/asfquart/generics.pyi |   6 +-
 uv.lock                         |  16 +++
 9 files changed, 244 insertions(+), 197 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a023b9d..8394d5b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,3 +1,4 @@
+# TODO: Add a shellcheck lint
 exclude: '^(asfquart)'
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
diff --git a/atr/routes.py b/atr/routes.py
index 8721dbf..b7f0421 100644
--- a/atr/routes.py
+++ b/atr/routes.py
@@ -22,7 +22,7 @@ from io import BufferedReader
 import json
 import pprint
 from pathlib import Path
-from typing import List, Tuple, Optional, Dict, Any
+from typing import List, Tuple, Optional, Dict, Any, cast
 import datetime
 import asyncio
 
@@ -31,10 +31,13 @@ from asfquart.auth import Requirements as R, require
 from asfquart.base import ASFQuartException
 from asfquart.session import read as session_read, ClientSession
 from quart import current_app, render_template, request
-from sqlmodel import Session, select
+from sqlmodel import select
+from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm.attributes import InstrumentedAttribute
 import httpx
 import gnupg
+from sqlalchemy.orm import selectinload
 
 from .models import (
     DistributionChannel,
@@ -137,36 +140,36 @@ async def root_add_release_candidate() -> str:
         checksum_512 = compute_sha512(artifact_path)
 
         # Store in database
-        with Session(current_app.config["engine"]) as db_session:
-            # Get PMC
-            statement = select(PMC).where(PMC.project_name == project_name)
-            pmc = db_session.exec(statement).first()
-            if not pmc:
-                raise ASFQuartException("PMC not found", errorcode=404)
-
-            # Create release record using artifact hash as storage key
-            # At some point this presumably won't work, because we can have many artifacts
-            # But meanwhile it's fine
-            # TODO: Extract version from filename or add to form
-            release = Release(
-                storage_key=artifact_hash,
-                stage=ReleaseStage.CANDIDATE,
-                phase=ReleasePhase.RELEASE_CANDIDATE,
-                pmc_id=pmc.id,
-                version="",
-            )
-            db_session.add(release)
-
-            # Create package record
-            package = Package(
-                file=str(artifact_path.relative_to(current_app.config["RELEASE_STORAGE_DIR"])),
-                signature=str(signature_path.relative_to(current_app.config["RELEASE_STORAGE_DIR"])),
-                checksum=checksum_512,
-                release_key=release.storage_key,
-            )
-            db_session.add(package)
-
-            db_session.commit()
+        async_session = current_app.config["async_session"]
+        async with async_session() as db_session:
+            async with db_session.begin():
+                # Get PMC
+                statement = select(PMC).where(PMC.project_name == project_name)
+                pmc = (await db_session.execute(statement)).scalar_one_or_none()
+                if not pmc:
+                    raise ASFQuartException("PMC not found", errorcode=404)
+
+                # Create release record using artifact hash as storage key
+                # At some point this presumably won't work, because we can have many artifacts
+                # But meanwhile it's fine
+                # TODO: Extract version from filename or add to form
+                release = Release(
+                    storage_key=artifact_hash,
+                    stage=ReleaseStage.CANDIDATE,
+                    phase=ReleasePhase.RELEASE_CANDIDATE,
+                    pmc_id=pmc.id,
+                    version="",
+                )
+                db_session.add(release)
+
+                # Create package record
+                package = Package(
+                    file=str(artifact_path.relative_to(current_app.config["RELEASE_STORAGE_DIR"])),
+                    signature=str(signature_path.relative_to(current_app.config["RELEASE_STORAGE_DIR"])),
+                    checksum=checksum_512,
+                    release_key=release.storage_key,
+                )
+                db_session.add(package)
 
             return f"Successfully uploaded release candidate for 
{project_name}"
 
@@ -207,10 +210,11 @@ async def root_admin_database(model: str = "PMC") -> str:
         # Default to PMC if invalid model specified
         model = "PMC"
 
-    with Session(current_app.config["engine"]) as db_session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         # Get all records for the selected model
         statement = select(models[model])
-        records = db_session.exec(statement).all()
+        records = (await db_session.execute(statement)).scalars().all()
 
         # Convert records to dictionaries for JSON serialization
         records_dict = []
@@ -244,8 +248,6 @@ async def root_admin_update_pmcs() -> str:
         raise ASFQuartException("You are not authorized to update PMCs", 
errorcode=403)
 
     if request.method == "POST":
-        # TODO: We should probably lift this branch
-        # Or have the "GET" in a branch, and then we can happy path this POST branch
         # Fetch committee-info.json from Whimsy
         WHIMSY_URL = "https://whimsy.apache.org/public/committee-info.json";
         async with httpx.AsyncClient() as client:
@@ -259,49 +261,49 @@ async def root_admin_update_pmcs() -> str:
         committees = data.get("committees", {})
         updated_count = 0
 
-        with Session(current_app.config["engine"]) as db_session:
-            for committee_id, info in committees.items():
-                # Skip non-PMC committees
-                if not info.get("pmc", False):
-                    continue
-
-                # Get or create PMC
-                statement = select(PMC).where(PMC.project_name == committee_id)
-                pmc = db_session.exec(statement).first()
-                if not pmc:
-                    pmc = PMC(project_name=committee_id)
-                    db_session.add(pmc)
-
-                # Update PMC data
-                roster = info.get("roster", {})
-                # All roster members are PMC members
-                pmc.pmc_members = list(roster.keys())
-                # All PMC members are also committers
-                pmc.committers = list(roster.keys())
-
-                # Mark chairs as release managers
-                # TODO: Who else is a release manager? How do we know?
-                chairs = [m["id"] for m in info.get("chairs", [])]
-                pmc.release_managers = chairs
-
-                updated_count += 1
-
-            # Add special entry for Tooling PMC
-            # Not clear why, but it's not in the Whimsy data
-            statement = select(PMC).where(PMC.project_name == "tooling")
-            tooling_pmc = db_session.exec(statement).first()
-            if not tooling_pmc:
-                tooling_pmc = PMC(project_name="tooling")
-                db_session.add(tooling_pmc)
-                updated_count += 1
-
-            # Update Tooling PMC data
-            # Could put this in the "if not tooling_pmc" block, perhaps
-            tooling_pmc.pmc_members = ["wave", "tn", "sbp"]
-            tooling_pmc.committers = ["wave", "tn", "sbp"]
-            tooling_pmc.release_managers = ["wave"]
-
-            db_session.commit()
+        async_session = current_app.config["async_session"]
+        async with async_session() as db_session:
+            async with db_session.begin():
+                for committee_id, info in committees.items():
+                    # Skip non-PMC committees
+                    if not info.get("pmc", False):
+                        continue
+
+                    # Get or create PMC
+                    statement = select(PMC).where(PMC.project_name == committee_id)
+                    pmc = (await db_session.execute(statement)).scalar_one_or_none()
+                    if not pmc:
+                        pmc = PMC(project_name=committee_id)
+                        db_session.add(pmc)
+
+                    # Update PMC data
+                    roster = info.get("roster", {})
+                    # All roster members are PMC members
+                    pmc.pmc_members = list(roster.keys())
+                    # All PMC members are also committers
+                    pmc.committers = list(roster.keys())
+
+                    # Mark chairs as release managers
+                    # TODO: Who else is a release manager? How do we know?
+                    chairs = [m["id"] for m in info.get("chairs", [])]
+                    pmc.release_managers = chairs
+
+                    updated_count += 1
+
+                # Add special entry for Tooling PMC
+                # Not clear why, but it's not in the Whimsy data
+                statement = select(PMC).where(PMC.project_name == "tooling")
+                tooling_pmc = (await db_session.execute(statement)).scalar_one_or_none()
+                if not tooling_pmc:
+                    tooling_pmc = PMC(project_name="tooling")
+                    db_session.add(tooling_pmc)
+                    updated_count += 1
+
+                # Update Tooling PMC data
+                # Could put this in the "if not tooling_pmc" block, perhaps
+                tooling_pmc.pmc_members = ["wave", "tn", "sbp"]
+                tooling_pmc.committers = ["wave", "tn", "sbp"]
+                tooling_pmc.release_managers = ["wave"]
 
         return f"Successfully updated {updated_count} PMCs from Whimsy"
 
@@ -312,9 +314,10 @@ async def root_admin_update_pmcs() -> str:
 @APP.get("/database/debug")
 async def root_database_debug() -> str:
     """Debug information about the database."""
-    with Session(current_app.config["engine"]) as session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         statement = select(PMC)
-        pmcs = session.exec(statement).all()
+        pmcs = (await db_session.execute(statement)).scalars().all()
         return f"Database using {current_app.config['DATA_MODELS_FILE']} has 
{len(pmcs)} PMCs"
 
 
@@ -326,10 +329,13 @@ async def root_release_signatures_verify(release_key: str) -> str:
     if session is None:
         raise ASFQuartException("Not authenticated", errorcode=401)
 
-    with Session(current_app.config["engine"]) as db_session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         # Get the release and its packages
-        statement = select(Release).where(Release.storage_key == release_key)
-        release = db_session.exec(statement).first()
+        release_packages = selectinload(cast(InstrumentedAttribute[List[Package]], Release.packages))
+        release_pmc = selectinload(cast(InstrumentedAttribute[PMC], Release.pmc))
+        statement = select(Release).options(release_packages, release_pmc).where(Release.storage_key == release_key)
+        release = (await db_session.execute(statement)).scalar_one_or_none()
         if not release:
             raise ASFQuartException("Release not found", errorcode=404)
 
@@ -368,9 +374,10 @@ async def root_pages() -> str:
 @APP.route("/pmc/<project_name>")
 async def root_pmc_arg(project_name: str) -> dict:
     "Get a specific PMC by project name."
-    with Session(current_app.config["engine"]) as session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         statement = select(PMC).where(PMC.project_name == project_name)
-        pmc = session.exec(statement).first()
+        pmc = (await db_session.execute(statement)).scalar_one_or_none()
 
         if not pmc:
             raise ASFQuartException("PMC not found", errorcode=404)
@@ -394,16 +401,16 @@ async def root_pmc_create_arg(project_name: str) -> dict:
         release_managers=["alice"],
     )
 
-    with Session(current_app.config["engine"]) as session:
-        try:
-            session.add(pmc)
-            session.commit()
-            session.refresh(pmc)
-        except IntegrityError:
-            raise ASFQuartException(
-                f"PMC with name '{project_name}' already exists",
-                errorcode=409,  # HTTP 409 Conflict
-            )
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
+        async with db_session.begin():
+            try:
+                db_session.add(pmc)
+            except IntegrityError:
+                raise ASFQuartException(
+                    f"PMC with name '{project_name}' already exists",
+                    errorcode=409,  # HTTP 409 Conflict
+                )
 
         # Convert to dict for response
         return {
@@ -418,19 +425,21 @@ async def root_pmc_create_arg(project_name: str) -> dict:
 @APP.route("/pmc/directory")
 async def root_pmc_directory() -> str:
     "Main PMC directory page."
-    with Session(current_app.config["engine"]) as session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         # Get all PMCs and their latest releases
         statement = select(PMC)
-        pmcs = session.exec(statement).all()
+        pmcs = (await db_session.execute(statement)).scalars().all()
         return await render_template("pmc-directory.html", pmcs=pmcs)
 
 
 @APP.route("/pmc/list")
 async def root_pmc_list() -> List[dict]:
     "List all PMCs in the database."
-    with Session(current_app.config["engine"]) as session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         statement = select(PMC)
-        pmcs = session.exec(statement).all()
+        pmcs = (await db_session.execute(statement)).scalars().all()
 
         return [
             {
@@ -463,9 +472,10 @@ async def root_user_keys_add() -> str:
     user_keys = []
 
     # Get all existing keys for the user
-    with Session(current_app.config["engine"]) as db_session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         statement = select(PublicSigningKey).where(PublicSigningKey.user_id == session.uid)
-        user_keys = db_session.exec(statement).all()
+        user_keys = (await db_session.execute(statement)).scalars().all()
 
     if request.method == "POST":
         form = await request.form
@@ -493,18 +503,19 @@ async def root_user_keys_delete() -> str:
     if session is None:
         raise ASFQuartException("Not authenticated", errorcode=401)
 
-    with Session(current_app.config["engine"]) as db_session:
-        # Get all keys for the user
-        # TODO: Might be clearer if user_id were "asf_id"
-        # But then we'd also want session.uid to be session.asf_id instead
-        statement = select(PublicSigningKey).where(PublicSigningKey.user_id == session.uid)
-        keys = db_session.exec(statement).all()
-        count = len(keys)
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
+        async with db_session.begin():
+            # Get all keys for the user
+            # TODO: Might be clearer if user_id were "asf_id"
+            # But then we'd also want session.uid to be session.asf_id instead
+            statement = select(PublicSigningKey).where(PublicSigningKey.user_id == session.uid)
+            keys = (await db_session.execute(statement)).scalars().all()
+            count = len(keys)
 
-        # Delete all keys
-        for key in keys:
-            db_session.delete(key)
-        db_session.commit()
+            # Delete all keys
+            for key in keys:
+                await db_session.delete(key)
 
         return f"Deleted {count} keys"
 
@@ -517,12 +528,20 @@ async def root_user_uploads() -> str:
     if session is None:
         raise ASFQuartException("Not authenticated", errorcode=401)
 
-    with Session(current_app.config["engine"]) as db_session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         # Get all releases where the user is a PMC member of the associated PMC
         # TODO: We don't actually record who uploaded the release candidate
         # We should probably add that information!
-        statement = select(Release).join(PMC).where(Release.stage == ReleaseStage.CANDIDATE)
-        releases = db_session.exec(statement).all()
+        release_pmc = selectinload(cast(InstrumentedAttribute[PMC], Release.pmc))
+        release_packages = selectinload(cast(InstrumentedAttribute[List[Package]], Release.packages))
+        statement = (
+            select(Release)
+            .options(release_pmc, release_packages)
+            .join(PMC)
+            .where(Release.stage == ReleaseStage.CANDIDATE)
+        )
+        releases = (await db_session.execute(statement)).scalars().all()
 
         # Filter to only show releases for PMCs where the user is a member
         user_releases = []
@@ -585,16 +604,17 @@ async def user_keys_add(session: ClientSession, public_key: str) -> Tuple[str, O
         return ("Key is not long enough; must be at least 2048 bits", None)
 
     # Store key in database
-    with Session(current_app.config["engine"]) as db_session:
+    async_session = current_app.config["async_session"]
+    async with async_session() as db_session:
         return await user_keys_add_session(session, public_key, key, db_session)
 
 
 async def user_keys_add_session(
-    session: ClientSession, public_key: str, key: dict, db_session: Session
+    session: ClientSession, public_key: str, key: dict, db_session: AsyncSession
 ) -> Tuple[str, Optional[dict]]:
     # Check if key already exists
     statement = select(PublicSigningKey).where(PublicSigningKey.user_id == session.uid)
-    existing_key = db_session.exec(statement).first()
+    existing_key = (await db_session.execute(statement)).scalar_one_or_none()
 
     if existing_key:
         # TODO: We should allow more than one key per user
@@ -603,33 +623,28 @@ async def user_keys_add_session(
     if not session.uid:
         return ("You must be signed in to add a key", None)
 
-    # Create new key record
-    key_record = PublicSigningKey(
-        user_id=session.uid,
-        public_key=public_key,
-        key_type=key.get("type", "unknown"),
-        expiration=datetime.datetime.fromtimestamp(int(key["expires"]))
-        if key.get("expires")
-        else datetime.datetime.max,
-    )
-    db_session.add(key_record)
-
-    # Link key to user's PMCs
-    for pmc_name in session.committees:
-        statement = select(PMC).where(PMC.project_name == pmc_name)
-        pmc = db_session.exec(statement).first()
-        if pmc and pmc.id and session.uid:
-            link = PMCKeyLink(pmc_id=pmc.id, key_user_id=session.uid)
-            db_session.add(link)
-        else:
-            # TODO: Log? Add to "error"?
-            continue
-
-    try:
-        db_session.commit()
-    except IntegrityError:
-        db_session.rollback()
-        return ("Failed to save key", None)
+    async with db_session.begin():
+        # Create new key record
+        key_record = PublicSigningKey(
+            user_id=session.uid,
+            public_key=public_key,
+            key_type=key.get("type", "unknown"),
+            expiration=datetime.datetime.fromtimestamp(int(key["expires"]))
+            if key.get("expires")
+            else datetime.datetime.max,
+        )
+        db_session.add(key_record)
+
+        # Link key to user's PMCs
+        for pmc_name in session.committees:
+            statement = select(PMC).where(PMC.project_name == pmc_name)
+            pmc = (await db_session.execute(statement)).scalar_one_or_none()
+            if pmc and pmc.id and session.uid:
+                link = PMCKeyLink(pmc_id=pmc.id, key_user_id=session.uid)
+                db_session.add(link)
+            else:
+                # TODO: Log? Add to "error"?
+                continue
 
     return (
         "",
diff --git a/atr/server.py b/atr/server.py
index c8305e0..f0c3930 100644
--- a/atr/server.py
+++ b/atr/server.py
@@ -20,13 +20,20 @@
 import os
 
 import asfquart
+import asfquart.generics
 from asfquart.base import QuartApp
-from sqlmodel import SQLModel, create_engine
+from sqlmodel import SQLModel
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
 from alembic import command
 from alembic.config import Config
+from sqlalchemy.sql import text
 
 from .models import __file__ as data_models_file
 
+# Avoid OIDC
+asfquart.generics.OAUTH_URL_INIT = "https://oauth.apache.org/auth?state=%s&redirect_uri=%s"
+asfquart.generics.OAUTH_URL_CALLBACK = "https://oauth.apache.org/token?code=%s"
+
 
 def register_routes() -> str:
     from . import routes
@@ -58,8 +65,9 @@ def create_app() -> QuartApp:
         app.config["RELEASE_STORAGE_DIR"] = release_storage
         app.config["DATA_MODELS_FILE"] = data_models_file
 
-        sqlite_url = "sqlite:///./atr.db"
-        engine = create_engine(
+        # Use aiosqlite for async SQLite access
+        sqlite_url = "sqlite+aiosqlite:///./atr.db"
+        engine = create_async_engine(
             sqlite_url,
             connect_args={
                 "check_same_thread": False,
@@ -67,14 +75,18 @@ def create_app() -> QuartApp:
             },
         )
 
+        # Create async session factory
+        async_session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
+        app.config["async_session"] = async_session
+
         # Set SQLite pragmas for better performance
         # Use 64 MB for the cache_size, and 5000ms for busy_timeout
-        with engine.connect() as conn:
-            conn.exec_driver_sql("PRAGMA journal_mode=WAL")
-            conn.exec_driver_sql("PRAGMA synchronous=NORMAL")
-            conn.exec_driver_sql("PRAGMA cache_size=-64000")
-            conn.exec_driver_sql("PRAGMA foreign_keys=ON")
-            conn.exec_driver_sql("PRAGMA busy_timeout=5000")
+        async with engine.begin() as conn:
+            await conn.execute(text("PRAGMA journal_mode=WAL"))
+            await conn.execute(text("PRAGMA synchronous=NORMAL"))
+            await conn.execute(text("PRAGMA cache_size=-64000"))
+            await conn.execute(text("PRAGMA foreign_keys=ON"))
+            await conn.execute(text("PRAGMA busy_timeout=5000"))
 
         # Run any pending migrations
         # In dev we'd do this first:
@@ -90,7 +102,8 @@ def create_app() -> QuartApp:
         command.upgrade(alembic_cfg, "head")
 
         # Create any tables that might be missing
-        SQLModel.metadata.create_all(engine)
+        async with engine.begin() as conn:
+            await conn.run_sync(SQLModel.metadata.create_all)
 
         app.config["engine"] = engine
 
diff --git a/migrations/env.py b/migrations/env.py
index ecabd18..efdaa9d 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -1,9 +1,7 @@
 from logging.config import fileConfig
 from typing import Any, Dict, cast
 
-from sqlalchemy import engine_from_config
-from sqlalchemy import pool
-
+from sqlalchemy import engine_from_config, pool
 from alembic import context
 
 from sqlmodel import SQLModel
@@ -21,24 +19,9 @@ if config.config_file_name is not None:
 # for 'autogenerate' support
 target_metadata = SQLModel.metadata
 
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-
 
 def run_migrations_offline() -> None:
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well.  By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
+    """Run migrations in 'offline' mode."""
     url = config.get_main_option("sqlalchemy.url")
     context.configure(
         url=url,
@@ -52,25 +35,20 @@ def run_migrations_offline() -> None:
 
 
 def run_migrations_online() -> None:
-    """Run migrations in 'online' mode.
-
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
+    """Run migrations in 'online' mode."""
+    # Convert async URL to sync URL for migrations
+    url = config.get_main_option("sqlalchemy.url")
+    if url is not None:
+        sync_url = url.replace("sqlite+aiosqlite:", "sqlite:")
+    else:
+        raise RuntimeError("sqlalchemy.url is not set")
 
-    """
-    # Get the config section and ensure it's a dict
+    # Create synchronous engine for migrations
     configuration = config.get_section(config.config_ini_section)
     if configuration is None:
         configuration = {}
-
-    # Cast to the correct type for type checker
     configuration = cast(Dict[str, Any], configuration)
-
-    # # Add SQLite-specific connect args
-    # configuration["sqlalchemy.connect_args"] = {
-    #     "check_same_thread": False,
-    #     "timeout": 30,
-    # }
+    configuration["sqlalchemy.url"] = sync_url
 
     connectable = engine_from_config(
         configuration,
@@ -79,7 +57,10 @@ def run_migrations_online() -> None:
     )
 
     with connectable.connect() as connection:
-        context.configure(connection=connection, target_metadata=target_metadata)
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+        )
 
         with context.begin_transaction():
             context.run_migrations()
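
For reference, the URL rewrite that run_migrations_online() performs above is a plain string replacement, because Alembic here still runs against a synchronous engine (sketch only):

    url = "sqlite+aiosqlite:///./atr.db"              # async URL from alembic.ini
    sync_url = url.replace("sqlite+aiosqlite:", "sqlite:")
    assert sync_url == "sqlite:///./atr.db"           # plain driver for engine_from_config()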
diff --git a/poetry.lock b/poetry.lock
index 3e7b653..fa4cc10 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -142,6 +142,25 @@ files = [
 [package.dependencies]
 frozenlist = ">=1.1.0"
 
+[[package]]
+name = "aiosqlite"
+version = "0.21.0"
+description = "asyncio bridge to the standard sqlite3 module"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "aiosqlite-0.21.0-py3-none-any.whl", hash = 
"sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"},
+    {file = "aiosqlite-0.21.0.tar.gz", hash = 
"sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"},
+]
+
+[package.dependencies]
+typing_extensions = ">=4.0"
+
+[package.extras]
+dev = ["attribution (==1.7.1)", "black (==24.3.0)", "build (>=1.2)", 
"coverage[toml] (==7.6.10)", "flake8 (==7.0.0)", "flake8-bugbear (==24.12.12)", 
"flit (==3.10.1)", "mypy (==1.14.1)", "ufmt (==2.5.1)", "usort (==1.0.8.post1)"]
+docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.1)"]
+
 [[package]]
 name = "alembic"
 version = "1.14.1"
@@ -909,7 +928,6 @@ description = "Lightweight in-process concurrent programming"
 optional = false
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or 
platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or 
platform_machine == \"amd64\" or platform_machine == \"AMD64\" or 
platform_machine == \"win32\" or platform_machine == \"WIN32\")"
 files = [
     {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = 
"sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
     {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = 
"sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
@@ -2605,4 +2623,4 @@ propcache = ">=0.2.0"
 [metadata]
 lock-version = "2.1"
 python-versions = "~=3.13"
-content-hash = "7b2a7fad005d4107d9ead1e208a0ca85d80c486efa0fa4f8e90ee8578032f330"
+content-hash = "a38e31540d49b8412976386313634b90e0ac4680ae695494c4b0b383a88c7219"
diff --git a/pyproject.toml b/pyproject.toml
index b12ea5d..3478d5a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,9 +9,11 @@ license = "Apache-2.0"
 readme = "README.md"
 requires-python = "~=3.13"
 dependencies = [
+  "aiosqlite>=0.21.0,<0.22.0",
   "alembic~=1.14",
   "asfquart", # TODO: convert asfquart from a source dependency to pypi or git 
dependency
   "cryptography~=44.0",
+  "greenlet>=3.1.1,<4.0.0",
   "httpx~=0.27",
   "hypercorn~=0.17",
   "python-gnupg~=0.5",
@@ -40,6 +42,7 @@ package-mode = false
 # so we only need to enrich the dependencies with the source location of asfquart
 [tool.poetry.dependencies]
 asfquart = { path = "./asfquart", develop = true }
+python = "~=3.13"
 
 [tool.poetry.group.dev.dependencies]
 djlint = "^1.36.4"
diff --git a/scripts/poetry/sync-dev b/scripts/poetry/sync-dev
index 8fca73d..47b768c 100755
--- a/scripts/poetry/sync-dev
+++ b/scripts/poetry/sync-dev
@@ -4,8 +4,8 @@ _python="$(which python3)"
 poetry env use "${1:-$_python}"
 poetry lock
 poetry sync
-if [ -d ".vscode" ]; then
-cat <<EOF > .vscode/settings.json
+if test -d ".vscode"
+then cat <<EOF > .vscode/settings.json
 {
   "makefile.configureOnOpen": false,
   "python.analysis.extraPaths": ["$(poetry env info 
--path)/lib/python3.13/site-packages"],
diff --git a/typestubs/asfquart/generics.pyi b/typestubs/asfquart/generics.pyi
index 2c8572b..351e3cc 100644
--- a/typestubs/asfquart/generics.pyi
+++ b/typestubs/asfquart/generics.pyi
@@ -3,9 +3,9 @@ This type stub file was generated by pyright.
 """
 
 """Generic endpoints for ASFQuart"""
-OAUTH_URL_INIT = ...
-OAUTH_URL_CALLBACK = ...
-DEFAULT_OAUTH_URI = ...
+OAUTH_URL_INIT: str = ...
+OAUTH_URL_CALLBACK: str = ...
+DEFAULT_OAUTH_URI: str = ...
 
 def setup_oauth(app, uri=..., workflow_timeout: int = ...):  # -> None:
     """Sets up a generic ASF OAuth endpoint for the given app. The default URI 
is /auth, and the
diff --git a/uv.lock b/uv.lock
index 2417b89..c970bec 100644
--- a/uv.lock
+++ b/uv.lock
@@ -64,6 +64,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 },
 ]
 
+[[package]]
+name = "aiosqlite"
+version = "0.21.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792 },
+]
+
 [[package]]
 name = "alembic"
 version = "1.14.1"
@@ -1087,9 +1099,11 @@ name = "tooling-atr-experiment"
 version = "0.0.1"
 source = { virtual = "." }
 dependencies = [
+    { name = "aiosqlite" },
     { name = "alembic" },
     { name = "asfquart" },
     { name = "cryptography" },
+    { name = "greenlet" },
     { name = "httpx" },
     { name = "hypercorn" },
     { name = "python-gnupg" },
@@ -1107,9 +1121,11 @@ dev = [
 
 [package.metadata]
 requires-dist = [
+    { name = "aiosqlite", specifier = ">=0.21.0,<0.22.0" },
     { name = "alembic", specifier = "~=1.14" },
     { name = "asfquart", editable = "asfquart" },
     { name = "cryptography", specifier = "~=44.0" },
+    { name = "greenlet", specifier = ">=3.1.1,<4.0.0" },
     { name = "httpx", specifier = "~=0.27" },
     { name = "hypercorn", specifier = "~=0.17" },
     { name = "python-gnupg", specifier = "~=0.5" },


---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscr...@tooling.apache.org
For additional commands, e-mail: dev-h...@tooling.apache.org
