This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git
The following commit(s) were added to refs/heads/main by this push:
new 3c7bc9d Implement custom Alembic revision naming and add startup
migration logic
3c7bc9d is described below
commit 3c7bc9d1106b54bf2977645817e69f615b95a461
Author: Sean B. Palmer <[email protected]>
AuthorDate: Tue May 6 16:09:06 2025 +0100
Implement custom Alembic revision naming and add startup migration logic
- Make Alembic generate NNNN_YYYY.MM.DD_COMMITSHORT.py filenames.
- Add a function to env.py for AutoString type handling.
- Update alembic.ini to support our customisations.
- Add Alembic upgrade and check commands to atr.db for startup.
- Remove old SQLModel.metadata.create_all from startup.
- Generate a new initial migration reflecting the current schema.
- Delete the obsolete initial migration file.
---
Makefile | 7 +-
alembic.ini | 9 +-
atr/db/__init__.py | 50 ++--
migrations/env.py | 159 ++++++++---
migrations/script.py.mako | 3 +-
migrations/versions/0001_2025.05.06_38b0d2de.py | 292 +++++++++++++++++++++
migrations/versions/609dc199438f_initial_schema.py | 27 --
pyproject.toml | 1 +
8 files changed, 463 insertions(+), 85 deletions(-)
diff --git a/Makefile b/Makefile
index 8a33ea5..cac0ac9 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,6 @@
.PHONY: build build-alpine build-playwright build-ubuntu certs check \
docs generate-version obvfix report run run-dev run-playwright \
- run-playwright-slow run-staging stop serve sync sync-dev
+ run-playwright-slow run-staging stop serve serve-local sync sync-dev
BIND ?= 127.0.0.1:8080
IMAGE ?= tooling-trusted-release
@@ -71,6 +71,11 @@ serve:
SSH_HOST=127.0.0.1 $(SCRIPTS)/run hypercorn --bind $(BIND) \
--keyfile key.pem --certfile cert.pem atr.server:app --debug
--reload
+serve-local:
+ APP_HOST=127.0.0.1:8080 LOCAL_DEBUG=1 \
+ SSH_HOST=127.0.0.1 $(SCRIPTS)/run hypercorn --bind $(BIND) \
+ --keyfile key.pem --certfile cert.pem atr.server:app --debug
--reload
+
stop:
scripts/stop
diff --git a/alembic.ini b/alembic.ini
index 02dadc7..1d4fffc 100644
--- a/alembic.ini
+++ b/alembic.ini
@@ -10,6 +10,7 @@ script_location = migrations
# see
https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template =
%%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+file_template = %%(rev)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
@@ -21,14 +22,15 @@ prepend_sys_path = .
# Any required deps can be installed by adding `alembic[tz]` to the pip
requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
-# timezone =
+timezone = UTC
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
-# revision_environment = false
+# This is necessary to use the custom naming in env.py
+revision_environment = true
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
@@ -64,6 +66,7 @@ version_path_separator = os
# output_encoding = utf-8
# Despite the three slashes, this is a relative path
+# This is hardcoded, but env.py uses the actual configured ATR path
sqlalchemy.url = sqlite:///state/atr.db
@@ -95,7 +98,7 @@ keys = console
keys = generic
[logger_root]
-level = WARNING
+level = INFO
handlers = console
qualname =
diff --git a/atr/db/__init__.py b/atr/db/__init__.py
index 01eea06..8b8e603 100644
--- a/atr/db/__init__.py
+++ b/atr/db/__init__.py
@@ -30,6 +30,8 @@ import sqlalchemy.orm as orm
import sqlalchemy.sql as sql
import sqlmodel
import sqlmodel.sql.expression as expression
+from alembic import command
+from alembic.config import Config
import atr.config as config
import atr.db.models as models
@@ -533,23 +535,37 @@ def init_database(app: base.QuartApp) -> None:
bind=engine, class_=Session, expire_on_commit=False
)
- # Run any pending migrations
- # In dev we'd do this first:
- # poetry run alembic revision --autogenerate -m "description"
- # Then review the generated migration in migrations/versions/ and
commit it
- # project_root = app_config.PROJECT_ROOT
- # alembic_ini_path = os.path.join(project_root, "alembic.ini")
- # alembic_cfg = config.Config(alembic_ini_path)
- # # Override the migrations directory location to use project root
- # # TODO: Is it possible to set this in alembic.ini?
- # alembic_cfg.set_main_option("script_location",
os.path.join(project_root, "migrations"))
- # # Set the database URL in the config
- # alembic_cfg.set_main_option("sqlalchemy.url", str(engine.url))
- # # command.upgrade(alembic_cfg, "head")
-
- # Create any tables that might be missing
- async with engine.begin() as conn:
- await conn.run_sync(sqlmodel.SQLModel.metadata.create_all)
+ # Run any pending migrations on startup
+ _LOGGER.info("Applying database migrations via init_database...")
+ alembic_ini_path = os.path.join(app_config.PROJECT_ROOT, "alembic.ini")
+ alembic_cfg = Config(alembic_ini_path)
+
+ # Construct synchronous URLs
+ absolute_db_path = os.path.join(app_config.STATE_DIR,
app_config.SQLITE_DB_PATH)
+ sync_sqlalchemy_url = f"sqlite:///{absolute_db_path}"
+ _LOGGER.info(f"Setting Alembic URL for command: {sync_sqlalchemy_url}")
+ alembic_cfg.set_main_option("sqlalchemy.url", sync_sqlalchemy_url)
+
+    # Ensure that Alembic finds the migrations directory relative to the
project root
+ migrations_dir_path = os.path.join(app_config.PROJECT_ROOT,
"migrations")
+ _LOGGER.info(f"Setting Alembic script_location for command:
{migrations_dir_path}")
+ alembic_cfg.set_main_option("script_location", migrations_dir_path)
+
+ try:
+ _LOGGER.info("Running alembic upgrade head...")
+ command.upgrade(alembic_cfg, "head")
+ _LOGGER.info("Database migrations applied successfully")
+ except Exception:
+ _LOGGER.exception("Failed to apply database migrations during
startup")
+ raise
+
+ try:
+ _LOGGER.info("Running alembic check...")
+ command.check(alembic_cfg)
+ _LOGGER.info("Alembic check passed: DB schema matches models")
+ except Exception:
+ _LOGGER.exception("Failed to check database migrations during
startup")
+ raise
async def init_database_for_worker() -> None:
diff --git a/migrations/env.py b/migrations/env.py
index 8b8f586..ed29ebe 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -1,71 +1,158 @@
-from logging.config import fileConfig
+import datetime
+import logging.config
+import os
+import re
+import subprocess
+import sys
+from collections.abc import Iterable
+from typing import Literal
-from alembic import context
-from sqlalchemy import engine_from_config, pool
-from sqlmodel import SQLModel
+import alembic
+import alembic.autogenerate.api as api
+import alembic.operations.ops as ops
+import alembic.runtime.migration as migration
+import sqlalchemy
+import sqlmodel
+import sqlmodel.sql.sqltypes as sqltypes
+
+# Add the project root to the Python path
+# This script must be at migrations/env.py for this to work
+project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+if project_root not in sys.path:
+ sys.path.insert(0, project_root)
+
+
+# Use database metadata from ATR directly
+import atr.config
+
+# Populate SQLModel.metadata as a side effect of importing the models
+import atr.db.models
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
-config = context.config
+alembic_config = alembic.context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
-if config.config_file_name is not None:
- fileConfig(config.config_file_name)
+if alembic_config.config_file_name is not None:
+ logging.config.fileConfig(alembic_config.config_file_name)
+
+# The SQLModel.metadata object as populated by the ATR models
+target_metadata = sqlmodel.SQLModel.metadata
+
+# Get the database path from application configuration
+app_config = atr.config.get()
+absolute_db_path = os.path.join(app_config.STATE_DIR,
app_config.SQLITE_DB_PATH)
+
+# Construct the synchronous SQLite URL using the absolute path
+# Three slashes come before any absolute or relative path
+sync_sqlalchemy_url = f"sqlite:///{absolute_db_path}"
+
+
+def get_short_commit_hash(project_root_path: str) -> str:
+    """Get the short (eight-character) git commit hash."""
+ process = subprocess.run(
+ ["git", "rev-parse", "--short=8", "HEAD"],
+ capture_output=True,
+ text=True,
+ cwd=project_root_path,
+ check=True,
+ )
+ return process.stdout.strip()
+
-# add your model's MetaData object here
-# for 'autogenerate' support
-target_metadata = SQLModel.metadata
+def process_revision_directives_custom_naming(
+ context: migration.MigrationContext,
+ revision: str | Iterable[str | None] | Iterable[str],
+ directives: list[ops.MigrationScript],
+) -> None:
+ """Generate revision IDs and filenames like
NNNN_YYYY.MM.DD_COMMITSHORT.py."""
+ global project_root
+
+ if context.script is None:
+ raise RuntimeError("MigrationContext.script is None, cannot determine
script directory")
+
+ versions_path = os.path.join(context.script.dir, "versions")
+ if not os.path.exists(versions_path):
+ os.makedirs(versions_path)
+
+ highest_num = 0
+ pattern = re.compile(r"^(\d{4})_.*\.py$")
+ try:
+ for fname in os.listdir(versions_path):
+ match = pattern.match(fname)
+ if match:
+ highest_num = max(highest_num, int(match.group(1)))
+ except Exception as e:
+ print(f"Warning: Error scanning versions directory '{versions_path}':
{e!r}")
+
+ next_num_str = f"{highest_num + 1:04d}"
+ date_str = datetime.date.today().strftime("%Y.%m.%d")
+ commit_short = get_short_commit_hash(project_root)
+ new_rev_id = f"{next_num_str}_{date_str}_{commit_short}"
+ calculated_path = os.path.join(versions_path, f"{new_rev_id}.py")
+
+ for directive in directives:
+ setattr(directive, "rev_id", new_rev_id)
+ setattr(directive, "path", calculated_path)
+
+
+def render_item_override(type_: str, item: object, autogen_context:
api.AutogenContext) -> str | Literal[False]:
+ """Apply custom rendering for SQLModel AutoString.
+
+ Prevents autogenerate from rendering <AutoString>.
+ Returns False to indicate no handler for other types.
+ """
+ # Add import for sqlalchemy as sa if not present
+ autogen_context.imports.add("import sqlalchemy as sa")
+
+ if (type_ == "type") and isinstance(item, sqltypes.AutoString):
+ # Render sqlmodel.sql.sqltypes.AutoString as sa.String()
+ return "sa.String()"
+
+ # Default rendering for other types
+ return False
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
- url = config.get_main_option("sqlalchemy.url")
- context.configure(
- url=url,
+ alembic.context.configure(
+ url=sync_sqlalchemy_url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
+ render_item=render_item_override,
+ process_revision_directives=process_revision_directives_custom_naming,
)
- with context.begin_transaction():
- context.run_migrations()
+ with alembic.context.begin_transaction():
+ alembic.context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
- # Convert async URL to sync URL for migrations
- url = config.get_main_option("sqlalchemy.url")
- if url is not None:
- sync_url = url.replace("sqlite+aiosqlite:", "sqlite:")
- else:
- raise RuntimeError("sqlalchemy.url is not set")
-
- # Create synchronous engine for migrations
- section = config.get_section(config.config_ini_section)
- if section is None:
- configuration = {}
- else:
- configuration = dict(section)
- configuration["sqlalchemy.url"] = sync_url
-
- connectable = engine_from_config(
+ configuration =
alembic_config.get_section(alembic_config.config_ini_section) or {}
+ configuration["sqlalchemy.url"] = sync_sqlalchemy_url
+
+ connectable = sqlalchemy.engine_from_config(
configuration,
prefix="sqlalchemy.",
- poolclass=pool.NullPool,
+ poolclass=sqlalchemy.pool.NullPool,
)
with connectable.connect() as connection:
- context.configure(
+ alembic.context.configure(
connection=connection,
target_metadata=target_metadata,
+ render_item=render_item_override,
+
process_revision_directives=process_revision_directives_custom_naming,
)
- with context.begin_transaction():
- context.run_migrations()
+ with alembic.context.begin_transaction():
+ alembic.context.run_migrations()
-if context.is_offline_mode():
+if alembic.context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
index fbc4b07..6af926a 100644
--- a/migrations/script.py.mako
+++ b/migrations/script.py.mako
@@ -8,7 +8,8 @@ Create Date: ${create_date}
from typing import Sequence, Union
from alembic import op
-import sqlalchemy as sa
+import sqlmodel
+import atr.db.models
${imports if imports else ""}
# revision identifiers, used by Alembic.
diff --git a/migrations/versions/0001_2025.05.06_38b0d2de.py
b/migrations/versions/0001_2025.05.06_38b0d2de.py
new file mode 100644
index 0000000..6599f71
--- /dev/null
+++ b/migrations/versions/0001_2025.05.06_38b0d2de.py
@@ -0,0 +1,292 @@
+"""Use the existing ATR schema
+
+Revision ID: 0001_2025.05.06_38b0d2de
+Revises:
+Create Date: 2025-05-06 14:44:00.401362+00:00
+
+"""
+
+from collections.abc import Sequence
+
+import sqlalchemy as sa
+from alembic import op
+
+import atr.db.models
+
+# revision identifiers, used by Alembic.
+revision: str = "0001_2025.05.06_38b0d2de"
+down_revision: str | None = None
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
+
+
+def upgrade() -> None:
+ op.create_table(
+ "committee",
+ sa.Column("name", sa.String(), nullable=False),
+ sa.Column("full_name", sa.String(), nullable=True),
+ sa.Column("is_podling", sa.Boolean(), nullable=False),
+ sa.Column("parent_committee_name", sa.String(), nullable=True),
+ sa.Column("committee_members", sa.JSON(), nullable=True),
+ sa.Column("committers", sa.JSON(), nullable=True),
+ sa.Column("release_managers", sa.JSON(), nullable=True),
+ sa.ForeignKeyConstraint(
+ ["parent_committee_name"],
+ ["committee.name"],
+ ),
+ sa.PrimaryKeyConstraint("name"),
+ sa.UniqueConstraint("name"),
+ )
+ op.create_table(
+ "publicsigningkey",
+ sa.Column("fingerprint", sa.String(), nullable=False),
+ sa.Column("algorithm", sa.Integer(), nullable=False),
+ sa.Column("length", sa.Integer(), nullable=False),
+ sa.Column("created", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("expires", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("declared_uid", sa.String(), nullable=True),
+ sa.Column("apache_uid", sa.String(), nullable=False),
+ sa.Column("ascii_armored_key", sa.String(), nullable=False),
+ sa.PrimaryKeyConstraint("fingerprint"),
+ sa.UniqueConstraint("fingerprint"),
+ )
+ op.create_table(
+ "releasepolicy",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("mailto_addresses", sa.JSON(), nullable=True),
+ sa.Column("manual_vote", sa.Boolean(), nullable=False),
+ sa.Column("min_hours", sa.Integer(), nullable=False),
+ sa.Column("release_checklist", sa.String(), nullable=False),
+ sa.Column("pause_for_rm", sa.Boolean(), nullable=False),
+ sa.Column("start_vote_template", sa.String(), nullable=False),
+ sa.Column("announce_release_template", sa.String(), nullable=False),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_table(
+ "sshkey",
+ sa.Column("fingerprint", sa.String(), nullable=False),
+ sa.Column("key", sa.String(), nullable=False),
+ sa.Column("asf_uid", sa.String(), nullable=False),
+ sa.PrimaryKeyConstraint("fingerprint"),
+ )
+ op.create_table(
+ "textvalue",
+ sa.Column("ns", sa.String(), nullable=False),
+ sa.Column("key", sa.String(), nullable=False),
+ sa.Column("value", sa.String(), nullable=False),
+ sa.PrimaryKeyConstraint("ns", "key"),
+ )
+ op.create_index(op.f("ix_textvalue_key"), "textvalue", ["key"],
unique=False)
+ op.create_index(op.f("ix_textvalue_ns"), "textvalue", ["ns"], unique=False)
+ op.create_table(
+ "keylink",
+ sa.Column("committee_name", sa.String(), nullable=False),
+ sa.Column("key_fingerprint", sa.String(), nullable=False),
+ sa.ForeignKeyConstraint(
+ ["committee_name"],
+ ["committee.name"],
+ ),
+ sa.ForeignKeyConstraint(
+ ["key_fingerprint"],
+ ["publicsigningkey.fingerprint"],
+ ),
+ sa.PrimaryKeyConstraint("committee_name", "key_fingerprint"),
+ )
+ op.create_table(
+ "project",
+ sa.Column("name", sa.String(), nullable=False),
+ sa.Column("full_name", sa.String(), nullable=True),
+ sa.Column("is_podling", sa.Boolean(), nullable=False),
+ sa.Column("is_retired", sa.Boolean(), nullable=False),
+ sa.Column("super_project_name", sa.String(), nullable=True),
+ sa.Column("description", sa.String(), nullable=True),
+ sa.Column("category", sa.String(), nullable=True),
+ sa.Column("programming_languages", sa.String(), nullable=True),
+ sa.Column("committee_name", sa.String(), nullable=True),
+ sa.Column("release_policy_id", sa.Integer(), nullable=True),
+ sa.Column("created", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("created_by", sa.String(), nullable=True),
+ sa.ForeignKeyConstraint(
+ ["committee_name"],
+ ["committee.name"],
+ ),
+ sa.ForeignKeyConstraint(["release_policy_id"], ["releasepolicy.id"],
ondelete="CASCADE"),
+ sa.ForeignKeyConstraint(
+ ["super_project_name"],
+ ["project.name"],
+ ),
+ sa.PrimaryKeyConstraint("name"),
+ sa.UniqueConstraint("name"),
+ )
+ op.create_table(
+ "distributionchannel",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.String(), nullable=False),
+ sa.Column("url", sa.String(), nullable=False),
+ sa.Column("credentials", sa.String(), nullable=False),
+ sa.Column("is_test", sa.Boolean(), nullable=False),
+ sa.Column("automation_endpoint", sa.String(), nullable=False),
+ sa.Column("project_name", sa.String(), nullable=False),
+ sa.ForeignKeyConstraint(
+ ["project_name"],
+ ["project.name"],
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_index(op.f("ix_distributionchannel_name"),
"distributionchannel", ["name"], unique=True)
+ op.create_table(
+ "release",
+ sa.Column("name", sa.String(), nullable=False),
+ sa.Column(
+ "stage", sa.Enum("RELEASE_CANDIDATE", "RELEASE", "MIGRATION",
"FAILED", name="releasestage"), nullable=False
+ ),
+ sa.Column(
+ "phase",
+ sa.Enum("RELEASE_CANDIDATE_DRAFT", "RELEASE_CANDIDATE",
"RELEASE_PREVIEW", "RELEASE", name="releasephase"),
+ nullable=False,
+ ),
+ sa.Column("created", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("released", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("project_name", sa.String(), nullable=False),
+ sa.Column("package_managers", sa.JSON(), nullable=True),
+ sa.Column("version", sa.String(), nullable=False),
+ sa.Column("revision", sa.String(), nullable=True),
+ sa.Column("sboms", sa.JSON(), nullable=True),
+ sa.Column("release_policy_id", sa.Integer(), nullable=True),
+ sa.Column("votes", sa.JSON(), nullable=True),
+ sa.Column("vote_started", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("vote_resolved", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.ForeignKeyConstraint(
+ ["project_name"],
+ ["project.name"],
+ ),
+ sa.ForeignKeyConstraint(
+ ["release_policy_id"],
+ ["releasepolicy.id"],
+ ),
+ sa.PrimaryKeyConstraint("name"),
+ sa.UniqueConstraint("name"),
+ sa.UniqueConstraint("project_name", "version",
name="unique_project_version"),
+ )
+ op.create_index(op.f("ix_release_revision"), "release", ["revision"],
unique=False)
+ op.create_table(
+ "checkresult",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("release_name", sa.String(), nullable=False),
+ sa.Column("checker", sa.String(), nullable=False),
+ sa.Column("primary_rel_path", sa.String(), nullable=True),
+ sa.Column("created", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column(
+ "status", sa.Enum("EXCEPTION", "FAILURE", "SUCCESS", "WARNING",
name="checkresultstatus"), nullable=False
+ ),
+ sa.Column("message", sa.String(), nullable=False),
+ sa.Column("data", sa.JSON(), nullable=True),
+ sa.ForeignKeyConstraint(
+ ["release_name"],
+ ["release.name"],
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_index(op.f("ix_checkresult_primary_rel_path"), "checkresult",
["primary_rel_path"], unique=False)
+ op.create_table(
+ "task",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("status", sa.Enum("QUEUED", "ACTIVE", "COMPLETED", "FAILED",
name="taskstatus"), nullable=False),
+ sa.Column(
+ "task_type",
+ sa.Enum(
+ "HASHING_CHECK",
+ "LICENSE_FILES",
+ "LICENSE_HEADERS",
+ "MESSAGE_SEND",
+ "PATHS_CHECK",
+ "RAT_CHECK",
+ "SBOM_GENERATE_CYCLONEDX",
+ "SIGNATURE_CHECK",
+ "SVN_IMPORT_FILES",
+ "TARGZ_INTEGRITY",
+ "TARGZ_STRUCTURE",
+ "VOTE_INITIATE",
+ "ZIPFORMAT_INTEGRITY",
+ "ZIPFORMAT_LICENSE_FILES",
+ "ZIPFORMAT_LICENSE_HEADERS",
+ "ZIPFORMAT_STRUCTURE",
+ name="tasktype",
+ ),
+ nullable=False,
+ ),
+ sa.Column("task_args", sa.JSON(), nullable=True),
+ sa.Column("added", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("started", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("pid", sa.Integer(), nullable=True),
+ sa.Column("completed", atr.db.models.UTCDateTime(timezone=True),
nullable=True),
+ sa.Column("result", sa.JSON(), nullable=True),
+ sa.Column("error", sa.String(), nullable=True),
+ sa.Column("release_name", sa.String(), nullable=True),
+ sa.Column("draft_revision", sa.String(), nullable=True),
+ sa.Column("primary_rel_path", sa.String(), nullable=True),
+ sa.CheckConstraint(
+ """
+ (
+ -- Initial state is always valid
+ status = 'QUEUED'
+ -- QUEUED -> ACTIVE requires setting started time and pid
+ OR (status = 'ACTIVE' AND started IS NOT NULL AND pid IS NOT
NULL)
+ -- ACTIVE -> COMPLETED requires setting completed time and
result
+ OR (status = 'COMPLETED' AND completed IS NOT NULL AND result
IS NOT NULL)
+ -- ACTIVE -> FAILED requires setting completed time and error
(result optional)
+ OR (status = 'FAILED' AND completed IS NOT NULL AND error IS
NOT NULL)
+ )
+ """,
+ name="valid_task_status_transitions",
+ ),
+ sa.ForeignKeyConstraint(
+ ["release_name"],
+ ["release.name"],
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_index(op.f("ix_task_added"), "task", ["added"], unique=False)
+ op.create_index(op.f("ix_task_draft_revision"), "task",
["draft_revision"], unique=False)
+ op.create_index(op.f("ix_task_primary_rel_path"), "task",
["primary_rel_path"], unique=False)
+ op.create_index(op.f("ix_task_status"), "task", ["status"], unique=False)
+ op.create_index("ix_task_status_added", "task", ["status", "added"],
unique=False)
+ op.create_table(
+ "checkresulthistorylink",
+ sa.Column("check_result_id", sa.Integer(), nullable=False),
+ sa.Column("draft_revision", sa.String(), nullable=False),
+ sa.ForeignKeyConstraint(
+ ["check_result_id"],
+ ["checkresult.id"],
+ ),
+ sa.PrimaryKeyConstraint("check_result_id", "draft_revision"),
+ )
+ op.create_index(
+ op.f("ix_checkresulthistorylink_draft_revision"),
"checkresulthistorylink", ["draft_revision"], unique=False
+ )
+
+
+def downgrade() -> None:
+ op.drop_index(op.f("ix_checkresulthistorylink_draft_revision"),
table_name="checkresulthistorylink")
+ op.drop_table("checkresulthistorylink")
+ op.drop_index("ix_task_status_added", table_name="task")
+ op.drop_index(op.f("ix_task_status"), table_name="task")
+ op.drop_index(op.f("ix_task_primary_rel_path"), table_name="task")
+ op.drop_index(op.f("ix_task_draft_revision"), table_name="task")
+ op.drop_index(op.f("ix_task_added"), table_name="task")
+ op.drop_table("task")
+ op.drop_index(op.f("ix_checkresult_primary_rel_path"),
table_name="checkresult")
+ op.drop_table("checkresult")
+ op.drop_index(op.f("ix_release_revision"), table_name="release")
+ op.drop_table("release")
+ op.drop_index(op.f("ix_distributionchannel_name"),
table_name="distributionchannel")
+ op.drop_table("distributionchannel")
+ op.drop_table("project")
+ op.drop_table("keylink")
+ op.drop_index(op.f("ix_textvalue_ns"), table_name="textvalue")
+ op.drop_index(op.f("ix_textvalue_key"), table_name="textvalue")
+ op.drop_table("textvalue")
+ op.drop_table("sshkey")
+ op.drop_table("releasepolicy")
+ op.drop_table("publicsigningkey")
+ op.drop_table("committee")
diff --git a/migrations/versions/609dc199438f_initial_schema.py
b/migrations/versions/609dc199438f_initial_schema.py
deleted file mode 100644
index 9ac320c..0000000
--- a/migrations/versions/609dc199438f_initial_schema.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""initial_schema
-
-Revision ID: 609dc199438f
-Revises:
-Create Date: 2025-02-21 20:20:02.574246
-
-"""
-
-from collections.abc import Sequence
-
-# revision identifiers, used by Alembic.
-revision: str = "609dc199438f"
-down_revision: str | None = None
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- pass
- # ### end Alembic commands ###
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- pass
- # ### end Alembic commands ###
diff --git a/pyproject.toml b/pyproject.toml
index 30bf8a6..98a4d1c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -119,6 +119,7 @@ select = [
"atr/apache.py" = ["N815", "TC003"]
"atr/db/__init__.py" = ["C901"]
"atr/routes/modules.py" = ["F401"]
+"migrations/env.py" = ["E402"]
"scripts/release_path_parse.py" = ["C901", "RUF001"]
[tool.mypy]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]