Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions .claude/skills/database-migrations.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Database Migration Guidelines

## Overview

This project uses Alembic for database migrations. API v1 still uses raw SQL
initializers rather than ORM models, so Alembic target metadata is reflected
from `policyengine_api/data/initialise_local.sql` by default.

## Rules

- Do not manually author Alembic operations for normal schema changes.
- Generate migrations with `uv run alembic revision --autogenerate`.
- Review generated migrations before applying them.
- Keep SQL initializers and generated migrations aligned.
- For pre-existing production databases, stamp the base revision before applying
new upgrade revisions.

## Commands

```bash
uv run alembic revision --autogenerate -m "Description"
uv run alembic upgrade head
uv run alembic current
uv run alembic history
uv run alembic stamp <revision>
```

## API v1 Notes

- Set `POLICYENGINE_ALEMBIC_DATABASE_URL` to the SQLAlchemy URL of the database
  that Alembic should connect to.
- Set `POLICYENGINE_ALEMBIC_SCHEMA_SQL` when generating against a temporary
schema SQL file instead of the current initializer.
- The base migration should be stamped in production because the tables already
exist there.
48 changes: 48 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Alembic configuration for PolicyEngine API v1.

[alembic]
script_location = %(here)s/alembic
file_template = %%(year)d%%(month).2d%%(day).2d_%%(rev)s_%%(slug)s
prepend_sys_path = .
path_separator = os
output_encoding = utf-8

# Overridden by alembic/env.py. For local generation, set
# POLICYENGINE_ALEMBIC_DATABASE_URL explicitly.
sqlalchemy.url = sqlite:///policyengine_api/data/policyengine.db

[post_write_hooks]

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
8 changes: 8 additions & 0 deletions alembic/README
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
PolicyEngine API v1 Alembic migrations.

This project does not currently use SQLAlchemy ORM models. Alembic
autogenerate reflects target metadata from `policyengine_api/data/initialise_local.sql`
or from the path in `POLICYENGINE_ALEMBIC_SCHEMA_SQL`.

Use `alembic stamp` for pre-existing production databases before applying
incremental migrations.
83 changes: 83 additions & 0 deletions alembic/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
"""Alembic environment for PolicyEngine API v1 raw-SQL schema migrations."""

from logging.config import fileConfig
import importlib.util
import os
from pathlib import Path
import sys

from sqlalchemy import engine_from_config, pool

from alembic import context

sys.path.insert(0, str(Path(__file__).parent.parent))

metadata_path = (
Path(__file__).parent.parent / "policyengine_api" / "data" / "alembic_metadata.py"
)
metadata_spec = importlib.util.spec_from_file_location(
"policyengine_api_alembic_metadata",
metadata_path,
)
if metadata_spec is None or metadata_spec.loader is None:
raise RuntimeError(f"Could not load Alembic metadata helper from {metadata_path}")
metadata_module = importlib.util.module_from_spec(metadata_spec)
metadata_spec.loader.exec_module(metadata_module)
build_metadata_from_sql = metadata_module.build_metadata_from_sql


config = context.config

database_url = os.environ.get("POLICYENGINE_ALEMBIC_DATABASE_URL") or os.environ.get(
"DATABASE_URL"
)
if database_url:
config.set_main_option("sqlalchemy.url", database_url)

if config.config_file_name is not None:
fileConfig(config.config_file_name)

schema_sql_path = os.environ.get("POLICYENGINE_ALEMBIC_SCHEMA_SQL")
target_metadata = build_metadata_from_sql(schema_sql_path)


def _configure_context(connection=None, url: str | None = None) -> None:
    """Configure the Alembic migration context for one run.

    With a live *connection*, configure for online execution against it.
    Without one, configure for offline (SQL-script) mode against *url*,
    rendering parameters as literal values.
    """
    shared = dict(
        target_metadata=target_metadata,
        compare_type=False,
        compare_server_default=False,
    )
    if connection is None:
        # Offline: emit SQL text rather than executing statements.
        context.configure(
            url=url,
            literal_binds=True,
            dialect_opts={"paramstyle": "named"},
            **shared,
        )
    else:
        context.configure(connection=connection, **shared)


def run_migrations_offline() -> None:
    """Emit migration SQL in offline mode, without a database connection."""
    configured_url = config.get_main_option("sqlalchemy.url")
    _configure_context(url=configured_url)
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Execute migrations against a live database connection."""
    ini_section = config.get_section(config.config_ini_section, {})
    # NullPool: one short-lived connection per invocation, nothing pooled.
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        _configure_context(connection=connection)
        with context.begin_transaction():
            context.run_migrations()


# Dispatch on Alembic's invocation mode: offline emits SQL, online executes.
_runner = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_runner()
28 changes: 28 additions & 0 deletions alembic/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}


def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
"""Add report run canonical schema

Revision ID: 558935decda5
Revises: 60d38593ddc3
Create Date: 2026-05-11 22:21:20.417733

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "558935decda5"
down_revision: Union[str, Sequence[str], None] = "60d38593ddc3"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Mapping table from legacy integer report-output ids to their canonical
    # id, plus a CHAR(36) display run id (presumably a UUID — confirm against
    # the application code that writes this table).
    op.create_table(
        "legacy_report_output_id_map",
        sa.Column("legacy_report_output_id", sa.INTEGER(), nullable=False),
        sa.Column("canonical_report_output_id", sa.INTEGER(), nullable=False),
        sa.Column("display_report_output_run_id", sa.CHAR(length=36), nullable=False),
        sa.PrimaryKeyConstraint("legacy_report_output_id"),
    )
    # Non-unique index to support lookups by canonical id (many legacy ids
    # may map to one canonical id).
    op.create_index(
        "legacy_report_output_id_map_canonical_idx",
        "legacy_report_output_id_map",
        ["canonical_report_output_id"],
        unique=False,
    )
    # New identity columns are nullable, so existing rows need no backfill
    # at migration time.
    op.add_column(
        "report_outputs",
        sa.Column("report_identity_hash", sa.VARCHAR(length=64), nullable=True),
    )
    op.add_column(
        "report_outputs",
        sa.Column("report_identity_schema_version", sa.INTEGER(), nullable=True),
    )
    # Composite lookup index over the new identity columns; created after the
    # columns it covers exist.
    op.create_index(
        "report_outputs_identity_idx",
        "report_outputs",
        ["country_id", "report_identity_hash", "report_identity_schema_version"],
        unique=False,
    )
    op.create_index(
        "simulation_runs_report_output_run_idx",
        "simulation_runs",
        ["report_output_run_id"],
        unique=False,
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Exact inverse of upgrade(): objects are dropped in reverse creation
    # order (indexes before the columns/tables they cover).
    op.drop_index("simulation_runs_report_output_run_idx", table_name="simulation_runs")
    op.drop_index("report_outputs_identity_idx", table_name="report_outputs")
    op.drop_column("report_outputs", "report_identity_schema_version")
    op.drop_column("report_outputs", "report_identity_hash")
    op.drop_index(
        "legacy_report_output_id_map_canonical_idx",
        table_name="legacy_report_output_id_map",
    )
    op.drop_table("legacy_report_output_id_map")
    # ### end Alembic commands ###
Loading
Loading