Skip to main content
Advertisement

Alembic Migrations

Alembic is a database migration tool dedicated to SQLAlchemy. It manages schema change history similarly to Django migrations.


Installation and Initialization

pip install alembic

# Initialize project
alembic init alembic

# Initialize for async environments
alembic init --template async alembic
myproject/
├── alembic/
│ ├── versions/ # migration files
│ ├── env.py # environment configuration
│ └── script.py.mako # migration file template
├── alembic.ini # main configuration
└── database.py

Configuration

# alembic.ini
[alembic]
script_location = alembic
sqlalchemy.url = postgresql://user:password@localhost/dbname

# Using environment variables (recommended)
# sqlalchemy.url = %(DB_URL)s
# alembic/env.py
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context

# Import model Base (for auto-detection)
from database import Base
from models import * # noqa: F401 — must import all models

# Alembic Config object, populated from alembic.ini.
config = context.config
# Wire up Python logging from the [loggers]/[handlers] sections of alembic.ini.
fileConfig(config.config_file_name)

# Metadata that --autogenerate diffs against; every model module must be
# imported above or its tables will be invisible to autogenerate.
target_metadata = Base.metadata


def run_migrations_offline():
    """Render migration SQL as a reviewable script, without a DB connection.

    'Offline' mode uses literal_binds so parameters are inlined into the
    emitted statements, suitable for hand-off to a DBA.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Open a real database connection and apply migrations directly."""
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot usage; connection pooling is unnecessary
    )
    with engine.connect() as conn:
        context.configure(
            connection=conn,
            target_metadata=target_metadata,
            # Have autogenerate flag column-type and server-default drift too.
            compare_type=True,
            compare_server_default=True,
        )
        with context.begin_transaction():
            context.run_migrations()


# Module-level entry point: Alembic selects offline mode for `--sql` runs.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()

Migration Commands

# Auto-generate migration based on current models
alembic revision --autogenerate -m "add product table"

# Create empty migration file (for data migrations)
alembic revision -m "fill discount rate"

# Upgrade to latest version
alembic upgrade head

# Upgrade to specific version
alembic upgrade +2 # 2 steps forward from current
alembic upgrade abc1234 # specific revision ID

# Downgrade
alembic downgrade -1 # 1 step back
alembic downgrade base # back to initial state

# Check current state
alembic current

# View history
alembic history --verbose

Migration File Structure

# alembic/versions/20240115_abc123_add_product_table.py
"""add product table

Revision ID: abc123
Revises: xyz789
Create Date: 2024-01-15 10:00:00
"""
from alembic import op
import sqlalchemy as sa

# Revision identifiers used by Alembic's version graph.
revision = "abc123"        # this migration's unique ID
down_revision = "xyz789"   # parent revision (None for the very first migration)
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create the products table plus its (category_id, is_active) index."""
    columns = [
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.String(200), nullable=False),
        sa.Column("price", sa.Float(), nullable=False),
        sa.Column("stock", sa.Integer(), server_default="0"),
        sa.Column("is_active", sa.Boolean(), server_default="true"),
        sa.Column("category_id", sa.Integer(), sa.ForeignKey("categories.id")),
        sa.Column("created_at", sa.DateTime(), server_default=sa.func.now()),
    ]
    op.create_table("products", *columns)
    # Composite index to serve "active products in category" lookups.
    op.create_index(
        "idx_products_category",
        "products",
        ["category_id", "is_active"],
    )


def downgrade() -> None:
    """Revert the upgrade: drop the index first, then the table."""
    # Pass table_name explicitly: backends such as MySQL require the owning
    # table to drop an index, and Alembic's autogenerate emits it this way.
    op.drop_index("idx_products_category", table_name="products")
    op.drop_table("products")

Data Migration Pattern

# alembic/versions/0003_fill_discount_rate.py
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column


def upgrade() -> None:
    """Add products.discount_rate in three backward-safe steps.

    Add nullable -> backfill -> tighten to NOT NULL, so the migration
    succeeds even when the table already contains rows.
    """
    # Step 1: add the column as nullable so existing rows stay valid.
    op.add_column(
        "products",
        sa.Column("discount_rate", sa.Float(), nullable=True),
    )

    # Step 2: backfill via a lightweight Core table construct — migration
    # files should not import ORM models, which drift over time.
    products = table("products",
    column("id", sa.Integer),
    column("discount_rate", sa.Float),
    )
    op.execute(
        products.update()
        # .is_(None) renders SQL "IS NULL" and avoids the E711 `== None` idiom.
        .where(products.c.discount_rate.is_(None))
        .values(discount_rate=0.0)
    )

    # Step 3: enforce NOT NULL. existing_type is required on backends such
    # as MySQL, where ALTER must restate the column's full type.
    op.alter_column(
        "products",
        "discount_rate",
        existing_type=sa.Float(),
        nullable=False,
    )


def downgrade() -> None:
    """Remove discount_rate; the backfilled values are discarded."""
    op.drop_column("products", "discount_rate")

Async Environment Setup

# alembic/env.py (async version)
import asyncio
from sqlalchemy.ext.asyncio import create_async_engine

DATABASE_URL = "postgresql+asyncpg://user:password@localhost/dbname"


def do_run_migrations(connection):
    """Configure the Alembic context on *connection* and run migrations.

    Runs synchronously; the async engine invokes it through run_sync().
    """
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations():
    """Run migrations over an async engine, bridging into the sync Alembic API."""
    engine = create_async_engine(DATABASE_URL)
    try:
        async with engine.connect() as connection:
            # run_sync hands the underlying sync connection to Alembic.
            await connection.run_sync(do_run_migrations)
    finally:
        # Dispose even when a migration raises, so pooled connections
        # are closed instead of leaking until interpreter exit.
        await engine.dispose()


def run_migrations_online():
    """Sync entry point Alembic calls; drives the async migration runner."""
    asyncio.run(run_async_migrations())

Practical Tips

# Always review after auto-generating migrations!
alembic revision --autogenerate -m "add index"
# → Open the file in versions/ folder and review the content

# Test on staging environment first
DATABASE_URL=postgresql://user:pass@staging/db alembic upgrade head

# SQL preview (without actually applying)
alembic upgrade head --sql > migration.sql
cat migration.sql # review

Summary

| Command | Description |
| --- | --- |
| `alembic init` | Initialize project |
| `revision --autogenerate` | Auto-detect model changes |
| `upgrade head` | Apply latest migration |
| `downgrade -1` | Roll back to previous version |
| `current` | Check current applied state |
| `history` | View full history |
| `op.execute` (Django `RunPython` equivalent) | Data migration |

Alembic's --autogenerate automatically detects differences between models and DB, but the generated files must always be reviewed before deployment.

Advertisement