CLAUDE: Add Alembic migrations and database session injection

Database Infrastructure:
- Added Alembic migration system (alembic.ini, env.py)
- Migration 001: Initial schema
- Migration 004: Stat materialized views (enhanced)
- Migration 005: Composite indexes for performance
- operations.py: Session injection support for test isolation
- session.py: Enhanced session management

Application Updates:
- main.py: Integration with new database infrastructure
- health.py: Enhanced health checks with pool monitoring

Integration Tests:
- conftest.py: Session injection pattern for reliable tests
- test_operations.py: Database operations tests
- test_migrations.py: Migration verification tests

Session injection pattern (sketched below) enables:
- Production: Auto-commit per operation
- Testing: Shared session with automatic rollback
- Transactions: Multiple ops, single commit
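
A minimal sketch of the three modes (assuming the DatabaseOperations class and AsyncSessionLocal factory added in this commit; arguments are elided as in the module docstring below):

    # Production: each call opens, commits, and closes its own session
    db_ops = DatabaseOperations()
    await db_ops.create_game(...)

    # Testing: inject one session, roll back afterwards for isolation
    async with AsyncSessionLocal() as session:
        await DatabaseOperations(session).create_game(...)
        await session.rollback()  # discard all writes

    # Transactions: several operations, one commit by the caller
    async with AsyncSessionLocal() as session:
        db_ops = DatabaseOperations(session)
        await db_ops.save_play(play_data)
        await db_ops.update_game_state(game_id, inning, half, home_score, away_score)
        await session.commit()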

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Cal Corum 2025-11-28 12:09:09 -06:00
parent 4253b71db9
commit 9d0d29ef18
13 changed files with 2020 additions and 726 deletions

backend/alembic.ini (new file, 65 lines)

@@ -0,0 +1,65 @@
# Alembic configuration file for Paper Dynasty Game Engine
[alembic]
# Path to migration scripts
script_location = alembic
# Template used to generate migration files
file_template = %%(rev)s_%%(slug)s
# Output encoding
output_encoding = utf-8
# Truncate database identifiers to this length
truncate_slug_length = 40
# Run env.py when generating new revisions (not needed here)
revision_environment = false
# Sourceless mode - allow migration files with no .py source (.pyc only)
sourceless = false
# version path separator (OS agnostic)
version_path_separator = os
[post_write_hooks]
# Optional Ruff formatting after generating migrations
# hooks = ruff
# ruff.type = console_scripts
# ruff.entrypoint = ruff
# ruff.options = format REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
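
For reference, this configuration is consumed by the Alembic CLI (run from backend/, e.g. alembic upgrade head). The same can be done programmatically; a minimal sketch using Alembic's command API:

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")   # run from backend/ so script_location resolves
    command.upgrade(cfg, "head")  # apply all pending migrations
    # command.revision(cfg, message="describe change", autogenerate=True)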

backend/alembic/env.py (new file, 101 lines)

@@ -0,0 +1,101 @@
"""
Alembic migration environment configuration.
Uses synchronous psycopg2 driver for migrations while the application
uses asyncpg at runtime. This is the standard pattern for Alembic with
async SQLAlchemy applications.
"""
from logging.config import fileConfig
from sqlalchemy import create_engine, pool
from sqlalchemy.engine import Connection
from alembic import context
# Import Base and all models to ensure metadata is complete
from app.database.session import Base
from app.models.db_models import ( # noqa: F401
Game,
GameCardsetLink,
GameSession,
Lineup,
Play,
Roll,
RosterLink,
)
# Load settings for database URL
from app.config import get_settings
# Alembic Config object - provides access to the .ini file values
config = context.config
# Get database URL from settings and convert for sync driver (psycopg2)
settings = get_settings()
# Replace asyncpg with psycopg2 for synchronous migrations
sync_database_url = settings.database_url.replace("+asyncpg", "+psycopg2")
config.set_main_option("sqlalchemy.url", sync_database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Target metadata for 'autogenerate' support
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""
Run migrations in 'offline' mode.
This configures the context with just a URL and not an Engine,
generating SQL scripts without connecting to the database.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True, # Detect column type changes
compare_server_default=True, # Detect server_default changes
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations using the provided connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""
Run migrations in 'online' mode.
Creates a synchronous engine and associates a connection with the context.
"""
connectable = create_engine(
config.get_main_option("sqlalchemy.url"),
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
do_run_migrations(connection)
connectable.dispose()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
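
The offline branch above corresponds to Alembic's --sql mode, which emits the migration SQL instead of executing it (useful for DBA review). A sketch, assuming the same alembic.ini:

    from alembic import command
    from alembic.config import Config

    # Prints the upgrade SQL to stdout rather than running it (offline mode)
    command.upgrade(Config("alembic.ini"), "head", sql=True)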

backend/alembic/script.py.mako (new file, 26 lines)

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

Migration 001 (new file, 288 lines)

@@ -0,0 +1,288 @@
"""Initial database schema
Revision ID: 001
Revises:
Create Date: 2025-01-27
Creates the core tables for the Paper Dynasty Real-Time Game Engine:
- games: Game container with status, scores, AI configuration
- plays: At-bat records with 30+ statistical fields
- lineups: Player assignments and substitution tracking
- game_sessions: WebSocket state tracking
- rolls: Dice roll audit trail
- roster_links: Eligible cards (PD) or players (SBA) per game
- game_cardset_links: PD league cardset configuration
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '001'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# === GAMES TABLE ===
op.create_table(
'games',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('league_id', sa.String(50), nullable=False),
sa.Column('home_team_id', sa.Integer(), nullable=False),
sa.Column('away_team_id', sa.Integer(), nullable=False),
sa.Column('status', sa.String(20), nullable=False, server_default='pending'),
sa.Column('game_mode', sa.String(20), nullable=False),
sa.Column('visibility', sa.String(20), nullable=False),
sa.Column('current_inning', sa.Integer(), nullable=True),
sa.Column('current_half', sa.String(10), nullable=True),
sa.Column('home_score', sa.Integer(), nullable=True, server_default='0'),
sa.Column('away_score', sa.Integer(), nullable=True, server_default='0'),
sa.Column('home_team_is_ai', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('away_team_is_ai', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('ai_difficulty', sa.String(20), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('started_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('winner_team_id', sa.Integer(), nullable=True),
sa.Column('game_metadata', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_games_created_at', 'games', ['created_at'])
op.create_index('ix_games_league_id', 'games', ['league_id'])
op.create_index('ix_games_status', 'games', ['status'])
# === LINEUPS TABLE ===
op.create_table(
'lineups',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('game_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.Column('card_id', sa.Integer(), nullable=True),
sa.Column('player_id', sa.Integer(), nullable=True),
sa.Column('position', sa.String(10), nullable=False),
sa.Column('batting_order', sa.Integer(), nullable=True),
sa.Column('is_starter', sa.Boolean(), nullable=True, server_default='true'),
sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
sa.Column('entered_inning', sa.Integer(), nullable=True, server_default='1'),
sa.Column('replacing_id', sa.Integer(), nullable=True),
sa.Column('after_play', sa.Integer(), nullable=True),
sa.Column('is_fatigued', sa.Boolean(), nullable=True),
sa.Column('lineup_metadata', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ondelete='CASCADE'),
sa.CheckConstraint(
'(card_id IS NOT NULL)::int + (player_id IS NOT NULL)::int = 1',
name='lineup_one_id_required'
),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_lineups_game_id', 'lineups', ['game_id'])
op.create_index('ix_lineups_is_active', 'lineups', ['is_active'])
op.create_index('ix_lineups_team_id', 'lineups', ['team_id'])
# === PLAYS TABLE ===
op.create_table(
'plays',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('game_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('play_number', sa.Integer(), nullable=False),
sa.Column('inning', sa.Integer(), nullable=False, server_default='1'),
sa.Column('half', sa.String(10), nullable=False),
sa.Column('outs_before', sa.Integer(), nullable=False, server_default='0'),
sa.Column('batting_order', sa.Integer(), nullable=False, server_default='1'),
sa.Column('away_score', sa.Integer(), nullable=True, server_default='0'),
sa.Column('home_score', sa.Integer(), nullable=True, server_default='0'),
# Players involved
sa.Column('batter_id', sa.Integer(), nullable=False),
sa.Column('pitcher_id', sa.Integer(), nullable=False),
sa.Column('catcher_id', sa.Integer(), nullable=False),
sa.Column('defender_id', sa.Integer(), nullable=True),
sa.Column('runner_id', sa.Integer(), nullable=True),
# Base runners
sa.Column('on_first_id', sa.Integer(), nullable=True),
sa.Column('on_second_id', sa.Integer(), nullable=True),
sa.Column('on_third_id', sa.Integer(), nullable=True),
# Runner final positions
sa.Column('on_first_final', sa.Integer(), nullable=True),
sa.Column('on_second_final', sa.Integer(), nullable=True),
sa.Column('on_third_final', sa.Integer(), nullable=True),
sa.Column('batter_final', sa.Integer(), nullable=True),
# Base state code
sa.Column('on_base_code', sa.Integer(), nullable=True, server_default='0'),
# Strategic decisions
sa.Column('defensive_choices', sa.JSON(), nullable=True),
sa.Column('offensive_choices', sa.JSON(), nullable=True),
# Play result
sa.Column('dice_roll', sa.String(50), nullable=True),
sa.Column('hit_type', sa.String(50), nullable=True),
sa.Column('result_description', sa.Text(), nullable=True),
sa.Column('outs_recorded', sa.Integer(), nullable=False, server_default='0'),
sa.Column('runs_scored', sa.Integer(), nullable=True, server_default='0'),
# Defensive details
sa.Column('check_pos', sa.String(10), nullable=True),
sa.Column('error', sa.Integer(), nullable=True, server_default='0'),
# Batting statistics
sa.Column('pa', sa.Integer(), nullable=True, server_default='0'),
sa.Column('ab', sa.Integer(), nullable=True, server_default='0'),
sa.Column('hit', sa.Integer(), nullable=True, server_default='0'),
sa.Column('double', sa.Integer(), nullable=True, server_default='0'),
sa.Column('triple', sa.Integer(), nullable=True, server_default='0'),
sa.Column('homerun', sa.Integer(), nullable=True, server_default='0'),
sa.Column('bb', sa.Integer(), nullable=True, server_default='0'),
sa.Column('so', sa.Integer(), nullable=True, server_default='0'),
sa.Column('hbp', sa.Integer(), nullable=True, server_default='0'),
sa.Column('rbi', sa.Integer(), nullable=True, server_default='0'),
sa.Column('sac', sa.Integer(), nullable=True, server_default='0'),
sa.Column('ibb', sa.Integer(), nullable=True, server_default='0'),
sa.Column('gidp', sa.Integer(), nullable=True, server_default='0'),
# Baserunning statistics
sa.Column('sb', sa.Integer(), nullable=True, server_default='0'),
sa.Column('cs', sa.Integer(), nullable=True, server_default='0'),
# Pitching events
sa.Column('wild_pitch', sa.Integer(), nullable=True, server_default='0'),
sa.Column('passed_ball', sa.Integer(), nullable=True, server_default='0'),
sa.Column('pick_off', sa.Integer(), nullable=True, server_default='0'),
sa.Column('balk', sa.Integer(), nullable=True, server_default='0'),
# Ballpark power events
sa.Column('bphr', sa.Integer(), nullable=True, server_default='0'),
sa.Column('bpfo', sa.Integer(), nullable=True, server_default='0'),
sa.Column('bp1b', sa.Integer(), nullable=True, server_default='0'),
sa.Column('bplo', sa.Integer(), nullable=True, server_default='0'),
# Advanced analytics
sa.Column('wpa', sa.Float(), nullable=True, server_default='0.0'),
sa.Column('re24', sa.Float(), nullable=True, server_default='0.0'),
# Earned/unearned runs
sa.Column('run', sa.Integer(), nullable=True, server_default='0'),
sa.Column('e_run', sa.Integer(), nullable=True, server_default='0'),
# Game situation flags
sa.Column('is_tied', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('is_go_ahead', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('is_new_inning', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('in_pow', sa.Boolean(), nullable=True, server_default='false'),
# Play workflow
sa.Column('complete', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('locked', sa.Boolean(), nullable=True, server_default='false'),
# Timestamps
sa.Column('created_at', sa.DateTime(), nullable=True),
# Extensibility
sa.Column('play_metadata', sa.JSON(), nullable=True),
# Foreign keys
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['batter_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['pitcher_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['catcher_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['defender_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['runner_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['on_first_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['on_second_id'], ['lineups.id']),
sa.ForeignKeyConstraint(['on_third_id'], ['lineups.id']),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_plays_complete', 'plays', ['complete'])
op.create_index('ix_plays_created_at', 'plays', ['created_at'])
op.create_index('ix_plays_game_id', 'plays', ['game_id'])
# === GAME_SESSIONS TABLE ===
op.create_table(
'game_sessions',
sa.Column('game_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('connected_users', sa.JSON(), nullable=True),
sa.Column('last_action_at', sa.DateTime(), nullable=True),
sa.Column('state_snapshot', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('game_id')
)
op.create_index('ix_game_sessions_last_action_at', 'game_sessions', ['last_action_at'])
# === ROLLS TABLE ===
op.create_table(
'rolls',
sa.Column('roll_id', sa.String(), nullable=False),
sa.Column('game_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('roll_type', sa.String(), nullable=False),
sa.Column('league_id', sa.String(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=True),
sa.Column('player_id', sa.Integer(), nullable=True),
sa.Column('roll_data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('context', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()')),
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('roll_id')
)
op.create_index('ix_rolls_game_id', 'rolls', ['game_id'])
op.create_index('ix_rolls_league_id', 'rolls', ['league_id'])
op.create_index('ix_rolls_player_id', 'rolls', ['player_id'])
op.create_index('ix_rolls_roll_type', 'rolls', ['roll_type'])
op.create_index('ix_rolls_team_id', 'rolls', ['team_id'])
op.create_index('ix_rolls_timestamp', 'rolls', ['timestamp'])
# === ROSTER_LINKS TABLE ===
op.create_table(
'roster_links',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('game_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('card_id', sa.Integer(), nullable=True),
sa.Column('player_id', sa.Integer(), nullable=True),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ondelete='CASCADE'),
sa.CheckConstraint(
'(card_id IS NOT NULL)::int + (player_id IS NOT NULL)::int = 1',
name='roster_link_one_id_required'
),
sa.UniqueConstraint('game_id', 'card_id', name='uq_game_card'),
sa.UniqueConstraint('game_id', 'player_id', name='uq_game_player'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_roster_links_game_id', 'roster_links', ['game_id'])
op.create_index('ix_roster_links_team_id', 'roster_links', ['team_id'])
# === GAME_CARDSET_LINKS TABLE ===
op.create_table(
'game_cardset_links',
sa.Column('game_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('cardset_id', sa.Integer(), nullable=False),
sa.Column('priority', sa.Integer(), nullable=True, server_default='1'),
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('game_id', 'cardset_id')
)
op.create_index('ix_game_cardset_links_priority', 'game_cardset_links', ['priority'])
def downgrade() -> None:
# Drop tables in reverse order (respecting foreign key constraints)
op.drop_index('ix_game_cardset_links_priority', table_name='game_cardset_links')
op.drop_table('game_cardset_links')
op.drop_index('ix_roster_links_team_id', table_name='roster_links')
op.drop_index('ix_roster_links_game_id', table_name='roster_links')
op.drop_table('roster_links')
op.drop_index('ix_rolls_timestamp', table_name='rolls')
op.drop_index('ix_rolls_team_id', table_name='rolls')
op.drop_index('ix_rolls_roll_type', table_name='rolls')
op.drop_index('ix_rolls_player_id', table_name='rolls')
op.drop_index('ix_rolls_league_id', table_name='rolls')
op.drop_index('ix_rolls_game_id', table_name='rolls')
op.drop_table('rolls')
op.drop_index('ix_game_sessions_last_action_at', table_name='game_sessions')
op.drop_table('game_sessions')
op.drop_index('ix_plays_game_id', table_name='plays')
op.drop_index('ix_plays_created_at', table_name='plays')
op.drop_index('ix_plays_complete', table_name='plays')
op.drop_table('plays')
op.drop_index('ix_lineups_team_id', table_name='lineups')
op.drop_index('ix_lineups_is_active', table_name='lineups')
op.drop_index('ix_lineups_game_id', table_name='lineups')
op.drop_table('lineups')
op.drop_index('ix_games_status', table_name='games')
op.drop_index('ix_games_league_id', table_name='games')
op.drop_index('ix_games_created_at', table_name='games')
op.drop_table('games')
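
Note the lineup_one_id_required and roster_link_one_id_required checks encode an exclusive-or: exactly one of card_id (PD) or player_id (SBA) may be non-NULL, since each (x IS NOT NULL)::int term is 0 or 1 and the sum must equal 1. A hedged sketch of the behavior (session and IDs are illustrative; Lineup is the model from app.models.db_models):

    from sqlalchemy.exc import IntegrityError

    # Both IDs set -> sum is 2, so the CHECK constraint rejects the row
    session.add(Lineup(game_id=game_id, team_id=1, position="C",
                       card_id=101, player_id=202))
    try:
        await session.flush()
    except IntegrityError:
        await session.rollback()  # violated 'lineup_one_id_required'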

Migration 004 (modified)

@@ -1,7 +1,7 @@
 """Create materialized views for game statistics
 Revision ID: 004
-Revises: 003
+Revises: 001
 Create Date: 2025-11-07
 
 Creates three materialized views for statistics aggregation:
@@ -17,7 +17,7 @@ from alembic import op
 # revision identifiers, used by Alembic.
 revision = '004'
-down_revision = '003'
+down_revision = '001'
 branch_labels = None
 depends_on = None

Migration 005 (new file, 80 lines)

@@ -0,0 +1,80 @@
"""Add composite indexes for common query patterns.
Creates indexes to optimize:
- Game recovery (plays by game_id + play_number)
- Team lineup queries (game_id + team_id + is_active)
- Active player queries (game_id + is_active)
- Roll history queries (game_id + roll_type)
- Game listing queries (status + created_at)
Revision ID: 005
Revises: 004
Create Date: 2025-11-27
"""
from typing import Sequence, Union
from alembic import op
revision: str = '005'
down_revision: Union[str, Sequence[str], None] = '004'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Create composite indexes for common query patterns."""
# Plays table - optimize game recovery and play history
# Used by: get_plays_for_game, state recovery
op.create_index(
'idx_play_game_number',
'plays',
['game_id', 'play_number'],
unique=False
)
# Lineups table - optimize team lineup queries
# Used by: get_lineup_for_team, substitution lookups
op.create_index(
'idx_lineup_game_team_active',
'lineups',
['game_id', 'team_id', 'is_active'],
unique=False
)
# Lineups table - optimize active player queries
# Used by: get_active_players
op.create_index(
'idx_lineup_game_active',
'lineups',
['game_id', 'is_active'],
unique=False
)
# Rolls table - optimize roll history queries
# Used by: get_rolls_for_game, roll auditing
op.create_index(
'idx_roll_game_type',
'rolls',
['game_id', 'roll_type'],
unique=False
)
# Games table - optimize status queries
# Used by: list_active_games, game browsing
op.create_index(
'idx_game_status_created',
'games',
['status', 'created_at'],
unique=False
)
def downgrade() -> None:
"""Remove composite indexes."""
op.drop_index('idx_play_game_number', table_name='plays')
op.drop_index('idx_lineup_game_team_active', table_name='lineups')
op.drop_index('idx_lineup_game_active', table_name='lineups')
op.drop_index('idx_roll_game_type', table_name='rolls')
op.drop_index('idx_game_status_created', table_name='games')
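
For example, idx_play_game_number matches the recovery query shape used by get_plays in operations.py (a sketch; Play and game_id are assumed from that module, and the planner's index choice ultimately depends on table statistics):

    from sqlalchemy import select

    # Equality on game_id plus ordering on play_number fits the composite index
    stmt = select(Play).where(Play.game_id == game_id).order_by(Play.play_number)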

backend/app/api/routes/health.py (modified)

@@ -46,3 +46,159 @@ async def database_health():
             "error": str(e),
             "timestamp": pendulum.now("UTC").to_iso8601_string(),
         }
@router.get("/health/memory")
async def memory_health():
"""
Memory usage health check for game state management.
Returns memory statistics including:
- active_games: Number of games currently in memory
- max_games: Configured limit
- usage_percent: Current usage as percentage of limit
- oldest_game_hours: Age of oldest game in hours
- status: healthy (<75%), warning (75-90%), critical (>90%)
"""
from app.core.state_manager import state_manager
stats = state_manager.get_memory_stats()
# Determine health status based on usage percentage
if stats["max_games"] > 0:
usage_pct = (stats["active_games"] / stats["max_games"]) * 100
else:
usage_pct = 0
if usage_pct > 90:
status = "critical"
elif usage_pct > 75:
status = "warning"
else:
status = "healthy"
return {
"status": status,
"usage_percent": round(usage_pct, 1),
"active_games": stats["active_games"],
"max_games": stats["max_games"],
"oldest_game_hours": round(stats["oldest_game_hours"], 2),
"total_lineups_cached": stats["total_lineups_cached"],
"total_locks": stats["total_locks"],
"idle_timeout_hours": settings.game_idle_timeout_hours,
"eviction_interval_minutes": settings.game_eviction_interval_minutes,
"timestamp": pendulum.now("UTC").to_iso8601_string(),
}
@router.get("/health/pool")
async def connection_pool_health():
"""
Database connection pool health check.
Returns pool statistics including:
- pool_size: Configured base pool size
- max_overflow: Maximum overflow connections allowed
- available: Connections available for use
- in_use: Connections currently checked out
- overflow_active: Overflow connections in use
- usage_percent: Current usage as percentage of total capacity
- status: healthy (<75%), warning (75-90%), critical (>90%)
- recent_history: Last 5 usage snapshots
"""
from app.monitoring.pool_monitor import pool_monitor
if not pool_monitor:
return {
"status": "unknown",
"message": "Pool monitor not initialized",
"timestamp": pendulum.now("UTC").to_iso8601_string(),
}
health = pool_monitor.get_health_status()
history = pool_monitor.get_history(limit=5)
return {
**health,
"recent_history": history,
}
@router.get("/health/full")
async def full_health():
"""
Comprehensive health check aggregating all subsystems.
Returns overall status based on worst-case component status.
Components checked: database connectivity, memory usage, connection pool.
"""
from sqlalchemy import text
from app.core.state_manager import state_manager
from app.database.session import engine
from app.monitoring.pool_monitor import pool_monitor
components = {}
# Database connectivity
try:
async with engine.connect() as conn:
await conn.execute(text("SELECT 1"))
components["database"] = {"status": "healthy"}
except Exception as e:
logger.error(f"Database health check failed: {e}")
components["database"] = {"status": "critical", "error": str(e)}
# Memory/game state
try:
stats = state_manager.get_memory_stats()
if stats["max_games"] > 0:
usage_pct = (stats["active_games"] / stats["max_games"]) * 100
else:
usage_pct = 0
if usage_pct > 90:
mem_status = "critical"
elif usage_pct > 75:
mem_status = "warning"
else:
mem_status = "healthy"
components["memory"] = {
"status": mem_status,
"usage_percent": round(usage_pct, 1),
}
except Exception as e:
logger.error(f"Memory health check failed: {e}")
components["memory"] = {"status": "unknown", "error": str(e)}
# Connection pool
if pool_monitor:
try:
pool_health = pool_monitor.get_health_status()
components["pool"] = {
"status": pool_health["status"],
"usage_percent": pool_health["usage_percent"],
}
except Exception as e:
logger.error(f"Pool health check failed: {e}")
components["pool"] = {"status": "unknown", "error": str(e)}
else:
components["pool"] = {"status": "unknown", "message": "Not initialized"}
# Aggregate status (worst case wins)
statuses = [c.get("status", "unknown") for c in components.values()]
if "critical" in statuses:
overall = "critical"
elif "warning" in statuses:
overall = "warning"
elif "unknown" in statuses:
overall = "degraded"
else:
overall = "healthy"
return {
"status": overall,
"components": components,
"timestamp": pendulum.now("UTC").to_iso8601_string(),
}
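
A quick way to exercise these endpoints (a sketch assuming the health router is mounted at the application root and the server listens on localhost:8000):

    import asyncio
    import httpx

    async def check() -> None:
        async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
            body = (await client.get("/health/full")).json()
            print(body["status"])      # healthy / warning / critical / degraded
            print(body["components"])  # per-subsystem detail

    asyncio.run(check())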

backend/app/database/operations.py (modified)

@@ -4,6 +4,18 @@ Database Operations - Async persistence layer for game data.
 Provides async operations for persisting and retrieving game data.
 Used by StateManager for database persistence and recovery.
+
+Supports session injection for efficient batching and easy testing:
+
+    # Production - auto-manages sessions
+    db_ops = DatabaseOperations()
+    await db_ops.create_game(...)  # Creates and commits its own session
+
+    # Testing or batching - inject shared session
+    async with AsyncSessionLocal() as session:
+        db_ops = DatabaseOperations(session)
+        await db_ops.create_game(...)  # Uses injected session
+        await db_ops.add_lineup(...)   # Same session
+        await session.commit()         # Caller commits
 
 Author: Claude
 Date: 2025-10-22
 """
@@ -12,6 +24,8 @@ Date: 2025-10-22
 # Note: SQLAlchemy Column descriptors cause false positives in Pylance/Pyright
 import logging
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
 from uuid import UUID
 
 from sqlalchemy import select
@@ -29,9 +43,54 @@ class DatabaseOperations:
     Async database operations for game persistence.
 
     Provides methods for creating, reading, and updating game data in PostgreSQL.
-    All operations are async and use the AsyncSessionLocal for session management.
+
+    Session Management:
+    - If constructed without a session, each method creates and commits its own session
+    - If constructed with a session, all methods use that session (caller commits)
+
+    Args:
+        session: Optional AsyncSession for dependency injection.
+            If provided, caller is responsible for commit/rollback.
     """
+
+    def __init__(self, session: AsyncSession | None = None):
+        """
+        Initialize DatabaseOperations.
+
+        Args:
+            session: Optional AsyncSession. If provided, all operations use this
+                session and caller is responsible for committing.
+                If None, each operation creates its own auto-committing session.
+        """
+        self._session = session
+
+    @asynccontextmanager
+    async def _get_session(self) -> AsyncGenerator[AsyncSession]:
+        """
+        Get database session for operations.
+
+        If a session was injected at construction:
+        - Yields that session
+        - Does NOT commit (caller controls transaction)
+
+        If no session was injected:
+        - Creates a new session
+        - Auto-commits on success
+        - Rolls back on exception
+        """
+        if self._session:
+            # Use injected session - caller controls transaction
+            yield self._session
+        else:
+            # Create new session with auto-commit/rollback
+            async with AsyncSessionLocal() as session:
+                try:
+                    yield session
+                    await session.commit()
+                except Exception:
+                    await session.rollback()
+                    raise
     async def create_game(
         self,
         game_id: UUID,
@@ -64,8 +123,7 @@ class DatabaseOperations:
         Raises:
             SQLAlchemyError: If database operation fails
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             game = Game(
                 id=game_id,
                 league_id=league_id,
@@ -79,14 +137,10 @@
                 status="pending",
             )
             session.add(game)
-            await session.commit()
+            await session.flush()
             await session.refresh(game)
             logger.info(f"Created game {game_id} in database ({league_id})")
             return game
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to create game {game_id}: {e}")
-                raise
 
     async def get_game(self, game_id: UUID) -> Game | None:
         """
@@ -98,7 +152,7 @@
         Returns:
             Game model if found, None otherwise
         """
-        async with AsyncSessionLocal() as session:
+        async with self._get_session() as session:
             result = await session.execute(select(Game).where(Game.id == game_id))
             game = result.scalar_one_or_none()
             if game:
@@ -113,7 +167,6 @@
         home_score: int,
         away_score: int,
         status: str | None = None,
-        session: AsyncSession | None = None,
     ) -> None:
         """
         Update game state fields using direct UPDATE (no SELECT).
@@ -125,14 +178,13 @@
             home_score: Home team score
             away_score: Away team score
             status: Game status if updating
-            session: Optional external session for transaction grouping
 
         Raises:
             ValueError: If game not found
         """
         from sqlalchemy import update
 
-        async def _do_update(sess: AsyncSession) -> None:
+        async with self._get_session() as session:
             # Build update values
             update_values = {
                 "current_inning": inning,
@@ -145,31 +197,14 @@
                 update_values["status"] = status
 
             # Direct UPDATE statement (no SELECT needed)
-            result = await sess.execute(
+            result = await session.execute(
                 update(Game).where(Game.id == game_id).values(**update_values)
             )
             if result.rowcount == 0:
                 raise ValueError(f"Game {game_id} not found for update")
 
-        # Use provided session or create new one
-        if session:
-            await _do_update(session)
-            # Don't commit - caller controls transaction
-            logger.debug(f"Updated game {game_id} state in external transaction")
-        else:
-            async with AsyncSessionLocal() as new_session:
-                try:
-                    await _do_update(new_session)
-                    await new_session.commit()
-                    logger.debug(
-                        f"Updated game {game_id} state (inning {inning}, {half})"
-                    )
-                except Exception as e:
-                    await new_session.rollback()
-                    logger.error(f"Failed to update game {game_id} state: {e}")
-                    raise
+            logger.debug(f"Updated game {game_id} state (inning {inning}, {half})")
     async def add_pd_lineup_card(
         self,
@@ -197,8 +232,7 @@
         Raises:
             SQLAlchemyError: If database operation fails
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             lineup = Lineup(
                 game_id=game_id,
                 team_id=team_id,
@@ -210,16 +244,11 @@
                 is_active=True,
             )
             session.add(lineup)
-            await session.commit()
+            await session.flush()
             await session.refresh(lineup)
             logger.debug(f"Added PD card {card_id} to lineup in game {game_id}")
             return lineup
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to add PD lineup card: {e}")
-                raise
 
     async def add_sba_lineup_player(
         self,
         game_id: UUID,
@@ -246,8 +275,7 @@
         Raises:
             SQLAlchemyError: If database operation fails
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             lineup = Lineup(
                 game_id=game_id,
                 team_id=team_id,
@@ -259,18 +287,11 @@
                 is_active=True,
             )
             session.add(lineup)
-            await session.commit()
+            await session.flush()
             await session.refresh(lineup)
-            logger.debug(
-                f"Added SBA player {player_id} to lineup in game {game_id}"
-            )
+            logger.debug(f"Added SBA player {player_id} to lineup in game {game_id}")
             return lineup
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to add SBA lineup player: {e}")
-                raise
 
     async def get_active_lineup(self, game_id: UUID, team_id: int) -> list[Lineup]:
         """
         Get active lineup for team.
@@ -282,7 +303,7 @@
         Returns:
             List of active Lineup models, sorted by batting order
         """
-        async with AsyncSessionLocal() as session:
+        async with self._get_session() as session:
             result = await session.execute(
                 select(Lineup)
                 .where(
@@ -339,8 +360,7 @@
             ValueError: If player_out not found
             SQLAlchemyError: If database operation fails
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             # STEP 1: Mark old player inactive
             result = await session.execute(
                 select(Lineup).where(Lineup.id == player_out_lineup_id)
@@ -368,7 +388,7 @@
             )
             session.add(new_lineup)
 
-            await session.commit()
+            await session.flush()
             new_lineup_id = new_lineup.id  # type: ignore[assignment]
@@ -379,11 +399,6 @@
 
             return new_lineup_id
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to create substitution: {e}", exc_info=True)
-                raise
 
     async def get_eligible_substitutes(
         self, game_id: UUID, team_id: int
     ) -> list[Lineup]:
@@ -397,7 +412,7 @@
         Returns:
             List of inactive Lineup models
         """
-        async with AsyncSessionLocal() as session:
+        async with self._get_session() as session:
             result = await session.execute(
                 select(Lineup)
                 .where(
@@ -413,15 +428,12 @@
             )
 
             return subs
-    async def save_play(
-        self, play_data: dict, session: AsyncSession | None = None
-    ) -> int:
+    async def save_play(self, play_data: dict) -> int:
         """
         Save play to database.
 
         Args:
             play_data: Dictionary with play data matching Play model fields
-            session: Optional external session for transaction grouping
 
         Returns:
             Play ID (primary key)
@@ -429,30 +441,14 @@
         Raises:
             SQLAlchemyError: If database operation fails
         """
-        async def _do_save(sess: AsyncSession) -> int:
+        async with self._get_session() as session:
             play = Play(**play_data)
-            sess.add(play)
-            await sess.flush()  # Get ID without committing
+            session.add(play)
+            await session.flush()  # Get ID without committing
             play_id = play.id
             logger.info(f"Saved play {play.play_number} for game {play.game_id}")
             return play_id  # type: ignore
-
-        # Use provided session or create new one
-        if session:
-            return await _do_save(session)
-            # Don't commit - caller controls transaction
-
-        async with AsyncSessionLocal() as new_session:
-            try:
-                play_id = await _do_save(new_session)
-                await new_session.commit()
-                return play_id
-            except Exception as e:
-                await new_session.rollback()
-                logger.error(f"Failed to save play: {e}")
-                raise
 
     async def get_plays(self, game_id: UUID) -> list[Play]:
         """
         Get all plays for game.
@@ -463,7 +459,7 @@
         Returns:
             List of Play models, ordered by play_number
         """
-        async with AsyncSessionLocal() as session:
+        async with self._get_session() as session:
             result = await session.execute(
                 select(Play).where(Play.game_id == game_id).order_by(Play.play_number)
             )
@@ -483,7 +479,7 @@
         Returns:
             Dictionary with 'game', 'lineups', and 'plays' keys, or None if game not found
         """
-        async with AsyncSessionLocal() as session:
+        async with self._get_session() as session:
             # Get game
             game_result = await session.execute(select(Game).where(Game.id == game_id))
             game = game_result.scalar_one_or_none()
@@ -570,20 +566,14 @@
         Returns:
             Created GameSession model
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             game_session = GameSession(game_id=game_id)
             session.add(game_session)
-            await session.commit()
+            await session.flush()
             await session.refresh(game_session)
             logger.info(f"Created game session for {game_id}")
             return game_session
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to create game session: {e}")
-                raise
     async def update_session_snapshot(
         self, game_id: UUID, state_snapshot: dict
     ) -> None:
@@ -597,8 +587,7 @@
         Raises:
             ValueError: If game session not found
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             result = await session.execute(
                 select(GameSession).where(GameSession.game_id == game_id)
             )
@@ -608,14 +597,8 @@
                 raise ValueError(f"Game session {game_id} not found")
 
             game_session.state_snapshot = state_snapshot
-            await session.commit()
             logger.debug(f"Updated session snapshot for {game_id}")
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to update session snapshot: {e}")
-                raise
 
     async def add_pd_roster_card(
         self, game_id: UUID, card_id: int, team_id: int
     ) -> PdRosterLinkData:
@@ -633,13 +616,12 @@
         Raises:
             ValueError: If card already rostered or constraint violation
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             roster_link = RosterLink(
                 game_id=game_id, card_id=card_id, team_id=team_id
             )
             session.add(roster_link)
-            await session.commit()
+            await session.flush()
             await session.refresh(roster_link)
 
             logger.info(f"Added PD card {card_id} to roster for game {game_id}")
@@ -650,11 +632,6 @@
                 team_id=roster_link.team_id,
             )
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to add PD roster card: {e}")
-                raise ValueError(f"Could not add card to roster: {e}")
 
     async def add_sba_roster_player(
         self, game_id: UUID, player_id: int, team_id: int
     ) -> SbaRosterLinkData:
@@ -672,17 +649,14 @@
         Raises:
             ValueError: If player already rostered or constraint violation
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             roster_link = RosterLink(
                 game_id=game_id, player_id=player_id, team_id=team_id
             )
             session.add(roster_link)
-            await session.commit()
+            await session.flush()
             await session.refresh(roster_link)
-            logger.info(
-                f"Added SBA player {player_id} to roster for game {game_id}"
-            )
+            logger.info(f"Added SBA player {player_id} to roster for game {game_id}")
 
             return SbaRosterLinkData(
                 id=roster_link.id,
@@ -691,11 +665,6 @@
                 team_id=roster_link.team_id,
             )
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to add SBA roster player: {e}")
-                raise ValueError(f"Could not add player to roster: {e}")
     async def get_pd_roster(
         self, game_id: UUID, team_id: int | None = None
     ) -> list[PdRosterLinkData]:
@@ -709,8 +678,7 @@
         Returns:
             List of PdRosterLinkData
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             query = select(RosterLink).where(
                 RosterLink.game_id == game_id, RosterLink.card_id.is_not(None)
             )
@@ -731,10 +699,6 @@
                 for link in roster_links
             ]
-            except Exception as e:
-                logger.error(f"Failed to get PD roster: {e}")
-                raise
 
     async def get_sba_roster(
         self, game_id: UUID, team_id: int | None = None
     ) -> list[SbaRosterLinkData]:
@@ -748,8 +712,7 @@
         Returns:
             List of SbaRosterLinkData
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             query = select(RosterLink).where(
                 RosterLink.game_id == game_id, RosterLink.player_id.is_not(None)
             )
@@ -770,10 +733,6 @@
                 for link in roster_links
             ]
-            except Exception as e:
-                logger.error(f"Failed to get SBA roster: {e}")
-                raise
 
     async def remove_roster_entry(self, roster_id: int) -> None:
         """
         Remove a roster entry by ID.
@@ -784,8 +743,7 @@
         Raises:
             ValueError: If roster entry not found
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             result = await session.execute(
                 select(RosterLink).where(RosterLink.id == roster_id)
             )
@@ -795,14 +753,8 @@
                 raise ValueError(f"Roster entry {roster_id} not found")
 
             await session.delete(roster_link)
-            await session.commit()
             logger.info(f"Removed roster entry {roster_id}")
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to remove roster entry: {e}")
-                raise
 
     async def save_rolls_batch(self, rolls: list) -> None:
         """
         Save multiple dice rolls in a single transaction.
@@ -819,8 +771,7 @@
             logger.debug("No rolls to save")
             return
 
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             roll_records = [
                 Roll(
                     roll_id=roll.roll_id,
@@ -837,14 +788,9 @@
             ]
 
             session.add_all(roll_records)
-            await session.commit()
+            await session.flush()
             logger.info(f"Batch saved {len(rolls)} rolls")
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to batch save rolls: {e}")
-                raise
 
     async def get_rolls_for_game(
         self,
         game_id: UUID,
@@ -864,8 +810,7 @@
         Returns:
             List of Roll objects
         """
-        async with AsyncSessionLocal() as session:
-            try:
+        async with self._get_session() as session:
             query = select(Roll).where(Roll.game_id == game_id)
 
             if roll_type:
@@ -879,10 +824,6 @@
             result = await session.execute(query)
             return list(result.scalars().all())
-            except Exception as e:
-                logger.error(f"Failed to get rolls for game: {e}")
-                raise
     # ============================================================================
     # ROLLBACK OPERATIONS
     # ============================================================================
@@ -900,16 +841,14 @@
         Returns:
             Number of plays deleted
         """
-        async with AsyncSessionLocal() as session:
-            try:
         from sqlalchemy import delete
 
+        async with self._get_session() as session:
             stmt = delete(Play).where(
                 Play.game_id == game_id, Play.play_number > after_play_number
             )
             result = await session.execute(stmt)
-            await session.commit()
             deleted_count = result.rowcount
 
             logger.info(
@@ -917,11 +856,6 @@
             )
             return deleted_count
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to delete plays: {e}")
-                raise
 
     async def delete_substitutions_after(
         self, game_id: UUID, after_play_number: int
     ) -> int:
@@ -937,16 +871,14 @@
         Returns:
             Number of lineup entries deleted
         """
-        async with AsyncSessionLocal() as session:
-            try:
         from sqlalchemy import delete
 
+        async with self._get_session() as session:
             stmt = delete(Lineup).where(
                 Lineup.game_id == game_id, Lineup.after_play >= after_play_number
             )
             result = await session.execute(stmt)
-            await session.commit()
             deleted_count = result.rowcount
 
             logger.info(
@@ -954,11 +886,6 @@
             )
             return deleted_count
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to delete substitutions: {e}")
-                raise
 
     async def delete_rolls_after(self, game_id: UUID, after_play_number: int) -> int:
         """
         Delete all dice rolls after a specific play number.
@@ -972,24 +899,17 @@
         Returns:
             Number of rolls deleted
         """
-        async with AsyncSessionLocal() as session:
-            try:
         from sqlalchemy import delete
 
+        async with self._get_session() as session:
             stmt = delete(Roll).where(
                 Roll.game_id == game_id, Roll.play_number > after_play_number
            )
             result = await session.execute(stmt)
-            await session.commit()
             deleted_count = result.rowcount
 
             logger.info(
                 f"Deleted {deleted_count} rolls after play {after_play_number} for game {game_id}"
             )
             return deleted_count
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Failed to delete rolls: {e}")
-                raise
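
The conftest.py session-injection fixture mentioned in the commit message presumably builds on this context manager; a hedged sketch of such a fixture (fixture name and module paths are assumptions):

    import pytest_asyncio
    from app.database.operations import DatabaseOperations
    from app.database.session import AsyncSessionLocal

    @pytest_asyncio.fixture
    async def db_ops():
        # One shared session per test; rollback guarantees isolation
        async with AsyncSessionLocal() as session:
            yield DatabaseOperations(session)
            await session.rollback()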

backend/app/database/session.py (modified)

@@ -1,6 +1,7 @@
 import logging
 from collections.abc import AsyncGenerator
 
+import sqlalchemy as sa
 from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
 from sqlalchemy.orm import declarative_base
@@ -32,13 +33,16 @@ Base = declarative_base()
 
 async def init_db() -> None:
-    """Initialize database tables"""
-    async with engine.begin() as conn:
-        # Import all models here to ensure they're registered
-        # Create tables
-        await conn.run_sync(Base.metadata.create_all)
-    logger.info("Database tables created")
+    """Initialize database connection.
+
+    NOTE: Schema creation is now handled by Alembic migrations.
+    Run `alembic upgrade head` to create/update schema.
+    See backend/README.md for migration instructions.
+    """
+    # Verify database connectivity
+    async with engine.begin() as conn:
+        await conn.execute(sa.text("SELECT 1"))
+    logger.info("Database connection initialized")
 
 
 async def get_session() -> AsyncGenerator[AsyncSession]:

backend/app/main.py (modified)

@@ -1,13 +1,25 @@
+import asyncio
 import logging
 from contextlib import asynccontextmanager
 
 import socketio
-from fastapi import FastAPI
+from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
+from sqlalchemy.exc import SQLAlchemyError
 
 from app.api.routes import auth, games, health, teams
 from app.config import get_settings
-from app.database.session import init_db
+from app.core.exceptions import (
+    DatabaseError,
+    GameEngineError,
+    GameNotFoundError,
+    InvalidGameStateError,
+)
+from app.core.state_manager import state_manager
+from app.database.session import engine, init_db
+from app.middleware.rate_limit import rate_limiter
+from app.monitoring.pool_monitor import init_pool_monitor
 from app.services import redis_client
 from app.utils.logging import setup_logging
 from app.websocket.connection_manager import ConnectionManager
@@ -15,10 +27,105 @@ from app.websocket.handlers import register_handlers
 
 logger = logging.getLogger(f"{__name__}.main")
# Background task handles
_eviction_task: asyncio.Task | None = None
_session_expiration_task: asyncio.Task | None = None
_rate_limit_cleanup_task: asyncio.Task | None = None
_pool_monitoring_task: asyncio.Task | None = None
async def periodic_eviction():
"""
Background task to periodically evict idle games from memory.
Runs on a configurable interval (default 60 minutes) to:
1. Evict games that have been idle beyond the timeout threshold
2. Enforce memory limits by evicting oldest games if over limit
3. Log memory statistics for monitoring
This prevents unbounded memory growth from abandoned games.
"""
settings = get_settings()
interval = settings.game_eviction_interval_minutes * 60
logger.info(
f"Starting periodic eviction task (interval: {settings.game_eviction_interval_minutes}m, "
f"idle timeout: {settings.game_idle_timeout_hours}h, max games: {settings.game_max_in_memory})"
)
while True:
try:
await asyncio.sleep(interval)
# Run idle game eviction
evicted = await state_manager.evict_idle_games()
# Enforce memory limit
force_evicted = await state_manager.enforce_memory_limit()
# Log stats
stats = state_manager.get_memory_stats()
logger.info(
f"Eviction cycle complete: {len(evicted)} idle + {len(force_evicted)} forced. "
f"Active: {stats['active_games']}/{stats['max_games']}, "
f"Oldest: {stats['oldest_game_hours']:.1f}h"
)
except asyncio.CancelledError:
logger.info("Eviction task cancelled - shutting down")
break
except Exception as e:
logger.error(f"Eviction task error: {e}", exc_info=True)
# Continue running despite errors - don't let one failure stop eviction
async def periodic_session_expiration(connection_manager):
    """
    Background task to periodically expire inactive WebSocket sessions.

    Runs every 60 seconds to detect and clean up zombie connections
    that weren't properly closed (network failure, browser crash, etc.).

    This works alongside Socket.io's ping/pong mechanism:
    - Socket.io ping/pong: Detects transport-level disconnections
    - This task: Detects application-level inactivity (no events for extended period)
    """
    settings = get_settings()
    interval = 60  # Check every minute
    timeout = settings.ws_connection_timeout * 5  # 5x timeout = 5 minutes default

    logger.info(
        f"Starting session expiration task (interval: {interval}s, timeout: {timeout}s)"
    )

    while True:
        try:
            await asyncio.sleep(interval)

            # Expire inactive sessions
            expired = await connection_manager.expire_inactive_sessions(timeout)

            # Log stats periodically
            stats = connection_manager.get_stats()
            if stats["total_sessions"] > 0:
                logger.debug(
                    f"Session stats: {stats['total_sessions']} sessions, "
                    f"{stats['unique_users']} users, "
                    f"{stats['active_game_rooms']} active games"
                )
        except asyncio.CancelledError:
            logger.info("Session expiration task cancelled - shutting down")
            break
        except Exception as e:
            logger.error(f"Session expiration task error: {e}", exc_info=True)
            # Continue running despite errors
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events"""
    global _eviction_task, _session_expiration_task, _rate_limit_cleanup_task, _pool_monitoring_task

    settings = get_settings()

    # Startup
@@ -29,6 +136,13 @@ async def lifespan(app: FastAPI):
    await init_db()
    logger.info("Database initialized")
    # Initialize pool monitor for connection pool observability
    pool_monitor = init_pool_monitor(engine)
    _pool_monitoring_task = asyncio.create_task(
        pool_monitor.start_monitoring(interval_seconds=60)
    )
    logger.info("Pool monitoring initialized")
    # Initialize Redis
    try:
        redis_url = settings.redis_url
@@ -39,11 +153,65 @@
            f"Redis connection failed: {e}. Position rating caching will be unavailable."
        )
    # Start background eviction task
    logger.info("Starting background eviction task")
    _eviction_task = asyncio.create_task(periodic_eviction())

    # Start session expiration task (connection_manager created in module scope below)
    # Note: This task cleans up zombie WebSocket connections
    logger.info("Starting session expiration task")
    _session_expiration_task = asyncio.create_task(
        periodic_session_expiration(connection_manager)
    )

    # Start rate limiter cleanup task
    # Note: This task cleans up stale rate limit buckets to prevent memory leaks
    logger.info("Starting rate limiter cleanup task")
    _rate_limit_cleanup_task = asyncio.create_task(
        rate_limiter.cleanup_stale_buckets()
    )
    yield

    # Shutdown
    logger.info("Shutting down Paper Dynasty Game Backend")
    # Stop eviction task
    if _eviction_task:
        logger.info("Stopping background eviction task")
        _eviction_task.cancel()
        try:
            await _eviction_task
        except asyncio.CancelledError:
            pass

    # Stop session expiration task
    if _session_expiration_task:
        logger.info("Stopping session expiration task")
        _session_expiration_task.cancel()
        try:
            await _session_expiration_task
        except asyncio.CancelledError:
            pass

    # Stop rate limiter cleanup task
    if _rate_limit_cleanup_task:
        logger.info("Stopping rate limiter cleanup task")
        _rate_limit_cleanup_task.cancel()
        try:
            await _rate_limit_cleanup_task
        except asyncio.CancelledError:
            pass

    # Stop pool monitoring task
    if _pool_monitoring_task:
        logger.info("Stopping pool monitoring task")
        _pool_monitoring_task.cancel()
        try:
            await _pool_monitoring_task
        except asyncio.CancelledError:
            pass
    # Disconnect Redis
    if redis_client.is_connected:
        await redis_client.disconnect()
@@ -58,6 +226,70 @@ app = FastAPI(
    lifespan=lifespan,
)
# Global exception handlers for REST API
@app.exception_handler(GameNotFoundError)
async def game_not_found_handler(request: Request, exc: GameNotFoundError):
    """Handle game not found errors with 404 response."""
    logger.warning(f"Game not found: {exc.game_id}")
    return JSONResponse(
        status_code=404,
        content={"detail": str(exc), "error_code": "GAME_NOT_FOUND"},
    )


@app.exception_handler(InvalidGameStateError)
async def invalid_game_state_handler(request: Request, exc: InvalidGameStateError):
    """Handle invalid game state errors with 400 response."""
    logger.warning(f"Invalid game state: {exc}")
    return JSONResponse(
        status_code=400,
        content={
            "detail": str(exc),
            "error_code": "INVALID_GAME_STATE",
            "current_state": exc.current_state,
            "expected_state": exc.expected_state,
        },
    )


@app.exception_handler(DatabaseError)
async def database_error_handler(request: Request, exc: DatabaseError):
    """Handle database errors with 500 response."""
    logger.error(f"Database error during {exc.operation}: {exc.original_error}")
    return JSONResponse(
        status_code=500,
        content={
            "detail": "Database error occurred",
            "error_code": "DATABASE_ERROR",
            "operation": exc.operation,
        },
    )


@app.exception_handler(GameEngineError)
async def game_engine_error_handler(request: Request, exc: GameEngineError):
    """Handle generic game engine errors with 400 response."""
    logger.warning(f"Game engine error: {exc}")
    return JSONResponse(
        status_code=400,
        content={"detail": str(exc), "error_code": "GAME_ENGINE_ERROR"},
    )


@app.exception_handler(SQLAlchemyError)
async def sqlalchemy_error_handler(request: Request, exc: SQLAlchemyError):
    """Handle SQLAlchemy errors with 500 response."""
    logger.error(f"SQLAlchemy error: {exc}")
    return JSONResponse(
        status_code=500,
        content={
            "detail": "Database error occurred - please retry",
            "error_code": "DATABASE_ERROR",
        },
    )
# CORS middleware
settings = get_settings()
app.add_middleware(
@@ -68,10 +300,16 @@ app.add_middleware(
    allow_headers=["*"],
)

# Initialize Socket.io with ping/pong for connection health
sio = socketio.AsyncServer(
    async_mode="asgi",
    cors_allowed_origins=settings.cors_origins,
    # Ping/pong for detecting dead connections:
    # - Server sends ping every ping_interval seconds
    # - Client must respond with pong within ping_timeout seconds
    # - Total: connection dies after (ping_interval + ping_timeout) of no response
    ping_interval=settings.ws_heartbeat_interval,  # Default: 30s
    ping_timeout=settings.ws_connection_timeout,  # Default: 60s
    logger=True,
    engineio_logger=False,
)
@@ -99,6 +337,47 @@ async def root():
    return {"message": "Paper Dynasty Game Backend", "version": "1.0.0"}
@app.get("/api/health/connections", tags=["health"])
async def connections_health():
"""
WebSocket connection statistics for health monitoring.
Returns:
- total_sessions: Number of active WebSocket connections
- unique_users: Number of unique authenticated users
- active_game_rooms: Number of games with connected players
- sessions_per_game: Breakdown of connections per game
- inactivity_distribution: Sessions grouped by inactivity period
- status: healthy (<50 sessions), warning (50-100), critical (>100)
"""
import pendulum
stats = connection_manager.get_stats()
# Determine health status based on session count
# Thresholds can be adjusted based on expected load
total = stats["total_sessions"]
if total > 100:
status = "critical"
elif total > 50:
status = "warning"
else:
status = "healthy"
# Include rate limiter stats
rate_limit_stats = rate_limiter.get_stats()
return {
"status": status,
**stats,
"rate_limiter": rate_limit_stats,
"ping_interval_seconds": settings.ws_heartbeat_interval,
"ping_timeout_seconds": settings.ws_connection_timeout,
"session_expiration_timeout_seconds": settings.ws_connection_timeout * 5,
"timestamp": pendulum.now("UTC").to_iso8601_string(),
}
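For operators, this endpoint is easy to poll from a script. A minimal example, assuming the backend listens on localhost:8000 (host and port are assumptions, not taken from this diff):

# Minimal poller for the endpoint above; host/port are assumptions.
import asyncio

import httpx

async def check_ws_health() -> None:
    async with httpx.AsyncClient() as client:
        resp = await client.get("http://localhost:8000/api/health/connections")
        data = resp.json()
        print(data["status"], data["total_sessions"], data["unique_users"])

asyncio.run(check_ws_health())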
if __name__ == "__main__":
    import uvicorn
View File
@@ -4,11 +4,19 @@ Pytest configuration for integration tests.
Provides shared fixtures for database testing with proper async session management.
Uses NullPool to avoid asyncpg connection reuse issues in tests.

Key Pattern: Session injection into DatabaseOperations
- Each test gets a fresh session (function scope)
- Session is injected into DatabaseOperations
- All operations use the same session (no connection conflicts)
- Session is rolled back after each test (isolation)

Reference: https://github.com/MagicStack/asyncpg/issues/863#issuecomment-1229220920
"""

import pytest
import pytest_asyncio
from uuid import uuid4
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import NullPool
@@ -19,10 +27,11 @@ from app.config import get_settings
settings = get_settings()

# Create test-specific engine with NullPool to avoid connection reuse issues
# Each test gets a fresh connection - prevents "another operation is in progress" errors
test_engine = create_async_engine(
    settings.database_url,
    poolclass=NullPool,
    echo=False,
)

# Create test-specific session factory
@@ -35,25 +44,148 @@
)
@pytest_asyncio.fixture(scope="function")
async def db_session():
    """
    Provide an isolated database session for each test.

    Creates a new session for each test function.
    The session is NOT automatically committed - tests must call
    await session.commit() if they want changes persisted.

    After the test, the session is rolled back to ensure isolation.
    """
    async with TestAsyncSessionLocal() as session:
        yield session
        # Rollback any uncommitted changes after each test
        await session.rollback()


@pytest_asyncio.fixture(scope="function")
async def db_ops(db_session: AsyncSession):
    """
    Provide DatabaseOperations instance with injected session.

    This is the key fixture for integration tests:
    - All database operations use the same session
    - No connection conflicts between operations
    - Session is rolled back after test (via db_session fixture)

    Usage in tests:
        async def test_something(db_ops):
            game = await db_ops.create_game(...)
            # All operations use the same session
            await db_ops.add_lineup(...)
    """
    return DatabaseOperations(db_session)
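The operations.py diff itself is suppressed at the end of this page, but the injection pattern this fixture relies on can be sketched. A hedged sketch, assuming the production/test split described in the commit message; the _create_game helper and exact method shapes are hypothetical:

# Hedged sketch of session injection in DatabaseOperations.
# AsyncSessionLocal is the production session factory from app.database.session;
# the _create_game helper below is hypothetical.
from sqlalchemy.ext.asyncio import AsyncSession

from app.database.session import AsyncSessionLocal

class DatabaseOperations:
    def __init__(self, session: AsyncSession | None = None) -> None:
        self._session = session  # injected (tests/transactions) or None (production)

    async def create_game(self, **fields):
        if self._session is not None:
            # Injected session: caller owns commit/rollback (test isolation)
            return await self._create_game(self._session, **fields)
        async with AsyncSessionLocal() as session:
            # Production path: one session, auto-commit per operation
            result = await self._create_game(session, **fields)
            await session.commit()
            return result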
@pytest.fixture
def unique_game_id():
    """Generate a unique game ID for each test."""
    return uuid4()
@pytest.fixture
def sample_game_id():
    """Alias for unique_game_id for backward compatibility."""
    return uuid4()
@pytest_asyncio.fixture(scope="function")
async def sample_game(db_ops: DatabaseOperations, db_session: AsyncSession):
"""
Create a sample game for testing.
Returns the game_id of the created game.
The game is created within the test session and will be
rolled back after the test completes.
"""
game_id = uuid4()
await db_ops.create_game(
game_id=game_id,
league_id="sba",
home_team_id=1,
away_team_id=2,
game_mode="friendly",
visibility="public",
)
# Flush to make the game visible in this session
await db_session.flush()
return game_id
@pytest_asyncio.fixture(scope="function")
async def sample_pd_game(db_ops: DatabaseOperations, db_session: AsyncSession):
"""
Create a sample PD league game for testing.
Returns the game_id of the created game.
"""
game_id = uuid4()
await db_ops.create_game(
game_id=game_id,
league_id="pd",
home_team_id=10,
away_team_id=20,
game_mode="friendly",
visibility="public",
)
await db_session.flush()
return game_id
@pytest_asyncio.fixture(scope="function")
async def game_with_lineup(db_ops: DatabaseOperations, db_session: AsyncSession):
"""
Create a game with full lineups for both teams.
Returns a dict with game_id and lineup entries.
Useful for tests that need a fully set up game.
"""
game_id = uuid4()
# Create game
await db_ops.create_game(
game_id=game_id,
league_id="sba",
home_team_id=1,
away_team_id=2,
game_mode="friendly",
visibility="public",
)
# Add home team lineup (9 players)
home_lineup = []
positions = ["P", "C", "1B", "2B", "3B", "SS", "LF", "CF", "RF"]
for i, pos in enumerate(positions):
lineup = await db_ops.add_sba_lineup_player(
game_id=game_id,
team_id=1,
player_id=100 + i,
position=pos,
batting_order=None if pos == "P" else i,
is_starter=True,
)
home_lineup.append(lineup)
# Add away team lineup (9 players)
away_lineup = []
for i, pos in enumerate(positions):
lineup = await db_ops.add_sba_lineup_player(
game_id=game_id,
team_id=2,
player_id=200 + i,
position=pos,
batting_order=None if pos == "P" else i,
is_starter=True,
)
away_lineup.append(lineup)
await db_session.flush()
return {
"game_id": game_id,
"home_lineup": home_lineup,
"away_lineup": away_lineup,
}
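A hypothetical test consuming these fixtures might read as follows; the assertions are illustrative only:

import pytest

@pytest.mark.asyncio
async def test_full_lineups_visible(db_ops, game_with_lineup):
    # Both lineups were written through the same injected session,
    # so they are visible here and rolled back after the test.
    assert len(game_with_lineup["home_lineup"]) == 9
    assert len(game_with_lineup["away_lineup"]) == 9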
View File
@@ -0,0 +1,269 @@
"""
Tests for Alembic database migrations.

Verifies that migrations can be applied and rolled back correctly.
These tests use the actual database and should be run with caution
on shared environments.

What: Tests for database migration integrity
Why: Ensures migrations can be applied cleanly on fresh databases
     and rolled back safely for disaster recovery
"""

import pytest
from alembic import command
from alembic.config import Config
from sqlalchemy import create_engine, inspect, text

from app.config import get_settings
@pytest.fixture
def alembic_config():
    """Create Alembic config pointing to the backend directory."""
    config = Config("/mnt/NV2/Development/strat-gameplay-webapp/backend/alembic.ini")
    return config
@pytest.fixture
def sync_engine():
    """Create synchronous engine for migration testing."""
    settings = get_settings()
    sync_url = settings.database_url.replace("+asyncpg", "+psycopg2")
    return create_engine(sync_url)
class TestMigrationHistory:
    """Tests for migration chain integrity."""

    def test_migration_history_valid(self, alembic_config):
        """
        Verify migration chain is valid and has no gaps.

        What: Checks that all migrations reference existing predecessors
        Why: Broken migration chains cause 'revision not found' errors
        """
        from alembic.script import ScriptDirectory

        script = ScriptDirectory.from_config(alembic_config)
        revisions = list(script.walk_revisions())

        # Should have at least 2 migrations (001 and 004)
        assert len(revisions) >= 2, "Expected at least 2 migrations"

        # All revisions except base should have a down_revision that exists
        for rev in revisions:
            if rev.down_revision is not None:
                # The down_revision should be in our revisions list
                down_revs = (
                    rev.down_revision
                    if isinstance(rev.down_revision, tuple)
                    else (rev.down_revision,)
                )
                for down_rev in down_revs:
                    assert script.get_revision(down_rev) is not None, (
                        f"Migration {rev.revision} references non-existent "
                        f"down_revision {down_rev}"
                    )

    def test_head_is_reachable(self, alembic_config):
        """
        Verify we can traverse from base to head.

        What: Ensures migration path from base to head is unbroken
        Why: Broken path means new databases cannot be fully initialized
        """
        from alembic.script import ScriptDirectory

        script = ScriptDirectory.from_config(alembic_config)
        heads = script.get_heads()

        assert len(heads) == 1, f"Expected single head, got {heads}"
        assert heads[0] == "004", f"Expected head to be 004, got {heads[0]}"
class TestMigrationContent:
    """Tests for migration content correctness."""

    def test_initial_migration_creates_all_tables(self, alembic_config):
        """
        Verify initial migration (001) creates all expected tables.

        What: Checks that 001 migration creates the full schema
        Why: Missing tables would cause application failures
        """
        from alembic.script import ScriptDirectory

        script = ScriptDirectory.from_config(alembic_config)
        rev_001 = script.get_revision("001")

        # Read the migration file
        import importlib.util

        spec = importlib.util.spec_from_file_location("001", rev_001.path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        # Check upgrade function exists
        assert hasattr(module, "upgrade"), "Migration 001 missing upgrade function"
        assert hasattr(module, "downgrade"), "Migration 001 missing downgrade function"

        # Get source code to verify table creation
        import inspect as py_inspect

        source = py_inspect.getsource(module.upgrade)

        expected_tables = [
            "games",
            "plays",
            "lineups",
            "game_sessions",
            "rolls",
            "roster_links",
            "game_cardset_links",
        ]
        for table in expected_tables:
            assert (
                f"'{table}'" in source
            ), f"Migration 001 missing table creation for '{table}'"

    def test_materialized_views_migration_content(self, alembic_config):
        """
        Verify materialized views migration (004) creates expected views.

        What: Checks that 004 migration creates statistics views
        Why: Missing views would break box score functionality
        """
        from alembic.script import ScriptDirectory

        script = ScriptDirectory.from_config(alembic_config)
        rev_004 = script.get_revision("004")

        import importlib.util

        spec = importlib.util.spec_from_file_location("004", rev_004.path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        import inspect as py_inspect

        source = py_inspect.getsource(module.upgrade)

        expected_views = [
            "batting_game_stats",
            "pitching_game_stats",
            "game_stats",
        ]
        for view in expected_views:
            assert (
                view in source
            ), f"Migration 004 missing materialized view '{view}'"
class TestCurrentDatabaseState:
    """Tests for current database migration state."""

    def test_database_at_head(self, sync_engine):
        """
        Verify database is at the latest migration revision.

        What: Checks alembic_version table shows current head
        Why: Ensures database schema is up to date
        """
        with sync_engine.connect() as conn:
            result = conn.execute(text("SELECT version_num FROM alembic_version"))
            versions = list(result)

        assert len(versions) == 1, f"Expected 1 version row, got {len(versions)}"
        assert versions[0][0] == "004", f"Expected version 004, got {versions[0][0]}"

    def test_all_tables_exist(self, sync_engine):
        """
        Verify all expected tables exist in the database.

        What: Inspects database to confirm table presence
        Why: Validates migrations were applied correctly
        """
        inspector = inspect(sync_engine)
        tables = set(inspector.get_table_names())

        expected_tables = {
            "games",
            "plays",
            "lineups",
            "game_sessions",
            "rolls",
            "roster_links",
            "game_cardset_links",
            "alembic_version",  # Created by Alembic
        }
        missing = expected_tables - tables
        assert not missing, f"Missing tables: {missing}"

    def test_materialized_views_exist(self, sync_engine):
        """
        Verify all materialized views exist in the database.

        What: Checks for statistics views created by migration 004
        Why: Box score functionality depends on these views
        """
        with sync_engine.connect() as conn:
            result = conn.execute(
                text(
                    """
                    SELECT matviewname FROM pg_matviews
                    WHERE schemaname = 'public'
                    ORDER BY matviewname
                    """
                )
            )
            views = {row[0] for row in result}

        expected_views = {
            "batting_game_stats",
            "pitching_game_stats",
            "game_stats",
        }
        missing = expected_views - views
        assert not missing, f"Missing materialized views: {missing}"
class TestMigrationDowngrade:
    """Tests for migration downgrade safety (non-destructive checks only)."""

    def test_downgrade_functions_exist(self, alembic_config):
        """
        Verify all migrations have downgrade functions.

        What: Checks each migration has a downgrade() implementation
        Why: Rollback capability is critical for disaster recovery
        """
        from alembic.script import ScriptDirectory

        script = ScriptDirectory.from_config(alembic_config)

        for rev in script.walk_revisions():
            import importlib.util

            spec = importlib.util.spec_from_file_location(rev.revision, rev.path)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)

            assert hasattr(
                module, "downgrade"
            ), f"Migration {rev.revision} missing downgrade function"

            import inspect as py_inspect

            source = py_inspect.getsource(module.downgrade)
            # Downgrade should not just be 'pass' for data-creating migrations
            if rev.revision in ["001", "004"]:
                assert "pass" not in source.replace(" ", "").replace("\n", ""), (
                    f"Migration {rev.revision} downgrade should not be empty"
                )
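These downgrade checks are deliberately non-destructive. Against a disposable database, a full round-trip could be exercised with Alembic's command API; a hedged sketch:

# Destructive round-trip sketch - run only against a throwaway database.
from alembic import command
from alembic.config import Config

def migration_round_trip(config_path: str) -> None:
    cfg = Config(config_path)
    command.downgrade(cfg, "base")  # exercises every downgrade() in order
    command.upgrade(cfg, "head")    # rebuilds the schema from migration 001 up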
File diff suppressed because it is too large