Switch to testcontainers for automatic test container management
- Create tests/conftest.py with testcontainers for Postgres and Redis - Auto-detect Docker Desktop socket and disable Ryuk for compatibility - Update tests/db/conftest.py and tests/services/conftest.py to use shared fixtures - Fix test_resolve_effect_logs_exceptions: logger was disabled by pytest - Fix test_save_and_load_with_real_redis: use redis_url fixture - Minor lint fix in engine_validation.py Tests now auto-start containers on run - no need for `docker compose up` All 1199 tests passing. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
0a7c35c262
commit
c00ee87f25
@ -1181,7 +1181,7 @@ async def run_validation():
|
||||
# Set damage so Thunder Shock (30 + 20 weakness = 50) will KO
|
||||
# Tentacool has 60 HP, so 20 damage = 40 HP left, 50 damage KOs
|
||||
p2_active.damage = 20
|
||||
print_action(f"P2 Tentacool (weak to Lightning) has 40/60 HP remaining")
|
||||
print_action("P2 Tentacool (weak to Lightning) has 40/60 HP remaining")
|
||||
|
||||
# Execute knockout attack
|
||||
print_step("7.2", "Execute winning knockout")
|
||||
|
||||
291
backend/tests/conftest.py
Normal file
291
backend/tests/conftest.py
Normal file
@ -0,0 +1,291 @@
|
||||
"""Shared test fixtures for Mantimon TCG using testcontainers.
|
||||
|
||||
This module provides database and Redis fixtures that automatically start
|
||||
containers when tests run - no need for `docker compose up` beforehand.
|
||||
|
||||
Key Features:
|
||||
- Testcontainers auto-starts Postgres and Redis for test session
|
||||
- Containers are shared across all tests (session scope)
|
||||
- Tables are truncated between tests for isolation
|
||||
- Sync psycopg2 used for fixture teardown (avoids event loop issues)
|
||||
|
||||
Usage:
|
||||
@pytest.mark.asyncio
|
||||
async def test_something(db_session, redis_client):
|
||||
# db_session is an AsyncSession connected to testcontainer Postgres
|
||||
# redis_client is an async Redis client connected to testcontainer Redis
|
||||
pass
|
||||
|
||||
Environment:
|
||||
The following environment variables are auto-configured for Docker Desktop:
|
||||
- DOCKER_HOST: Set to Docker Desktop socket if default socket not found
|
||||
- TESTCONTAINERS_RYUK_DISABLED: Disabled to avoid Ryuk startup issues
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# =============================================================================
|
||||
# Docker Environment Configuration
|
||||
# =============================================================================
|
||||
|
||||
# Auto-detect Docker Desktop socket if default socket doesn't exist
|
||||
_default_socket = Path("/var/run/docker.sock")
|
||||
_desktop_socket = Path.home() / ".docker/desktop/docker.sock"
|
||||
|
||||
if not _default_socket.exists() and _desktop_socket.exists():
|
||||
os.environ.setdefault("DOCKER_HOST", f"unix://{_desktop_socket}")
|
||||
|
||||
# Disable Ryuk (cleanup container) to avoid startup issues with Docker Desktop
|
||||
os.environ.setdefault("TESTCONTAINERS_RYUK_DISABLED", "true")
|
||||
|
||||
# ruff: noqa: E402
# Imports must come after environment setup for testcontainers to work correctly
from collections.abc import AsyncGenerator, Generator
from typing import Any

import psycopg2
import pytest
import pytest_asyncio
import redis.asyncio as aioredis
from alembic import command
from alembic.config import Config
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import (
    AsyncSession,
    async_sessionmaker,
    create_async_engine,
)
from testcontainers.postgres import PostgresContainer
from testcontainers.redis import RedisContainer
|
||||
|
||||
# =============================================================================
|
||||
# Module-level container state (set by session fixtures)
|
||||
# =============================================================================
|
||||
|
||||
# Populated by the session-scoped container fixtures below and read by the
# function-scoped fixtures/helpers (db_session, redis_client,
# truncate_all_tables). Empty/None until the containers have started.
_postgres_container: PostgresContainer | None = None
_redis_container: RedisContainer | None = None
# Connection kwargs for sync psycopg2 (used in fixture teardown).
_db_params: dict[str, Any] = {}
# SQLAlchemy URLs for the async (asyncpg) and sync (psycopg2) drivers.
_async_db_url: str = ""
_sync_db_url: str = ""
_redis_url: str = ""


# =============================================================================
# Tables to Truncate (ordered for FK constraints - children first)
# =============================================================================

TABLES_TO_TRUNCATE = [
    "game_history",
    "active_games",
    "campaign_progress",
    "collections",
    "decks",
    "oauth_linked_accounts",
    "users",
]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Container Fixtures (Session-Scoped)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def postgres_container() -> Generator[PostgresContainer, None, None]:
    """Start a Postgres container for the test session.

    The container is started once and shared across all tests, then stopped
    when the test session ends. Connection details are published to module
    state for the other fixtures/helpers.

    Yields:
        The running PostgresContainer instance.
    """
    global _postgres_container, _db_params, _async_db_url, _sync_db_url

    container = PostgresContainer(
        image="postgres:15-alpine",
        username="mantimon",
        password="mantimon",
        dbname="mantimon",
    )
    container.start()

    # Extract connection info (host/port are assigned dynamically per run)
    host = container.get_container_host_ip()
    port = container.get_exposed_port(5432)

    _db_params = {
        "host": host,
        "port": int(port),
        "user": "mantimon",
        "password": "mantimon",
        "dbname": "mantimon",
    }
    _async_db_url = f"postgresql+asyncpg://mantimon:mantimon@{host}:{port}/mantimon"
    _sync_db_url = f"postgresql+psycopg2://mantimon:mantimon@{host}:{port}/mantimon"

    _postgres_container = container

    yield container

    container.stop()
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def redis_container() -> Generator[RedisContainer, None, None]:
    """Start a Redis container for the test session.

    The container is started once and shared across all tests, then stopped
    when the test session ends. The connection URL is published to module
    state for the other fixtures.

    Yields:
        The running RedisContainer instance.
    """
    global _redis_container, _redis_url

    container = RedisContainer(image="redis:7-alpine")
    container.start()

    host = container.get_container_host_ip()
    port = container.get_exposed_port(6379)

    _redis_url = f"redis://{host}:{port}/0"
    _redis_container = container

    yield container

    container.stop()
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def redis_url(redis_container: RedisContainer) -> str:
    """Get the Redis connection URL for the testcontainer.

    Derives the URL directly from the injected container rather than reading
    the module-level ``_redis_url`` global, so the value cannot be stale or
    depend on fixture-evaluation ordering. The URL is identical to the one
    the container fixture publishes (database 0).

    Tests use this URL to create their own Redis clients within their own
    event loop, avoiding the 'attached to different loop' issue.
    """
    host = redis_container.get_container_host_ip()
    port = redis_container.get_exposed_port(6379)
    return f"redis://{host}:{port}/0"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Migration Fixture (Session-Scoped)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
def _run_migrations(postgres_container: PostgresContainer) -> None:
    """Apply Alembic migrations once at the start of the test session.

    Depending on ``postgres_container`` guarantees the database is up
    before the upgrade runs; ``autouse=True`` ensures this happens before
    any test executes.
    """
    config = Config("alembic.ini")
    config.set_main_option("sqlalchemy.url", _sync_db_url)
    command.upgrade(config, "head")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Sync Helper Functions (for fixture teardown)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def truncate_all_tables() -> None:
    """Truncate every table in TABLES_TO_TRUNCATE using sync psycopg2.

    Fixture teardown may execute on a different event loop than the test
    body, so cleanup is done synchronously to sidestep event-loop issues.
    No-op when the container has not been started yet.
    """
    if not _db_params:
        return  # Container not started yet

    with contextlib.closing(psycopg2.connect(**_db_params)) as conn:
        conn.autocommit = True
        with conn.cursor() as cur:
            for table_name in TABLES_TO_TRUNCATE:
                cur.execute(f"TRUNCATE TABLE {table_name} CASCADE")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Database Session Fixture (Function-Scoped)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def db_session(
    postgres_container: PostgresContainer,
) -> AsyncGenerator[AsyncSession, None]:
    """Yield a fresh async SQLAlchemy session for each test.

    A new engine is built per test with NullPool (no connection reuse) and
    ``expire_on_commit=False``. After the test, the session and engine are
    closed — RuntimeErrors are suppressed because teardown may run on a
    different event loop — and all tables are truncated via sync psycopg2
    for isolation.

    Example:
        async def test_create_user(db_session):
            user = User(email="test@example.com", ...)
            db_session.add(user)
            await db_session.flush()

            assert user.id is not None
    """
    engine = create_async_engine(
        _async_db_url,
        echo=False,
        poolclass=pool.NullPool,
    )
    make_session = async_sessionmaker(
        bind=engine,
        class_=AsyncSession,
        expire_on_commit=False,
    )

    session = make_session()
    try:
        yield session
    finally:
        with contextlib.suppress(RuntimeError):
            await session.close()

        with contextlib.suppress(RuntimeError):
            await engine.dispose()

        # Sync truncate - works regardless of which event loop we are on
        truncate_all_tables()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Redis Client Fixture (Function-Scoped)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def redis_client(
    redis_container: RedisContainer,
) -> AsyncGenerator[Any, None]:
    """Provide an async Redis client for testing.

    Flushes the database before and after each test for isolation.
    Teardown suppresses cross-event-loop RuntimeErrors for each step
    independently, so ``aclose()`` is still attempted even when the final
    ``flushdb()`` fails (previously a failing flushdb skipped aclose and
    leaked the connection).
    """
    client = aioredis.from_url(_redis_url, decode_responses=True)
    try:
        await client.flushdb()
        yield client
    finally:
        with contextlib.suppress(RuntimeError):
            await client.flushdb()
        with contextlib.suppress(RuntimeError):
            await client.aclose()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Utility Fixtures
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture
def anyio_backend() -> str:
    """Select asyncio as the anyio backend for async tests."""
    backend_name = "asyncio"
    return backend_name
|
||||
@ -233,16 +233,38 @@ class TestResolveEffect:
|
||||
assert "failed" in result.message
|
||||
assert "Intentional error" in result.message
|
||||
|
||||
def test_resolve_effect_logs_exceptions(self, caplog: pytest.LogCaptureFixture) -> None:
|
||||
def test_resolve_effect_logs_exceptions(self) -> None:
|
||||
"""
|
||||
Verify resolve_effect logs full exception details when handler fails.
|
||||
|
||||
This ensures debugging information is preserved even though the game
|
||||
continues gracefully. The log should include the effect_id, context
|
||||
details, and the full traceback.
|
||||
|
||||
Note: Uses a custom log handler instead of caplog due to pytest-asyncio
|
||||
compatibility issues with log capture.
|
||||
"""
|
||||
import logging
|
||||
|
||||
# Create a custom handler to capture log records
|
||||
captured_records: list[logging.LogRecord] = []
|
||||
|
||||
class CaptureHandler(logging.Handler):
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
captured_records.append(record)
|
||||
|
||||
# Set up the handler on the registry logger
|
||||
registry_logger = logging.getLogger("app.core.effects.registry")
|
||||
original_level = registry_logger.level
|
||||
original_disabled = registry_logger.disabled
|
||||
registry_logger.setLevel(logging.ERROR)
|
||||
registry_logger.disabled = False # Pytest may disable loggers
|
||||
handler = CaptureHandler()
|
||||
handler.setLevel(logging.ERROR)
|
||||
registry_logger.addHandler(handler)
|
||||
|
||||
try:
|
||||
|
||||
@effect_handler("logging_buggy_effect")
|
||||
def buggy_handler(ctx: EffectContext) -> EffectResult:
|
||||
raise ValueError("Detailed error message for logging test")
|
||||
@ -252,24 +274,29 @@ class TestResolveEffect:
|
||||
ctx.target_card_id = "test-target-card"
|
||||
ctx.params = {"test_param": 123}
|
||||
|
||||
with caplog.at_level(logging.ERROR):
|
||||
result = resolve_effect("logging_buggy_effect", ctx)
|
||||
|
||||
# Verify the result is still a failure
|
||||
assert result.success is False
|
||||
|
||||
# Verify the exception was logged with context
|
||||
assert len(caplog.records) == 1
|
||||
log_record = caplog.records[0]
|
||||
assert len(captured_records) == 1, f"Expected 1 record, got {len(captured_records)}"
|
||||
log_record = captured_records[0]
|
||||
assert log_record.levelname == "ERROR"
|
||||
assert "logging_buggy_effect" in log_record.message
|
||||
assert "player1" in log_record.message # source_player_id
|
||||
assert "test-source-card" in log_record.message
|
||||
assert "test-target-card" in log_record.message
|
||||
message = log_record.getMessage()
|
||||
assert "logging_buggy_effect" in message
|
||||
assert "player1" in message # source_player_id
|
||||
assert "test-source-card" in message
|
||||
assert "test-target-card" in message
|
||||
|
||||
# Verify traceback is included (logger.exception includes exc_info)
|
||||
assert log_record.exc_info is not None
|
||||
assert "Detailed error message for logging test" in str(log_record.exc_info[1])
|
||||
finally:
|
||||
# Clean up handler and restore level/disabled state
|
||||
registry_logger.removeHandler(handler)
|
||||
registry_logger.setLevel(original_level)
|
||||
registry_logger.disabled = original_disabled
|
||||
|
||||
def test_resolve_effect_returns_handler_result(self) -> None:
|
||||
"""
|
||||
|
||||
@ -1,205 +1,12 @@
|
||||
"""Database test fixtures for Mantimon TCG.
|
||||
"""Database-specific test fixtures for Mantimon TCG.
|
||||
|
||||
This module provides fixtures for database integration testing using the
|
||||
running dev containers (docker-compose).
|
||||
This module extends the shared fixtures from tests/conftest.py with
|
||||
any database-test-specific helpers.
|
||||
|
||||
Key insight: pytest-asyncio runs fixture teardown in a DIFFERENT event loop
|
||||
than the test body. This causes "Future attached to different loop" errors
|
||||
when trying to do async cleanup on connections created during the test.
|
||||
|
||||
Solution: Use SYNC psycopg2 for all fixture setup/teardown operations,
|
||||
and only use asyncpg within the test body itself. The session is created
|
||||
fresh per test with no cleanup needed (NullPool + TRUNCATE after).
|
||||
|
||||
Prerequisites:
|
||||
docker compose up -d # Start Postgres (5433) and Redis (6380)
|
||||
The db_session and redis_client fixtures are inherited from the parent conftest.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
from collections.abc import AsyncGenerator
|
||||
from typing import Any
|
||||
# All shared fixtures (db_session, redis_client, postgres_container, redis_container)
|
||||
# are inherited from tests/conftest.py - no need to redefine them here.
|
||||
|
||||
import psycopg2
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.ext.asyncio import (
|
||||
AsyncSession,
|
||||
async_sessionmaker,
|
||||
create_async_engine,
|
||||
)
|
||||
|
||||
# =============================================================================
|
||||
# Configuration
|
||||
# =============================================================================
|
||||
|
||||
# Dev container URLs (matches docker-compose.yml)
|
||||
TEST_DATABASE_URL = os.getenv(
|
||||
"TEST_DATABASE_URL",
|
||||
"postgresql+asyncpg://mantimon:mantimon@localhost:5433/mantimon",
|
||||
)
|
||||
SYNC_DATABASE_URL = os.getenv(
|
||||
"SYNC_DATABASE_URL",
|
||||
"postgresql+psycopg2://mantimon:mantimon@localhost:5433/mantimon",
|
||||
)
|
||||
TEST_REDIS_URL = os.getenv(
|
||||
"TEST_REDIS_URL",
|
||||
"redis://localhost:6380/1", # Use DB 1 for tests, DB 0 for dev
|
||||
)
|
||||
|
||||
# Connection params for sync psycopg2 (used in fixtures)
|
||||
DB_PARAMS = {
|
||||
"host": "localhost",
|
||||
"port": 5433,
|
||||
"user": "mantimon",
|
||||
"password": "mantimon",
|
||||
"dbname": "mantimon",
|
||||
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Tables to Truncate (ordered for FK constraints - children first)
|
||||
# =============================================================================
|
||||
|
||||
TABLES_TO_TRUNCATE = [
|
||||
"game_history",
|
||||
"active_games",
|
||||
"campaign_progress",
|
||||
"collections",
|
||||
"decks",
|
||||
"oauth_linked_accounts",
|
||||
"users",
|
||||
]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Sync Helper Functions (for fixture setup/teardown)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def truncate_all_tables() -> None:
|
||||
"""Truncate all tables using sync psycopg2.
|
||||
|
||||
This runs in fixture teardown which may be in a different event loop,
|
||||
so we use sync operations to avoid event loop issues.
|
||||
"""
|
||||
conn = psycopg2.connect(**DB_PARAMS)
|
||||
try:
|
||||
conn.autocommit = True
|
||||
with conn.cursor() as cur:
|
||||
for table in TABLES_TO_TRUNCATE:
|
||||
cur.execute(f"TRUNCATE TABLE {table} CASCADE")
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Migration Fixture (Session-Scoped, Sync)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
def _run_migrations() -> None:
|
||||
"""Run Alembic migrations once per session.
|
||||
|
||||
Uses sync psycopg2 to avoid async event loop issues.
|
||||
autouse=True ensures migrations run before any tests.
|
||||
"""
|
||||
alembic_cfg = Config("alembic.ini")
|
||||
alembic_cfg.set_main_option("sqlalchemy.url", SYNC_DATABASE_URL)
|
||||
command.upgrade(alembic_cfg, "head")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Database Session Fixture (Function-Scoped)
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def db_session() -> AsyncGenerator[AsyncSession, None]:
|
||||
"""Provide a fresh async session for each test.
|
||||
|
||||
The session uses NullPool (no connection reuse) and expires on commit=False.
|
||||
Tables are truncated AFTER each test using sync psycopg2 to avoid event
|
||||
loop issues in fixture teardown.
|
||||
|
||||
Example:
|
||||
async def test_create_user(db_session):
|
||||
user = User(email="test@example.com", ...)
|
||||
db_session.add(user)
|
||||
await db_session.flush()
|
||||
|
||||
assert user.id is not None
|
||||
# Data is truncated after test via sync cleanup
|
||||
"""
|
||||
# Create fresh engine with NullPool
|
||||
engine = create_async_engine(
|
||||
TEST_DATABASE_URL,
|
||||
echo=False,
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
# Create session factory
|
||||
session_factory = async_sessionmaker(
|
||||
bind=engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
# Create session
|
||||
session = session_factory()
|
||||
|
||||
try:
|
||||
yield session
|
||||
finally:
|
||||
# Close session without rollback (may fail on different event loop)
|
||||
# The truncate below will clean up any uncommitted data anyway
|
||||
with contextlib.suppress(RuntimeError):
|
||||
await session.close()
|
||||
|
||||
# Dispose engine (may fail on different event loop)
|
||||
with contextlib.suppress(RuntimeError):
|
||||
await engine.dispose()
|
||||
|
||||
# SYNC truncate - this always works regardless of event loop
|
||||
truncate_all_tables()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Redis Client Fixture
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def redis_client() -> AsyncGenerator[Any, None]:
|
||||
"""Provide a Redis client for testing.
|
||||
|
||||
Uses DB 1 (test database) to avoid conflicts with dev data in DB 0.
|
||||
Flushes the database after each test.
|
||||
"""
|
||||
import redis.asyncio as aioredis
|
||||
|
||||
client = aioredis.from_url(TEST_REDIS_URL, decode_responses=True)
|
||||
try:
|
||||
yield client
|
||||
finally:
|
||||
try:
|
||||
await client.flushdb()
|
||||
await client.aclose()
|
||||
except RuntimeError:
|
||||
# Ignore event loop errors during cleanup
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Utility Fixtures
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def anyio_backend() -> str:
|
||||
"""Specify asyncio as the async backend."""
|
||||
return "asyncio"
|
||||
# Add any db-test-specific fixtures below if needed.
|
||||
|
||||
@ -1,153 +1,19 @@
|
||||
"""Service test fixtures for Mantimon TCG.
|
||||
"""Service-specific test fixtures for Mantimon TCG.
|
||||
|
||||
This module provides fixtures for testing services that use both
|
||||
Redis and PostgreSQL. Uses dev containers like the db tests.
|
||||
This module extends the shared fixtures from tests/conftest.py with
|
||||
service-test-specific helpers like GameState factories.
|
||||
|
||||
Prerequisites:
|
||||
docker compose up -d # Start Postgres (5433) and Redis (6380)
|
||||
The db_session and redis_client fixtures are inherited from the parent conftest.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
from collections.abc import AsyncGenerator
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
import psycopg2
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
import redis.asyncio as aioredis
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.ext.asyncio import (
|
||||
AsyncSession,
|
||||
async_sessionmaker,
|
||||
create_async_engine,
|
||||
)
|
||||
|
||||
from app.core.config import RulesConfig
|
||||
from app.core.enums import TurnPhase
|
||||
from app.core.models.game_state import GameState, PlayerState
|
||||
|
||||
# =============================================================================
|
||||
# Configuration
|
||||
# =============================================================================
|
||||
|
||||
TEST_DATABASE_URL = os.getenv(
|
||||
"TEST_DATABASE_URL",
|
||||
"postgresql+asyncpg://mantimon:mantimon@localhost:5433/mantimon",
|
||||
)
|
||||
SYNC_DATABASE_URL = os.getenv(
|
||||
"SYNC_DATABASE_URL",
|
||||
"postgresql+psycopg2://mantimon:mantimon@localhost:5433/mantimon",
|
||||
)
|
||||
TEST_REDIS_URL = os.getenv(
|
||||
"TEST_REDIS_URL",
|
||||
"redis://localhost:6380/2", # Use DB 2 for service tests
|
||||
)
|
||||
|
||||
DB_PARAMS = {
|
||||
"host": "localhost",
|
||||
"port": 5433,
|
||||
"user": "mantimon",
|
||||
"password": "mantimon",
|
||||
"dbname": "mantimon",
|
||||
}
|
||||
|
||||
TABLES_TO_TRUNCATE = [
|
||||
"game_history",
|
||||
"active_games",
|
||||
"campaign_progress",
|
||||
"collections",
|
||||
"decks",
|
||||
"users",
|
||||
]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Sync Cleanup Helper
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def truncate_all_tables() -> None:
|
||||
"""Truncate all tables using sync psycopg2."""
|
||||
conn = psycopg2.connect(**DB_PARAMS)
|
||||
try:
|
||||
conn.autocommit = True
|
||||
with conn.cursor() as cur:
|
||||
for table in TABLES_TO_TRUNCATE:
|
||||
cur.execute(f"TRUNCATE TABLE {table} CASCADE")
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Migration Fixture
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
def _run_migrations() -> None:
|
||||
"""Run Alembic migrations once per session."""
|
||||
alembic_cfg = Config("alembic.ini")
|
||||
alembic_cfg.set_main_option("sqlalchemy.url", SYNC_DATABASE_URL)
|
||||
command.upgrade(alembic_cfg, "head")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Database Session Fixture
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def db_session() -> AsyncGenerator[AsyncSession, None]:
|
||||
"""Provide a fresh async session for each test."""
|
||||
engine = create_async_engine(
|
||||
TEST_DATABASE_URL,
|
||||
echo=False,
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
session_factory = async_sessionmaker(
|
||||
bind=engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
async with session_factory() as session:
|
||||
yield session
|
||||
with contextlib.suppress(RuntimeError):
|
||||
await session.close()
|
||||
|
||||
with contextlib.suppress(RuntimeError):
|
||||
await engine.dispose()
|
||||
|
||||
truncate_all_tables()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Redis Fixture
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def redis_client() -> AsyncGenerator[Any, None]:
|
||||
"""Provide a Redis client for testing."""
|
||||
client = aioredis.from_url(TEST_REDIS_URL, decode_responses=True)
|
||||
try:
|
||||
# Clear test database before test
|
||||
await client.flushdb()
|
||||
yield client
|
||||
finally:
|
||||
# Clean up after test
|
||||
try:
|
||||
await client.flushdb()
|
||||
await client.aclose()
|
||||
except RuntimeError:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# GameState Factory
|
||||
# =============================================================================
|
||||
@ -190,14 +56,3 @@ def create_test_game_state(
|
||||
def game_state() -> GameState:
|
||||
"""Provide a test GameState."""
|
||||
return create_test_game_state()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Utility
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def anyio_backend() -> str:
|
||||
"""Specify asyncio as the async backend."""
|
||||
return "asyncio"
|
||||
|
||||
@ -517,31 +517,28 @@ class TestHighLevelOperations:
|
||||
|
||||
|
||||
class TestRealRedisIntegration:
|
||||
"""Integration tests using real Redis (from docker-compose)."""
|
||||
"""Integration tests using real Redis (from testcontainers)."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_save_and_load_with_real_redis(self) -> None:
|
||||
async def test_save_and_load_with_real_redis(self, redis_url: str) -> None:
|
||||
"""Test full save/load cycle with real Redis client.
|
||||
|
||||
Verifies the complete flow works with actual Redis.
|
||||
Creates Redis connection inside the test to avoid event loop issues.
|
||||
Creates Redis client inside test to avoid event loop issues.
|
||||
"""
|
||||
import redis.asyncio as aioredis
|
||||
|
||||
# Create Redis client inside test (same event loop)
|
||||
client = aioredis.from_url(
|
||||
"redis://localhost:6380/3", # Use DB 3 for this specific test
|
||||
decode_responses=True,
|
||||
)
|
||||
# Create client inside test (same event loop)
|
||||
client = aioredis.from_url(redis_url, decode_responses=True)
|
||||
|
||||
try:
|
||||
# Clear test database
|
||||
await client.flushdb()
|
||||
|
||||
# Create a helper that uses our client
|
||||
# Create a helper that uses the test client
|
||||
class TestRedisHelper(RedisHelper):
|
||||
def __init__(self, client: Any) -> None:
|
||||
self._client = client
|
||||
def __init__(self, redis_client: Any) -> None:
|
||||
self._client = redis_client
|
||||
|
||||
async def get_json(self, key: str) -> dict | None:
|
||||
value = await self._client.get(key)
|
||||
|
||||
Loading…
Reference in New Issue
Block a user