Add production deployment config and fix stringified list parsing

- Fix /legal-check endpoint to handle card_ids passed as stringified list
- Add compose.production.yml for akamai deployment (pd_api container)
- Add migrate_missing_data.py script for filling gaps from initial migration

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Cal Corum 2026-01-30 14:03:07 -06:00
parent b063b73f92
commit 23bf59e3db
3 changed files with 269 additions and 0 deletions


@@ -225,6 +225,14 @@ async def v1_cards_legal_check(
    if rarity_name not in CARDSETS.keys():
        return f'Rarity name {rarity_name} not a valid check'

    # Handle case where card_id is passed as a stringified list
    if card_id and len(card_id) == 1 and isinstance(card_id[0], str) and card_id[0].startswith('['):
        import ast
        try:
            card_id = [int(x) for x in ast.literal_eval(card_id[0])]
        except (ValueError, SyntaxError):
            pass
    bad_cards = []
    all_cards = Card.select().where(Card.id << card_id)

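For reference, the normalization in the hunk above can be exercised on its own. A minimal sketch, where the helper name and the sample inputs are illustrative rather than part of the endpoint:

# Standalone sketch of the stringified-list handling above (helper name and inputs are hypothetical).
import ast

def normalize_card_ids(card_id):
    # A single element that looks like "[1, 2, 3]" is parsed back into a list of ints.
    if card_id and len(card_id) == 1 and isinstance(card_id[0], str) and card_id[0].startswith('['):
        try:
            card_id = [int(x) for x in ast.literal_eval(card_id[0])]
        except (ValueError, SyntaxError):
            pass  # leave the value unchanged if it is not a well-formed list literal
    return card_id

assert normalize_card_ids(['[1, 2, 3]']) == [1, 2, 3]  # stringified list from a client
assert normalize_card_ids([1, 2, 3]) == [1, 2, 3]      # normal repeated query params pass through
assert normalize_card_ids(['[oops']) == ['[oops']      # malformed input falls through unchanged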
compose.production.yml Normal file

@@ -0,0 +1,42 @@
# Production deployment for Paper Dynasty API on akamai
# Uses shared sba_postgres database server
#
# Deployment:
#   scp compose.production.yml akamai:/opt/paper-dynasty/
#   ssh akamai "cd /opt/paper-dynasty && docker compose -f compose.production.yml up -d"

services:
  api:
    image: manticorum67/paper-dynasty-database:postgres-migration
    container_name: pd_api
    restart: unless-stopped
    ports:
      - "8002:80"
    volumes:
      - ./logs:/usr/src/app/logs
      - ./storage:/usr/src/app/storage
    environment:
      - DATABASE_TYPE=postgresql
      - POSTGRES_HOST=sba_postgres
      - POSTGRES_DB=pd_master
      - POSTGRES_USER=pd_admin
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-wJHZRZbO5NJBjhGfqydsZueV}
      - POSTGRES_PORT=5432
      - API_TOKEN=${API_TOKEN:-Tp3aO3jhYve5NJF1IqOmJTmk}
      - LOG_LEVEL=${LOG_LEVEL:-INFO}
      - TZ=America/Chicago
    networks:
      - sba-database_default
      - nginx-proxy-manager_npm_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:80/openapi.json"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s

networks:
  sba-database_default:
    external: true
  nginx-proxy-manager_npm_network:
    external: true

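The healthcheck curls /openapi.json from inside the container; the same endpoint is reachable from the host through the published port, which gives a quick post-deploy smoke test. A minimal sketch, assuming the 8002:80 mapping above and only the standard library:

# Post-deploy smoke test against the published port (host and port are assumptions from the mapping above).
import urllib.request

with urllib.request.urlopen("http://localhost:8002/openapi.json", timeout=10) as resp:
    print(resp.status)  # expect 200 once the pd_api container reports healthy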
migrate_missing_data.py Normal file

@@ -0,0 +1,219 @@
#!/usr/bin/env python3
"""
Migrate missing packs and cards from SQLite to PostgreSQL.
Run from sba-db where SQLite lives, connecting to akamai's PostgreSQL.
"""
import sqlite3
import psycopg2
from psycopg2.extras import execute_values
import sys
from datetime import datetime

# PostgreSQL connection (sba_postgres container on same network)
PG_CONFIG = {
    "host": "sba_postgres",
    "database": "pd_master",
    "user": "pd_admin",
    "password": "wJHZRZbO5NJBjhGfqydsZueV",
    "port": 5432,
}

SQLITE_PATH = "/tmp/pd_sqlite_prod.db"


def get_missing_pack_ids(sqlite_conn, pg_conn):
    """Find pack IDs that exist in SQLite but not PostgreSQL."""
    # Get all pack IDs from SQLite
    sqlite_cur = sqlite_conn.execute("SELECT id FROM pack ORDER BY id")
    sqlite_ids = set(row[0] for row in sqlite_cur.fetchall())

    # Get all pack IDs from PostgreSQL
    pg_cur = pg_conn.cursor()
    pg_cur.execute("SELECT id FROM pack ORDER BY id")
    pg_ids = set(row[0] for row in pg_cur.fetchall())

    missing = sqlite_ids - pg_ids
    print(f"SQLite packs: {len(sqlite_ids)}, PostgreSQL packs: {len(pg_ids)}, Missing: {len(missing)}")
    return missing


def migrate_packs(sqlite_conn, pg_conn, pack_ids):
    """Migrate specific packs from SQLite to PostgreSQL."""
    if not pack_ids:
        print("No packs to migrate")
        return 0

    pack_ids = list(pack_ids)
    batch_size = 500
    total_inserted = 0
    total_skipped = 0

    # Ensure we're not in a transaction before setting autocommit
    pg_conn.rollback()
    pg_conn.autocommit = True

    for i in range(0, len(pack_ids), batch_size):
        batch_ids = pack_ids[i:i + batch_size]

        # Get pack data from SQLite
        placeholders = ",".join("?" * len(batch_ids))
        sqlite_cur = sqlite_conn.execute(
            f"SELECT id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time FROM pack WHERE id IN ({placeholders})",
            tuple(batch_ids)
        )
        packs = sqlite_cur.fetchall()

        # Convert timestamps (milliseconds to datetime)
        converted_packs = []
        for pack in packs:
            id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time = pack
            if open_time and open_time > 0:
                try:
                    open_time = datetime.fromtimestamp(open_time / 1000)
                except (ValueError, OverflowError, OSError):
                    open_time = None
            else:
                open_time = None
            converted_packs.append((id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time))

        # Insert into PostgreSQL
        pg_cur = pg_conn.cursor()
        inserted = 0
        skipped = 0
        for pack in converted_packs:
            try:
                pg_cur.execute(
                    "INSERT INTO pack (id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time) VALUES (%s, %s, %s, %s, %s, %s)",
                    pack
                )
                inserted += 1
            except psycopg2.errors.ForeignKeyViolation:
                skipped += 1
            except psycopg2.errors.UniqueViolation:
                skipped += 1
            except Exception as e:
                print(f"Error inserting pack {pack[0]}: {e}")
                skipped += 1

        total_inserted += inserted
        total_skipped += skipped
        print(f" Pack batch {i//batch_size + 1}: {inserted} inserted, {skipped} skipped")

    # Reset sequence
    pg_conn.autocommit = False
    pg_cur = pg_conn.cursor()
    pg_cur.execute("SELECT setval('pack_id_seq', (SELECT MAX(id) FROM pack), true)")
    pg_conn.commit()

    print(f"Packs total: {total_inserted} inserted, {total_skipped} skipped")
    return total_inserted


def get_missing_card_ids(sqlite_conn, pg_conn):
    """Find card IDs that exist in SQLite but not PostgreSQL."""
    sqlite_cur = sqlite_conn.execute("SELECT id FROM card ORDER BY id")
    sqlite_ids = set(row[0] for row in sqlite_cur.fetchall())

    pg_cur = pg_conn.cursor()
    pg_cur.execute("SELECT id FROM card ORDER BY id")
    pg_ids = set(row[0] for row in pg_cur.fetchall())

    missing = sqlite_ids - pg_ids
    print(f"SQLite cards: {len(sqlite_ids)}, PostgreSQL cards: {len(pg_ids)}, Missing: {len(missing)}")
    return missing


def migrate_cards(sqlite_conn, pg_conn, card_ids):
    """Migrate specific cards from SQLite to PostgreSQL."""
    if not card_ids:
        print("No cards to migrate")
        return 0

    # Process in batches
    card_ids = list(card_ids)
    batch_size = 1000
    total_inserted = 0
    total_skipped = 0

    # Ensure we're not in a transaction before setting autocommit
    pg_conn.rollback()
    pg_conn.autocommit = True

    for i in range(0, len(card_ids), batch_size):
        batch_ids = card_ids[i:i + batch_size]

        placeholders = ",".join("?" * len(batch_ids))
        sqlite_cur = sqlite_conn.execute(
            f"SELECT id, player_id, team_id, pack_id, value FROM card WHERE id IN ({placeholders})",
            tuple(batch_ids)
        )
        cards = sqlite_cur.fetchall()

        pg_cur = pg_conn.cursor()
        inserted = 0
        skipped = 0
        for card in cards:
            try:
                pg_cur.execute(
                    "INSERT INTO card (id, player_id, team_id, pack_id, value) VALUES (%s, %s, %s, %s, %s)",
                    card
                )
                inserted += 1
            except psycopg2.errors.ForeignKeyViolation:
                skipped += 1
            except psycopg2.errors.UniqueViolation:
                skipped += 1
            except Exception as e:
                print(f"Error inserting card {card[0]}: {e}")
                skipped += 1

        total_inserted += inserted
        total_skipped += skipped
        print(f" Card batch {i//batch_size + 1}: {inserted} inserted, {skipped} skipped")

    # Reset sequence
    pg_conn.autocommit = False
    pg_cur = pg_conn.cursor()
    pg_cur.execute("SELECT setval('card_id_seq', (SELECT MAX(id) FROM card), true)")
    pg_conn.commit()

    print(f"Cards total: {total_inserted} inserted, {total_skipped} skipped")
    return total_inserted


def main():
    print("=" * 60)
    print("Migrating missing packs and cards")
    print("=" * 60)

    # Connect to databases
    sqlite_conn = sqlite3.connect(SQLITE_PATH)
    pg_conn = psycopg2.connect(**PG_CONFIG)

    # Migrate packs first
    print("\n--- PACKS ---")
    missing_packs = get_missing_pack_ids(sqlite_conn, pg_conn)
    if missing_packs:
        migrate_packs(sqlite_conn, pg_conn, missing_packs)

    # Then migrate cards
    print("\n--- CARDS ---")
    missing_cards = get_missing_card_ids(sqlite_conn, pg_conn)
    if missing_cards:
        migrate_cards(sqlite_conn, pg_conn, missing_cards)

    # Verify
    print("\n--- VERIFICATION ---")
    get_missing_pack_ids(sqlite_conn, pg_conn)
    get_missing_card_ids(sqlite_conn, pg_conn)

    sqlite_conn.close()
    pg_conn.close()
    print("\nDone!")


if __name__ == "__main__":
    main()
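Since get_missing_pack_ids and get_missing_card_ids only read and print, they double as a dry run before any inserts happen. A minimal sketch of that usage, assuming the script is importable as migrate_missing_data from the working directory:

# Dry run: report the gaps without migrating anything (module name assumed from the commit message).
import sqlite3
import psycopg2

from migrate_missing_data import PG_CONFIG, SQLITE_PATH, get_missing_pack_ids, get_missing_card_ids

sqlite_conn = sqlite3.connect(SQLITE_PATH)
pg_conn = psycopg2.connect(**PG_CONFIG)

get_missing_pack_ids(sqlite_conn, pg_conn)   # prints pack counts and the size of the gap
get_missing_card_ids(sqlite_conn, pg_conn)   # prints card counts and the size of the gap

sqlite_conn.close()
pg_conn.close()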