# Changelog notes for this commit:
# - Fix /legal-check endpoint to handle card_ids passed as stringified list
# - Add compose.production.yml for akamai deployment (pd_api container)
# - Add migrate_missing_data.py script for filling gaps from initial migration
#!/usr/bin/env python3
"""
Migrate missing packs and cards from SQLite to PostgreSQL.

Run from sba-db where SQLite lives, connecting to akamai's PostgreSQL.
"""
import os
import sqlite3
import sys
from datetime import datetime

import psycopg2
from psycopg2.extras import execute_values
# PostgreSQL connection (sba_postgres container on same network).
# NOTE(security): credentials are hard-coded as fallbacks for this one-off
# migration script; prefer overriding them via the standard libpq
# environment variables (PGHOST, PGDATABASE, PGUSER, PGPASSWORD, PGPORT).
PG_CONFIG = {
    "host": os.environ.get("PGHOST", "sba_postgres"),
    "database": os.environ.get("PGDATABASE", "pd_master"),
    "user": os.environ.get("PGUSER", "pd_admin"),
    "password": os.environ.get("PGPASSWORD", "wJHZRZbO5NJBjhGfqydsZueV"),
    "port": int(os.environ.get("PGPORT", "5432")),
}

# Path to the SQLite production dump this script reads from.
SQLITE_PATH = os.environ.get("SQLITE_PATH", "/tmp/pd_sqlite_prod.db")
def get_missing_pack_ids(sqlite_conn, pg_conn):
    """Return the set of pack IDs present in SQLite but absent from PostgreSQL."""
    # Pack IDs on the SQLite (source) side.
    source_ids = {row[0] for row in sqlite_conn.execute("SELECT id FROM pack ORDER BY id").fetchall()}

    # Pack IDs already present in PostgreSQL (target).
    cursor = pg_conn.cursor()
    cursor.execute("SELECT id FROM pack ORDER BY id")
    target_ids = {row[0] for row in cursor.fetchall()}

    missing = source_ids - target_ids
    print(f"SQLite packs: {len(source_ids)}, PostgreSQL packs: {len(target_ids)}, Missing: {len(missing)}")
    return missing
def migrate_packs(sqlite_conn, pg_conn, pack_ids):
    """Migrate specific packs from SQLite to PostgreSQL.

    Args:
        sqlite_conn: open sqlite3 connection to the source database.
        pg_conn: open psycopg2 connection to the target database.
        pack_ids: iterable of pack primary keys to copy over.

    Returns:
        Number of rows actually inserted. Rows hitting foreign-key or
        unique violations are counted as skipped, not treated as fatal.
    """
    if not pack_ids:
        print("No packs to migrate")
        return 0

    pack_ids = list(pack_ids)
    batch_size = 500
    total_inserted = 0
    total_skipped = 0

    # Rows are inserted one-by-one in autocommit mode so a single bad row
    # (FK/unique violation) does not poison a surrounding transaction.
    # Ensure we're not in a transaction before setting autocommit.
    pg_conn.rollback()
    pg_conn.autocommit = True

    for i in range(0, len(pack_ids), batch_size):
        batch_ids = pack_ids[i:i + batch_size]

        # Fetch this batch of pack rows from SQLite.
        placeholders = ",".join("?" * len(batch_ids))
        sqlite_cur = sqlite_conn.execute(
            f"SELECT id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time FROM pack WHERE id IN ({placeholders})",
            tuple(batch_ids)
        )
        packs = sqlite_cur.fetchall()

        # Convert timestamps (milliseconds to datetime). Non-positive or
        # unconvertible values become NULL.
        converted_packs = []
        for pack_id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time in packs:
            if open_time and open_time > 0:
                try:
                    open_time = datetime.fromtimestamp(open_time / 1000)
                except (ValueError, OSError, OverflowError):
                    # Timestamp outside the platform-supported range.
                    open_time = None
            else:
                open_time = None
            converted_packs.append((pack_id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time))

        # Insert into PostgreSQL, skipping rows that conflict.
        pg_cur = pg_conn.cursor()
        inserted = 0
        skipped = 0

        for pack in converted_packs:
            try:
                pg_cur.execute(
                    "INSERT INTO pack (id, team_id, pack_type_id, pack_team_id, pack_cardset_id, open_time) VALUES (%s, %s, %s, %s, %s, %s)",
                    pack
                )
                inserted += 1
            except (psycopg2.errors.ForeignKeyViolation, psycopg2.errors.UniqueViolation):
                # Referenced row missing, or pack already migrated: skip.
                skipped += 1
            except Exception as e:
                print(f"Error inserting pack {pack[0]}: {e}")
                skipped += 1

        total_inserted += inserted
        total_skipped += skipped
        print(f" Pack batch {i//batch_size + 1}: {inserted} inserted, {skipped} skipped")

    # Reset the id sequence past the copied rows so future inserts don't
    # collide; COALESCE guards against an empty table (MAX(id) IS NULL).
    pg_conn.autocommit = False
    pg_cur = pg_conn.cursor()
    pg_cur.execute("SELECT setval('pack_id_seq', (SELECT COALESCE(MAX(id), 1) FROM pack), true)")
    pg_conn.commit()

    print(f"Packs total: {total_inserted} inserted, {total_skipped} skipped")
    return total_inserted
def get_missing_card_ids(sqlite_conn, pg_conn):
    """Return the set of card IDs present in SQLite but absent from PostgreSQL."""
    # Card IDs on the SQLite (source) side.
    source_ids = {row[0] for row in sqlite_conn.execute("SELECT id FROM card ORDER BY id").fetchall()}

    # Card IDs already present in PostgreSQL (target).
    cursor = pg_conn.cursor()
    cursor.execute("SELECT id FROM card ORDER BY id")
    target_ids = {row[0] for row in cursor.fetchall()}

    missing = source_ids - target_ids
    print(f"SQLite cards: {len(source_ids)}, PostgreSQL cards: {len(target_ids)}, Missing: {len(missing)}")
    return missing
def migrate_cards(sqlite_conn, pg_conn, card_ids):
    """Migrate specific cards from SQLite to PostgreSQL.

    Args:
        sqlite_conn: open sqlite3 connection to the source database.
        pg_conn: open psycopg2 connection to the target database.
        card_ids: iterable of card primary keys to copy over.

    Returns:
        Number of rows actually inserted. Rows hitting foreign-key or
        unique violations are counted as skipped, not treated as fatal.
    """
    if not card_ids:
        print("No cards to migrate")
        return 0

    # Process in batches
    card_ids = list(card_ids)
    batch_size = 1000
    total_inserted = 0
    total_skipped = 0

    # Rows are inserted one-by-one in autocommit mode so a single bad row
    # (FK/unique violation) does not poison a surrounding transaction.
    # Ensure we're not in a transaction before setting autocommit.
    pg_conn.rollback()
    pg_conn.autocommit = True

    for i in range(0, len(card_ids), batch_size):
        batch_ids = card_ids[i:i + batch_size]
        placeholders = ",".join("?" * len(batch_ids))

        # Fetch this batch of card rows from SQLite.
        sqlite_cur = sqlite_conn.execute(
            f"SELECT id, player_id, team_id, pack_id, value FROM card WHERE id IN ({placeholders})",
            tuple(batch_ids)
        )
        cards = sqlite_cur.fetchall()

        # Insert into PostgreSQL, skipping rows that conflict.
        pg_cur = pg_conn.cursor()
        inserted = 0
        skipped = 0

        for card in cards:
            try:
                pg_cur.execute(
                    "INSERT INTO card (id, player_id, team_id, pack_id, value) VALUES (%s, %s, %s, %s, %s)",
                    card
                )
                inserted += 1
            except (psycopg2.errors.ForeignKeyViolation, psycopg2.errors.UniqueViolation):
                # Referenced pack missing, or card already migrated: skip.
                skipped += 1
            except Exception as e:
                print(f"Error inserting card {card[0]}: {e}")
                skipped += 1

        total_inserted += inserted
        total_skipped += skipped
        print(f" Card batch {i//batch_size + 1}: {inserted} inserted, {skipped} skipped")

    # Reset the id sequence past the copied rows so future inserts don't
    # collide; COALESCE guards against an empty table (MAX(id) IS NULL).
    pg_conn.autocommit = False
    pg_cur = pg_conn.cursor()
    pg_cur.execute("SELECT setval('card_id_seq', (SELECT COALESCE(MAX(id), 1) FROM card), true)")
    pg_conn.commit()

    print(f"Cards total: {total_inserted} inserted, {total_skipped} skipped")
    return total_inserted
def main():
    """Copy packs and cards missing from PostgreSQL out of the SQLite dump.

    Connects to both databases, migrates packs before cards (cards
    reference packs via pack_id), then re-runs the diff as verification.
    """
    print("=" * 60)
    print("Migrating missing packs and cards")
    print("=" * 60)

    # Connect to databases
    sqlite_conn = sqlite3.connect(SQLITE_PATH)
    pg_conn = psycopg2.connect(**PG_CONFIG)

    try:
        # Migrate packs first
        print("\n--- PACKS ---")
        missing_packs = get_missing_pack_ids(sqlite_conn, pg_conn)
        if missing_packs:
            migrate_packs(sqlite_conn, pg_conn, missing_packs)

        # Then migrate cards
        print("\n--- CARDS ---")
        missing_cards = get_missing_card_ids(sqlite_conn, pg_conn)
        if missing_cards:
            migrate_cards(sqlite_conn, pg_conn, missing_cards)

        # Verify: both diffs should now report 0 missing.
        print("\n--- VERIFICATION ---")
        get_missing_pack_ids(sqlite_conn, pg_conn)
        get_missing_card_ids(sqlite_conn, pg_conn)
    finally:
        # Release both connections even if a migration step raises.
        sqlite_conn.close()
        pg_conn.close()

    print("\nDone!")
# Run the migration only when executed as a script, not when imported.
if __name__ == "__main__":
    main()