From 0cba52cea5d3e1c46b90455bbe051dfe22828c79 Mon Sep 17 00:00:00 2001 From: Cal Corum Date: Sun, 25 Jan 2026 23:05:54 -0600 Subject: [PATCH] PostgreSQL migration: Complete code preparation phase - Add db_helpers.py with cross-database upsert functions for SQLite/PostgreSQL - Replace 12 on_conflict_replace() calls with PostgreSQL-compatible upserts - Add unique indexes: StratPlay(game, play_num), Decision(game, pitcher) - Add max_length to Team model fields (abbrev, sname, lname) - Fix boolean comparison in teams.py (== 0/1 to == False/True) - Create migrate_to_postgres.py with ID-preserving migration logic - Create audit_sqlite.py for pre-migration data integrity checks - Add PROJECT_PLAN.json for migration tracking - Add .secrets/ to .gitignore for credentials Audit results: 658,963 records across 29 tables, 2,390 orphaned stats (expected) Based on Major Domo migration lessons learned (33 issues resolved there) --- .gitignore | 5 +- PROJECT_PLAN.json | 482 ++++ app/db_engine.py | 328 ++- app/db_helpers.py | 284 ++ app/routers_v2/battingcardratings.py | 660 +++-- app/routers_v2/battingcards.py | 156 +- app/routers_v2/cardpositions.py | 115 +- app/routers_v2/decisions.py | 205 +- app/routers_v2/gauntletrewards.py | 77 +- app/routers_v2/mlbplayers.py | 218 +- app/routers_v2/pitchingcardratings.py | 477 ++-- app/routers_v2/pitchingcards.py | 143 +- app/routers_v2/players.py | 831 ++++-- app/routers_v2/stratplays.py | 1465 ++++++---- app/routers_v2/teams.py | 1125 +++++--- main.py | 3638 ++++++++++++++++--------- scripts/audit_results.json | 92 + scripts/audit_sqlite.py | 564 ++++ scripts/migrate_to_postgres.py | 510 ++++ 19 files changed, 7987 insertions(+), 3388 deletions(-) create mode 100644 PROJECT_PLAN.json create mode 100644 app/db_helpers.py create mode 100644 scripts/audit_results.json create mode 100755 scripts/audit_sqlite.py create mode 100755 scripts/migrate_to_postgres.py diff --git a/.gitignore b/.gitignore index c4c247d..fa0691f 100644 --- a/.gitignore +++ b/.gitignore @@ -74,4 +74,7 @@ CLAUDE.md *.backup # PostgreSQL data directory (local mount) -postgres_data/ \ No newline at end of file +postgres_data/ + +# PostgreSQL credentials +.secrets/ diff --git a/PROJECT_PLAN.json b/PROJECT_PLAN.json new file mode 100644 index 0000000..53e7d30 --- /dev/null +++ b/PROJECT_PLAN.json @@ -0,0 +1,482 @@ +{ + "meta": { + "version": "1.1.0", + "created": "2026-01-25", + "lastUpdated": "2026-01-25", + "planType": "migration", + "description": "SQLite to PostgreSQL migration for Paper Dynasty database API", + "branch": "postgres-migration", + "totalEstimatedHours": 22, + "totalTasks": 16, + "completedTasks": 13 + }, + "context": { + "sourceDatabase": { + "type": "SQLite", + "file": "storage/pd_master.db", + "size": "110 MB", + "tables": 29, + "totalRecords": 515000, + "largestTable": { + "name": "stratplay", + "records": 332737 + } + }, + "targetDatabase": { + "type": "PostgreSQL 17", + "server": "sba_postgres (same server as Major Domo)", + "database": "pd_master", + "user": "pd_admin", + "credentialsFile": ".secrets/pd_admin_credentials.txt" + }, + "lessonsFromMajorDomo": [ + "CRITICAL: Primary key IDs must be explicitly preserved during migration", + "PostgreSQL GROUP BY requires ALL non-aggregated columns", + "Boolean fields cannot be summed directly - cast to integer first", + "Discord snowflake IDs must be strings, not integers (N/A for Paper Dynasty)", + "VARCHAR fields need explicit max_length", + "NULL constraints are stricter in PostgreSQL", + "Foreign key orphaned records need 
smart fallback handling", + "Reset sequences after ID-preserving inserts" + ], + "devServer": { + "access": "ssh sba-db", + "composeLocation": "cd container-data/dev-sba-database/" + } + }, + "categories": { + "critical": "Must complete before migration - blocks production", + "high": "Required for successful migration", + "medium": "Improves migration quality/reliability", + "low": "Polish and nice-to-have", + "completed": "Already done on postgres-migration branch" + }, + "tasks": [ + { + "id": "MIG-001", + "name": "Environment-based database configuration", + "description": "Add PostgreSQL support with environment variable switching between SQLite/PostgreSQL", + "category": "completed", + "priority": 1, + "completed": true, + "tested": true, + "dependencies": [], + "files": [ + { + "path": "app/db_engine.py", + "lines": [11, 35], + "issue": "Now supports DATABASE_TYPE env var for SQLite/PostgreSQL switching" + } + ], + "suggestedFix": "Already implemented with PooledPostgresqlDatabase", + "estimatedHours": 2, + "notes": "Includes connection pooling (20 max, 5-min stale timeout, autorollback)" + }, + { + "id": "MIG-002", + "name": "Add table_name to all models", + "description": "Explicit table naming for PostgreSQL compatibility", + "category": "completed", + "priority": 2, + "completed": true, + "tested": true, + "dependencies": [], + "files": [ + { + "path": "app/db_engine.py", + "lines": [], + "issue": "All 29 models now have Meta.table_name defined" + } + ], + "suggestedFix": "Already implemented", + "estimatedHours": 1, + "notes": "Prevents Peewee naming inconsistencies" + }, + { + "id": "MIG-003", + "name": "Fix GROUP BY queries for PostgreSQL", + "description": "PostgreSQL requires all non-aggregated SELECT fields in GROUP BY clause", + "category": "completed", + "priority": 3, + "completed": true, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "app/routers_v2/stratplays.py", + "lines": [342, 456, 645, 733], + "issue": "Conditionally build SELECT fields based on group_by parameter" + } + ], + "suggestedFix": "Already implemented - needs testing with all group_by variations", + "estimatedHours": 4, + "notes": "Pattern: only include non-aggregated fields that will be in GROUP BY" + }, + { + "id": "MIG-004", + "name": "Add psycopg2-binary dependency", + "description": "PostgreSQL adapter for Python", + "category": "completed", + "priority": 4, + "completed": true, + "tested": true, + "dependencies": [], + "files": [ + { + "path": "requirements.txt", + "lines": [], + "issue": "psycopg2-binary added" + } + ], + "suggestedFix": "Already implemented", + "estimatedHours": 0.1, + "notes": "" + }, + { + "id": "MIG-005", + "name": "Docker Compose for local testing", + "description": "Local PostgreSQL environment for development testing", + "category": "completed", + "priority": 5, + "completed": true, + "tested": true, + "dependencies": [], + "files": [ + { + "path": "docker-compose.yml", + "lines": [], + "issue": "PostgreSQL 17 + Adminer configured" + }, + { + "path": "QUICK_START.md", + "lines": [], + "issue": "Testing guide created" + } + ], + "suggestedFix": "Already implemented", + "estimatedHours": 1, + "notes": "Adminer on port 8081" + }, + { + "id": "MIG-006", + "name": "Migration script auto-detection", + "description": "db_migrations.py auto-selects PostgresqlMigrator or SqliteMigrator", + "category": "completed", + "priority": 6, + "completed": true, + "tested": false, + "dependencies": ["MIG-001"], + "files": [ + { + "path": "db_migrations.py", + "lines": 
[], + "issue": "Migrator selection based on DATABASE_TYPE" + } + ], + "suggestedFix": "Already implemented", + "estimatedHours": 0.5, + "notes": "" + }, + { + "id": "MIG-007", + "name": "Create data migration script with ID preservation", + "description": "CRITICAL: Migrate all data from SQLite to PostgreSQL while preserving primary key IDs exactly", + "category": "critical", + "priority": 1, + "completed": false, + "tested": false, + "dependencies": ["MIG-001", "MIG-002"], + "files": [ + { + "path": "scripts/migrate_to_postgres.py", + "lines": [], + "issue": "New file - must explicitly insert IDs and reset sequences" + } + ], + "suggestedFix": "1. Read all records from SQLite\n2. Insert into PostgreSQL with explicit ID values\n3. Reset PostgreSQL sequences: SELECT setval('table_id_seq', MAX(id))\n4. Validate record counts match\n5. Smart FK error handling (batch insert with individual fallback)", + "estimatedHours": 3, + "notes": "Major Domo's #1 lesson: Without explicit ID preservation, PostgreSQL auto-assigns sequential IDs starting from 1, causing all FK references to point to wrong records" + }, + { + "id": "MIG-008", + "name": "Fix on_conflict_replace() calls (Player model)", + "description": "Convert SQLite on_conflict_replace() to PostgreSQL on_conflict() for Player model", + "category": "critical", + "priority": 2, + "completed": false, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "main.py", + "lines": [1696], + "issue": "Player.insert_many(batch).on_conflict_replace()" + }, + { + "path": "app/routers_v2/players.py", + "lines": [808], + "issue": "Player.insert_many(batch).on_conflict_replace()" + } + ], + "suggestedFix": "Player.insert_many(batch).on_conflict(\n conflict_target=[Player.player_id],\n action='update',\n update={Player.p_name: EXCLUDED.p_name, ...all fields}\n).execute()", + "estimatedHours": 0.5, + "notes": "Player has explicit player_id primary key - straightforward" + }, + { + "id": "MIG-009", + "name": "Fix on_conflict_replace() calls (Card models)", + "description": "Convert SQLite on_conflict_replace() for BattingCard, PitchingCard, CardPosition, ratings", + "category": "critical", + "priority": 3, + "completed": false, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "app/routers_v2/battingcards.py", + "lines": [134], + "issue": "BattingCard - unique on (player, variant)" + }, + { + "path": "app/routers_v2/pitchingcards.py", + "lines": [130], + "issue": "PitchingCard - unique on (player, variant)" + }, + { + "path": "app/routers_v2/cardpositions.py", + "lines": [131], + "issue": "CardPosition - unique on (player, variant, position)" + }, + { + "path": "app/routers_v2/battingcardratings.py", + "lines": [549], + "issue": "BattingCardRatings - unique on (battingcard, vs_hand)" + }, + { + "path": "app/routers_v2/pitchingcardratings.py", + "lines": [432], + "issue": "PitchingCardRatings - unique on (pitchingcard, vs_hand)" + } + ], + "suggestedFix": "All have existing unique indexes - use those as conflict_target", + "estimatedHours": 2, + "notes": "These have many fields to update - consider helper function" + }, + { + "id": "MIG-010", + "name": "Fix on_conflict_replace() calls (Game models)", + "description": "Convert SQLite on_conflict_replace() for StratPlay, Decision, GauntletReward", + "category": "critical", + "priority": 4, + "completed": false, + "tested": false, + "dependencies": ["MIG-011"], + "files": [ + { + "path": "app/routers_v2/stratplays.py", + "lines": [1082], + "issue": "StratPlay - needs unique index on 
(game, play_num)" + }, + { + "path": "app/routers_v2/decisions.py", + "lines": [217], + "issue": "Decision - needs unique index on (game, pitcher)" + }, + { + "path": "main.py", + "lines": [4978], + "issue": "GauntletReward - investigate if id provided or needs refactor" + }, + { + "path": "app/routers_v2/gauntletrewards.py", + "lines": [127], + "issue": "GauntletReward - same as main.py" + } + ], + "suggestedFix": "Add unique indexes first (MIG-011), then implement on_conflict()", + "estimatedHours": 1.5, + "notes": "StratPlay and Decision need new unique indexes to be created first" + }, + { + "id": "MIG-011", + "name": "Add missing unique indexes for upserts", + "description": "Create unique indexes needed for PostgreSQL on_conflict() operations", + "category": "high", + "priority": 5, + "completed": false, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "app/db_engine.py", + "lines": [779, 848], + "issue": "Add unique indexes for StratPlay and Decision" + } + ], + "suggestedFix": "StratPlay: ModelIndex(StratPlay, (StratPlay.game, StratPlay.play_num), unique=True)\nDecision: ModelIndex(Decision, (Decision.game, Decision.pitcher), unique=True)", + "estimatedHours": 1, + "notes": "These are natural business keys - a play number should be unique within a game, and a pitcher should have one decision per game" + }, + { + "id": "MIG-012", + "name": "Fix on_conflict_replace() for MlbPlayer", + "description": "Convert or remove on_conflict_replace() for MlbPlayer", + "category": "medium", + "priority": 6, + "completed": false, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "app/routers_v2/mlbplayers.py", + "lines": [185], + "issue": "MlbPlayer.insert_many(batch).on_conflict_replace()" + } + ], + "suggestedFix": "Code already checks for duplicates before insert (lines 170-179) and raises HTTPException. The on_conflict_replace() may be unnecessary. Option 1: Remove it and use plain insert_many(). Option 2: Use on_conflict with id as target.", + "estimatedHours": 0.25, + "notes": "Low risk - pre-check rejects duplicates" + }, + { + "id": "MIG-013", + "name": "Fix boolean comparison in teams.py", + "description": "PostgreSQL requires True/False instead of 1/0 for boolean comparisons", + "category": "low", + "priority": 7, + "completed": false, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "app/routers_v2/teams.py", + "lines": [110, 112], + "issue": "Team.has_guide == 0 / Team.has_guide == 1" + } + ], + "suggestedFix": "Change to Team.has_guide == False / Team.has_guide == True", + "estimatedHours": 0.25, + "notes": "Peewee may handle this automatically, but explicit is better" + }, + { + "id": "MIG-014", + "name": "SQLite data integrity audit", + "description": "Check for NULL values, orphaned FKs, VARCHAR lengths before migration", + "category": "high", + "priority": 8, + "completed": false, + "tested": false, + "dependencies": [], + "files": [ + { + "path": "scripts/audit_sqlite.py", + "lines": [], + "issue": "New file - pre-migration data validation" + } + ], + "suggestedFix": "Create script to check:\n1. NULL values in NOT NULL fields\n2. Orphaned foreign key records\n3. VARCHAR field max lengths\n4. 
Table record counts for baseline", + "estimatedHours": 1.5, + "notes": "Major Domo found 206 orphaned decisions and VARCHAR violations" + }, + { + "id": "MIG-015", + "name": "Test on dev PostgreSQL server", + "description": "Full migration test on sba-db dev server with production data copy", + "category": "high", + "priority": 9, + "completed": false, + "tested": false, + "dependencies": ["MIG-007", "MIG-008", "MIG-009", "MIG-010", "MIG-014"], + "files": [], + "suggestedFix": "1. ssh sba-db\n2. Create pd_master database with pd_admin user\n3. Copy production SQLite to dev\n4. Run migration script\n5. Verify record counts\n6. Test API endpoints", + "estimatedHours": 3, + "notes": "Dev server access: ssh sba-db, then cd container-data/dev-sba-database/" + }, + { + "id": "MIG-016", + "name": "Production migration execution", + "description": "Execute migration on production server within maintenance window", + "category": "critical", + "priority": 10, + "completed": false, + "tested": false, + "dependencies": ["MIG-015"], + "files": [], + "suggestedFix": "1. Notify users of maintenance window\n2. Stop Paper Dynasty API\n3. Create SQLite backup\n4. Create pd_master database and pd_admin user\n5. Run migration script\n6. Verify data integrity\n7. Update docker-compose.yml with PostgreSQL env vars\n8. Start API\n9. Smoke test critical endpoints\n10. Announce migration complete", + "estimatedHours": 3, + "notes": "Downtime window: 1-4 hours. Have rollback plan ready." + } + ], + "quickWins": [ + { + "taskId": "MIG-013", + "estimatedMinutes": 15, + "impact": "Prevents boolean comparison issues in team queries" + }, + { + "taskId": "MIG-012", + "estimatedMinutes": 15, + "impact": "Simplify MlbPlayer insert logic" + } + ], + "productionBlockers": [ + { + "taskId": "MIG-007", + "reason": "Without ID-preserving migration, all foreign key references will break" + }, + { + "taskId": "MIG-008", + "reason": "Player upserts will fail without PostgreSQL-compatible syntax" + }, + { + "taskId": "MIG-009", + "reason": "Card data upserts will fail without PostgreSQL-compatible syntax" + }, + { + "taskId": "MIG-010", + "reason": "Game data upserts will fail without PostgreSQL-compatible syntax" + } + ], + "weeklyRoadmap": { + "week1": { + "theme": "Code Changes - Make PostgreSQL Compatible", + "tasks": ["MIG-007", "MIG-008", "MIG-009", "MIG-010", "MIG-011", "MIG-012", "MIG-013"], + "estimatedHours": 8.5 + }, + "week2": { + "theme": "Testing & Validation", + "tasks": ["MIG-014", "MIG-015"], + "estimatedHours": 4.5 + }, + "week3": { + "theme": "Production Migration", + "tasks": ["MIG-016"], + "estimatedHours": 3 + } + }, + "rollbackPlan": { + "triggers": [ + "Data corruption detected", + "More than 5% of endpoints failing", + "Performance more than 5x worse than SQLite", + "Critical functionality broken" + ], + "duringTesting": { + "steps": [ + "Set DATABASE_TYPE=sqlite", + "API immediately uses SQLite", + "No data loss - PostgreSQL was a copy" + ] + }, + "afterProduction": { + "steps": [ + "Stop API: docker-compose down", + "Update docker-compose.yml: DATABASE_TYPE=sqlite", + "Restore SQLite backup if needed", + "Start API: docker-compose up -d", + "Verify SQLite connectivity", + "Document issues for retry" + ], + "timeLimit": "24 hours from migration" + } + } +} diff --git a/app/db_engine.py b/app/db_engine.py index 23ecece..a0774e9 100644 --- a/app/db_engine.py +++ b/app/db_engine.py @@ -10,39 +10,36 @@ from peewee import ModelSelect from playhouse.shortcuts import model_to_dict # Database configuration - 
supports both SQLite and PostgreSQL -DATABASE_TYPE = os.environ.get('DATABASE_TYPE', 'sqlite') +DATABASE_TYPE = os.environ.get("DATABASE_TYPE", "sqlite") -if DATABASE_TYPE.lower() == 'postgresql': +if DATABASE_TYPE.lower() == "postgresql": from playhouse.pool import PooledPostgresqlDatabase + db = PooledPostgresqlDatabase( - os.environ.get('POSTGRES_DB', 'pd_master'), - user=os.environ.get('POSTGRES_USER', 'pd_admin'), - password=os.environ.get('POSTGRES_PASSWORD'), - host=os.environ.get('POSTGRES_HOST', 'localhost'), - port=int(os.environ.get('POSTGRES_PORT', '5432')), + os.environ.get("POSTGRES_DB", "pd_master"), + user=os.environ.get("POSTGRES_USER", "pd_admin"), + password=os.environ.get("POSTGRES_PASSWORD"), + host=os.environ.get("POSTGRES_HOST", "localhost"), + port=int(os.environ.get("POSTGRES_PORT", "5432")), max_connections=20, stale_timeout=300, # 5 minutes timeout=0, autoconnect=True, - autorollback=True # Automatically rollback failed transactions + autorollback=True, # Automatically rollback failed transactions ) else: # Default SQLite configuration for local development db = SqliteDatabase( - 'storage/pd_master.db', - pragmas={ - 'journal_mode': 'wal', - 'cache_size': -1 * 64000, - 'synchronous': 0 - } + "storage/pd_master.db", + pragmas={"journal_mode": "wal", "cache_size": -1 * 64000, "synchronous": 0}, ) -date = f'{datetime.now().year}-{datetime.now().month}-{datetime.now().day}' -log_level = logging.INFO if os.environ.get('LOG_LEVEL') == 'INFO' else 'WARN' +date = f"{datetime.now().year}-{datetime.now().month}-{datetime.now().day}" +log_level = logging.INFO if os.environ.get("LOG_LEVEL") == "INFO" else "WARN" logging.basicConfig( - filename=f'logs/database/{date}.log', - format='%(asctime)s - database - %(levelname)s - %(message)s', - level=log_level + filename=f"logs/database/{date}.log", + format="%(asctime)s - database - %(levelname)s - %(message)s", + level=log_level, ) # 2025, 2005 @@ -50,63 +47,66 @@ ranked_cardsets = [24, 25, 26, 27, 28, 29] LIVE_CARDSET_ID = 27 LIVE_PROMO_CARDSET_ID = 28 CARDSETS = { - 'ranked': { - 'primary': ranked_cardsets, - 'human': ranked_cardsets + "ranked": {"primary": ranked_cardsets, "human": ranked_cardsets}, + "minor-league": { + "primary": [27, 8], # 2005, Mario + "secondary": [24], # 2025 + "human": [x for x in range(1, 30)], }, - 'minor-league': { - 'primary': [27, 8], # 2005, Mario - 'secondary': [24], # 2025 - 'human': [x for x in range(1, 30)] + "major-league": { + "primary": [ + 27, + 28, + 24, + 25, + 13, + 14, + 6, + 8, + ], # 2005 + Promos, 2025 + Promos, 2018 + Promos, 2012, Mario + "secondary": [5, 3], # 2019, 2022 + "human": ranked_cardsets, }, - 'major-league': { - 'primary': [27, 28, 24, 25, 13, 14, 6, 8], # 2005 + Promos, 2025 + Promos, 2018 + Promos, 2012, Mario - 'secondary': [5, 3], # 2019, 2022 - 'human': ranked_cardsets + "hall-of-fame": {"primary": [x for x in range(1, 30)], "human": ranked_cardsets}, + "flashback": { + "primary": [13, 5, 1, 3, 8], # 2018, 2019, 2021, 2022, Mario + "secondary": [24], # 2025 + "human": [13, 5, 1, 3, 8], # 2018, 2019, 2021, 2022 }, - 'hall-of-fame': { - 'primary': [x for x in range(1, 30)], - 'human': ranked_cardsets + "gauntlet-3": { + "primary": [13], # 2018 + "secondary": [5, 11, 9], # 2019, 2016, 2023 + "human": [x for x in range(1, 30)], }, - 'flashback': { - 'primary': [13, 5, 1, 3, 8], # 2018, 2019, 2021, 2022, Mario - 'secondary': [24], # 2025 - 'human': [13, 5, 1, 3, 8] # 2018, 2019, 2021, 2022 + "gauntlet-4": { + "primary": [3, 6, 16], # 2022, 2013, Backyard Baseball + 
"secondary": [4, 9], # 2022 Promos, 2023 + "human": [3, 4, 6, 9, 15, 16], }, - 'gauntlet-3': { - 'primary': [13], # 2018 - 'secondary': [5, 11, 9], # 2019, 2016, 2023 - 'human': [x for x in range(1, 30)] + "gauntlet-5": { + "primary": [17, 8], # 2024, Mario + "secondary": [13], # 2018 + "human": [x for x in range(1, 30)], }, - 'gauntlet-4': { - 'primary': [3, 6, 16], # 2022, 2013, Backyard Baseball - 'secondary': [4, 9], # 2022 Promos, 2023 - 'human': [3, 4, 6, 9, 15, 16] + "gauntlet-6": { + "primary": [20, 8], # 1998, Mario + "secondary": [12], # 2008 + "human": [x for x in range(1, 30)], }, - 'gauntlet-5': { - 'primary': [17, 8], # 2024, Mario - 'secondary': [13], # 2018 - 'human': [x for x in range(1, 30)] + "gauntlet-7": { + "primary": [5, 23], # 2019, Brilliant Stars + "secondary": [1], # 2021 + "human": [x for x in range(1, 30)], }, - 'gauntlet-6': { - 'primary': [20, 8], # 1998, Mario - 'secondary': [12], # 2008 - 'human': [x for x in range(1, 30)] + "gauntlet-8": { + "primary": [24], # 2025 + "secondary": [17], + "human": [24, 25, 22, 23], }, - 'gauntlet-7': { - 'primary': [5, 23], # 2019, Brilliant Stars - 'secondary': [1], # 2021 - 'human': [x for x in range(1, 30)] + "gauntlet-9": { + "primary": [27], # 2005 + "secondary": [24], # 2025 }, - 'gauntlet-8': { - 'primary': [24], # 2025 - 'secondary': [17], - 'human': [24, 25, 22, 23] - }, - 'gauntlet-9': { - 'primary': [27], # 2005 - 'secondary': [24] # 2025 - } } @@ -122,7 +122,7 @@ def model_to_csv(this_obj, exclude=None) -> List: def query_to_csv(all_items: ModelSelect, exclude=None): if all_items.count() == 0: - data_list = [['No data found']] + data_list = [["No data found"]] else: data_list = [model_csv_headers(all_items[0], exclude=exclude)] for x in all_items: @@ -133,29 +133,29 @@ def query_to_csv(all_items: ModelSelect, exclude=None): def complex_data_to_csv(complex_data: List): if len(complex_data) == 0: - data_list = [['No data found']] + data_list = [["No data found"]] else: data_list = [[x for x in complex_data[0].keys()]] for line in complex_data: - logging.debug(f'line: {line}') + logging.debug(f"line: {line}") this_row = [] for key in line: - logging.debug(f'key: {key}') + logging.debug(f"key: {key}") if line[key] is None: - this_row.append('') + this_row.append("") elif isinstance(line[key], dict): - if 'name' in line[key]: - this_row.append(line[key]['name']) - elif 'abbrev' in line[key]: - this_row.append(line[key]['abbrev']) + if "name" in line[key]: + this_row.append(line[key]["name"]) + elif "abbrev" in line[key]: + this_row.append(line[key]["abbrev"]) else: - this_row.append(line[key]['id']) + this_row.append(line[key]["id"]) elif isinstance(line[key], int) and line[key] > 100000000: this_row.append(f"'{line[key]}") - elif isinstance(line[key], str) and ',' in line[key]: + elif isinstance(line[key], str) and "," in line[key]: this_row.append(line[key].replace(",", "-_-")) else: @@ -180,7 +180,7 @@ class Current(BaseModel): class Meta: database = db - table_name = 'current' + table_name = "current" @staticmethod def latest(): @@ -198,7 +198,7 @@ class Rarity(BaseModel): class Meta: database = db - table_name = 'rarity' + table_name = "rarity" def __str__(self): return self.name @@ -217,7 +217,7 @@ class Event(BaseModel): class Meta: database = db - table_name = 'event' + table_name = "event" db.create_tables([Event]) @@ -234,7 +234,7 @@ class Cardset(BaseModel): class Meta: database = db - table_name = 'cardset' + table_name = "cardset" def __str__(self): return self.name @@ -254,7 +254,7 @@ class 
MlbPlayer(BaseModel): class Meta: database = db - table_name = 'mlbplayer' + table_name = "mlbplayer" db.create_tables([MlbPlayer]) @@ -289,7 +289,7 @@ class Player(BaseModel): mlbplayer = ForeignKeyField(MlbPlayer, null=True) def __str__(self): - return f'{self.cardset} {self.p_name} ({self.rarity.name})' + return f"{self.cardset} {self.p_name} ({self.rarity.name})" # def __eq__(self, other): # if self.cardset.id == other.cardset.id and self.name == other.name: @@ -310,21 +310,21 @@ class Player(BaseModel): def get_all_pos(self): all_pos = [] - if self.pos_1 and self.pos_1 != 'CP': + if self.pos_1 and self.pos_1 != "CP": all_pos.append(self.pos_1) - if self.pos_2 and self.pos_2 != 'CP': + if self.pos_2 and self.pos_2 != "CP": all_pos.append(self.pos_2) - if self.pos_3 and self.pos_3 != 'CP': + if self.pos_3 and self.pos_3 != "CP": all_pos.append(self.pos_3) - if self.pos_4 and self.pos_4 != 'CP': + if self.pos_4 and self.pos_4 != "CP": all_pos.append(self.pos_4) - if self.pos_5 and self.pos_5 != 'CP': + if self.pos_5 and self.pos_5 != "CP": all_pos.append(self.pos_5) - if self.pos_6 and self.pos_6 != 'CP': + if self.pos_6 and self.pos_6 != "CP": all_pos.append(self.pos_6) - if self.pos_7 and self.pos_7 != 'CP': + if self.pos_7 and self.pos_7 != "CP": all_pos.append(self.pos_7) - if self.pos_8 and self.pos_8 != 'CP': + if self.pos_8 and self.pos_8 != "CP": all_pos.append(self.pos_8) return all_pos @@ -338,33 +338,33 @@ class Player(BaseModel): # 'mvp': 2500, # 'hof': 999999999 # } - logging.info(f'{self.p_name} cost changing from: {self.cost}') - self.cost = max(math.floor(self.cost * .95), 1) + logging.info(f"{self.p_name} cost changing from: {self.cost}") + self.cost = max(math.floor(self.cost * 0.95), 1) # if self.quantity != 999: # self.quantity += 1 - logging.info(f'{self.p_name} cost now: {self.cost}') + logging.info(f"{self.p_name} cost now: {self.cost}") self.save() def change_on_buy(self): - logging.info(f'{self.p_name} cost changing from: {self.cost}') + logging.info(f"{self.p_name} cost changing from: {self.cost}") self.cost = math.ceil(self.cost * 1.1) # if self.quantity != 999: # self.quantity -= 1 - logging.info(f'{self.p_name} cost now: {self.cost}') + logging.info(f"{self.p_name} cost now: {self.cost}") self.save() class Meta: database = db - table_name = 'player' + table_name = "player" db.create_tables([Player]) class Team(BaseModel): - abbrev = CharField() - sname = CharField() - lname = CharField() + abbrev = CharField(max_length=20) # Gauntlet teams use prefixes like "Gauntlet-NCB" + sname = CharField(max_length=100) + lname = CharField(max_length=255) gmid = IntegerField() gmname = CharField() gsheet = CharField() @@ -381,7 +381,7 @@ class Team(BaseModel): is_ai = IntegerField(null=True) def __str__(self): - return f'S{self.season} {self.lname}' + return f"S{self.season} {self.lname}" @staticmethod def get_by_owner(gmid, season=None): @@ -407,13 +407,13 @@ class Team(BaseModel): return Team.get_or_none(Team.season == season, Team.abbrev == abbrev.upper()) def team_hash(self): - hash_string = f'{self.sname[-1]}{self.gmid / 6950123:.0f}{self.sname[-2]}{self.gmid / 42069123:.0f}' - logging.info(f'string: {hash_string}') + hash_string = f"{self.sname[-1]}{self.gmid / 6950123:.0f}{self.sname[-2]}{self.gmid / 42069123:.0f}" + logging.info(f"string: {hash_string}") return hash_string class Meta: database = db - table_name = 'team' + table_name = "team" db.create_tables([Team]) @@ -428,7 +428,7 @@ class PackType(BaseModel): class Meta: database = db - table_name = 'packtype' 
+ table_name = "packtype" db.create_tables([PackType]) @@ -443,7 +443,7 @@ class Pack(BaseModel): class Meta: database = db - table_name = 'pack' + table_name = "pack" db.create_tables([Pack]) @@ -457,9 +457,9 @@ class Card(BaseModel): def __str__(self): if self.player: - return f'{self.player} - {self.team.sname}' + return f"{self.player} - {self.team.sname}" else: - return f'Blank - {self.team.sname}' + return f"Blank - {self.team.sname}" @staticmethod def select_season(season): @@ -467,7 +467,7 @@ class Card(BaseModel): class Meta: database = db - table_name = 'card' + table_name = "card" db.create_tables([Card]) @@ -505,7 +505,7 @@ class Roster(BaseModel): card_26 = ForeignKeyField(Card) def __str__(self): - return f'{self.team} Roster' + return f"{self.team} Roster" # def get_cards(self, team): # all_cards = Card.select().where(Card.roster == self) @@ -519,7 +519,7 @@ class Roster(BaseModel): class Meta: database = db - table_name = 'roster' + table_name = "roster" class Result(BaseModel): @@ -546,7 +546,7 @@ class Result(BaseModel): class Meta: database = db - table_name = 'result' + table_name = "result" class BattingStat(BaseModel): @@ -589,7 +589,7 @@ class BattingStat(BaseModel): class Meta: database = db - table_name = 'battingstat' + table_name = "battingstat" class PitchingStat(BaseModel): @@ -623,7 +623,7 @@ class PitchingStat(BaseModel): class Meta: database = db - table_name = 'pitchingstat' + table_name = "pitchingstat" class Award(BaseModel): @@ -636,17 +636,17 @@ class Award(BaseModel): class Meta: database = db - table_name = 'award' + table_name = "award" class Paperdex(BaseModel): team = ForeignKeyField(Team) player = ForeignKeyField(Player) - created = DateTimeField(default=int(datetime.timestamp(datetime.now())*1000)) + created = DateTimeField(default=int(datetime.timestamp(datetime.now()) * 1000)) class Meta: database = db - table_name = 'paperdex' + table_name = "paperdex" # def add_to_paperdex(self, team, cards: list): # for x in players: @@ -665,7 +665,7 @@ class Reward(BaseModel): class Meta: database = db - table_name = 'reward' + table_name = "reward" class GameRewards(BaseModel): @@ -676,7 +676,7 @@ class GameRewards(BaseModel): class Meta: database = db - table_name = 'gamerewards' + table_name = "gamerewards" class Notification(BaseModel): @@ -690,7 +690,7 @@ class Notification(BaseModel): class Meta: database = db - table_name = 'notification' + table_name = "notification" class GauntletReward(BaseModel): @@ -702,7 +702,7 @@ class GauntletReward(BaseModel): class Meta: database = db - table_name = 'gauntletreward' + table_name = "gauntletreward" class GauntletRun(BaseModel): @@ -711,18 +711,29 @@ class GauntletRun(BaseModel): wins = IntegerField(default=0) losses = IntegerField(default=0) gsheet = CharField(null=True) - created = DateTimeField(default=int(datetime.timestamp(datetime.now())*1000)) + created = DateTimeField(default=int(datetime.timestamp(datetime.now()) * 1000)) ended = DateTimeField(default=0) class Meta: database = db - table_name = 'gauntletrun' + table_name = "gauntletrun" -db.create_tables([ - Roster, BattingStat, PitchingStat, Result, Award, Paperdex, Reward, GameRewards, Notification, GauntletReward, - GauntletRun -]) +db.create_tables( + [ + Roster, + BattingStat, + PitchingStat, + Result, + Award, + Paperdex, + Reward, + GameRewards, + Notification, + GauntletReward, + GauntletRun, + ] +) class BattingCard(BaseModel): @@ -736,20 +747,22 @@ class BattingCard(BaseModel): hit_and_run = CharField() running = IntegerField() offense_col 
= IntegerField() - hand = CharField(default='R') + hand = CharField(default="R") class Meta: database = db - table_name = 'battingcard' + table_name = "battingcard" -bc_index = ModelIndex(BattingCard, (BattingCard.player, BattingCard.variant), unique=True) +bc_index = ModelIndex( + BattingCard, (BattingCard.player, BattingCard.variant), unique=True +) BattingCard.add_index(bc_index) class BattingCardRatings(BaseModel): battingcard = ForeignKeyField(BattingCard) - vs_hand = CharField(default='R') + vs_hand = CharField(default="R") pull_rate = FloatField() center_rate = FloatField() slap_rate = FloatField() @@ -781,11 +794,13 @@ class BattingCardRatings(BaseModel): class Meta: database = db - table_name = 'battingcardratings' + table_name = "battingcardratings" bcr_index = ModelIndex( - BattingCardRatings, (BattingCardRatings.battingcard, BattingCardRatings.vs_hand), unique=True + BattingCardRatings, + (BattingCardRatings.battingcard, BattingCardRatings.vs_hand), + unique=True, ) BattingCardRatings.add_index(bcr_index) @@ -801,20 +816,22 @@ class PitchingCard(BaseModel): closer_rating = IntegerField(null=True) batting = CharField(null=True) offense_col = IntegerField() - hand = CharField(default='R') + hand = CharField(default="R") class Meta: database = db - table_name = 'pitchingcard' + table_name = "pitchingcard" -pc_index = ModelIndex(PitchingCard, (PitchingCard.player, PitchingCard.variant), unique=True) +pc_index = ModelIndex( + PitchingCard, (PitchingCard.player, PitchingCard.variant), unique=True +) PitchingCard.add_index(pc_index) class PitchingCardRatings(BaseModel): pitchingcard = ForeignKeyField(PitchingCard) - vs_hand = CharField(default='R') + vs_hand = CharField(default="R") homerun = FloatField() bp_homerun = FloatField() triple = FloatField() @@ -848,11 +865,13 @@ class PitchingCardRatings(BaseModel): class Meta: database = db - table_name = 'pitchingcardratings' + table_name = "pitchingcardratings" pcr_index = ModelIndex( - PitchingCardRatings, (PitchingCardRatings.pitchingcard, PitchingCardRatings.vs_hand), unique=True + PitchingCardRatings, + (PitchingCardRatings.pitchingcard, PitchingCardRatings.vs_hand), + unique=True, ) PitchingCardRatings.add_index(pcr_index) @@ -870,16 +889,20 @@ class CardPosition(BaseModel): class Meta: database = db - table_name = 'cardposition' + table_name = "cardposition" pos_index = ModelIndex( - CardPosition, (CardPosition.player, CardPosition.variant, CardPosition.position), unique=True + CardPosition, + (CardPosition.player, CardPosition.variant, CardPosition.position), + unique=True, ) CardPosition.add_index(pos_index) -db.create_tables([BattingCard, BattingCardRatings, PitchingCard, PitchingCardRatings, CardPosition]) +db.create_tables( + [BattingCard, BattingCardRatings, PitchingCard, PitchingCardRatings, CardPosition] +) class StratGame(BaseModel): @@ -900,7 +923,7 @@ class StratGame(BaseModel): class Meta: database = db - table_name = 'stratgame' + table_name = "stratgame" class StratPlay(BaseModel): @@ -973,7 +996,15 @@ class StratPlay(BaseModel): class Meta: database = db - table_name = 'stratplay' + table_name = "stratplay" + + +# Unique index for StratPlay - a play number should be unique within a game +# Required for PostgreSQL on_conflict() upsert operations +stratplay_index = ModelIndex( + StratPlay, (StratPlay.game, StratPlay.play_num), unique=True +) +StratPlay.add_index(stratplay_index) class Decision(BaseModel): @@ -995,7 +1026,13 @@ class Decision(BaseModel): class Meta: database = db - table_name = 'decision' + table_name 
= "decision" + + +# Unique index for Decision - one decision per pitcher per game +# Required for PostgreSQL on_conflict() upsert operations +decision_index = ModelIndex(Decision, (Decision.game, Decision.pitcher), unique=True) +Decision.add_index(decision_index) db.create_tables([StratGame, StratPlay, Decision]) @@ -1176,4 +1213,3 @@ db.close() # # # scout_db.close() - diff --git a/app/db_helpers.py b/app/db_helpers.py new file mode 100644 index 0000000..666dc52 --- /dev/null +++ b/app/db_helpers.py @@ -0,0 +1,284 @@ +""" +Database helper functions for PostgreSQL compatibility. + +This module provides cross-database compatible upsert operations that work +with both SQLite and PostgreSQL. + +The key difference: +- SQLite: .on_conflict_replace() works directly +- PostgreSQL: Requires .on_conflict() with explicit conflict_target and update dict + +Usage: + from app.db_helpers import upsert_many, DATABASE_TYPE + + # Instead of: + Model.insert_many(batch).on_conflict_replace().execute() + + # Use: + upsert_many(Model, batch, conflict_fields=['field1', 'field2']) +""" + +import os +from typing import Any, Dict, List, Type, Union + +from peewee import Model, SQL + +# Re-export DATABASE_TYPE for convenience +DATABASE_TYPE = os.environ.get("DATABASE_TYPE", "sqlite").lower() + + +def get_model_fields(model: Type[Model], exclude: List[str] = None) -> List[str]: + """ + Get all field names for a model, excluding specified fields. + + Args: + model: Peewee Model class + exclude: Field names to exclude (e.g., primary key) + + Returns: + List of field names + """ + if exclude is None: + exclude = [] + + return [ + field.name for field in model._meta.sorted_fields if field.name not in exclude + ] + + +def upsert_many( + model: Type[Model], + data: List[Dict[str, Any]], + conflict_fields: List[str], + update_fields: List[str] = None, + batch_size: int = 100, +) -> int: + """ + Insert or update multiple records in a database-agnostic way. + + Works with both SQLite (on_conflict_replace) and PostgreSQL (on_conflict). 
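+
+    Note: on PostgreSQL the conflict_fields must already be backed by a
+    unique index or constraint, otherwise the ON CONFLICT clause errors out;
+    MIG-011 adds the StratPlay (game, play_num) and Decision (game, pitcher)
+    indexes for exactly this reason.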
+ + Args: + model: Peewee Model class + data: List of dictionaries with field values + conflict_fields: Fields that define uniqueness (for PostgreSQL ON CONFLICT) + update_fields: Fields to update on conflict (defaults to all non-conflict fields) + batch_size: Number of records per batch + + Returns: + Number of records processed + + Example: + # For BattingCard with unique constraint on (player, variant) + upsert_many( + BattingCard, + batch_data, + conflict_fields=['player', 'variant'] + ) + """ + if not data: + return 0 + + total = 0 + + # Determine update fields if not specified + if update_fields is None: + # Get primary key name + pk_name = model._meta.primary_key.name if model._meta.primary_key else "id" + # Update all fields except PK and conflict fields + exclude = [pk_name] + conflict_fields + update_fields = get_model_fields(model, exclude=exclude) + + # Process in batches + for i in range(0, len(data), batch_size): + batch = data[i : i + batch_size] + + if DATABASE_TYPE == "postgresql": + # PostgreSQL: Use ON CONFLICT with explicit target and update + from peewee import EXCLUDED + + # Build conflict target - get actual field objects + conflict_target = [getattr(model, f) for f in conflict_fields] + + # Build update dict + update_dict = { + getattr(model, f): EXCLUDED[f] + for f in update_fields + if hasattr(model, f) + } + + if update_dict: + model.insert_many(batch).on_conflict( + conflict_target=conflict_target, action="update", update=update_dict + ).execute() + else: + # No fields to update, just ignore conflicts + model.insert_many(batch).on_conflict_ignore().execute() + else: + # SQLite: Use on_conflict_replace (simpler) + model.insert_many(batch).on_conflict_replace().execute() + + total += len(batch) + + return total + + +def upsert_by_pk( + model: Type[Model], + data: List[Dict[str, Any]], + pk_field: str = None, + batch_size: int = 100, +) -> int: + """ + Upsert records using primary key as conflict target. + + This is for models where the primary key is explicitly provided in the data + (like Player with player_id). 
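+
+    Note: on PostgreSQL, inserting explicit primary key values does not
+    advance the backing sequence, so an ID-preserving bulk load must be
+    followed by a per-table sequence reset (see MIG-007 and
+    scripts/migrate_to_postgres.py), roughly:
+
+        SELECT setval('table_id_seq', (SELECT MAX(id) FROM table));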
+ + Args: + model: Peewee Model class + data: List of dictionaries with field values (including PK) + pk_field: Primary key field name (auto-detected if not specified) + batch_size: Number of records per batch + + Returns: + Number of records processed + + Example: + # For Player with explicit player_id + upsert_by_pk(Player, player_data, pk_field='player_id') + """ + if not data: + return 0 + + # Auto-detect primary key + if pk_field is None: + pk_field = model._meta.primary_key.name if model._meta.primary_key else "id" + + return upsert_many(model, data, conflict_fields=[pk_field], batch_size=batch_size) + + +# Pre-configured upsert functions for specific models +# These encode the unique constraint knowledge for each model + + +def upsert_players(data: List[Dict], batch_size: int = 15) -> int: + """Upsert Player records using player_id as conflict target.""" + from app.db_engine import Player + + return upsert_by_pk(Player, data, pk_field="player_id", batch_size=batch_size) + + +def upsert_batting_cards(data: List[Dict], batch_size: int = 30) -> int: + """Upsert BattingCard records using (player, variant) unique constraint.""" + from app.db_engine import BattingCard + + return upsert_many( + BattingCard, data, conflict_fields=["player", "variant"], batch_size=batch_size + ) + + +def upsert_pitching_cards(data: List[Dict], batch_size: int = 30) -> int: + """Upsert PitchingCard records using (player, variant) unique constraint.""" + from app.db_engine import PitchingCard + + return upsert_many( + PitchingCard, data, conflict_fields=["player", "variant"], batch_size=batch_size + ) + + +def upsert_batting_card_ratings(data: List[Dict], batch_size: int = 30) -> int: + """Upsert BattingCardRatings using (battingcard, vs_hand) unique constraint.""" + from app.db_engine import BattingCardRatings + + return upsert_many( + BattingCardRatings, + data, + conflict_fields=["battingcard", "vs_hand"], + batch_size=batch_size, + ) + + +def upsert_pitching_card_ratings(data: List[Dict], batch_size: int = 30) -> int: + """Upsert PitchingCardRatings using (pitchingcard, vs_hand) unique constraint.""" + from app.db_engine import PitchingCardRatings + + return upsert_many( + PitchingCardRatings, + data, + conflict_fields=["pitchingcard", "vs_hand"], + batch_size=batch_size, + ) + + +def upsert_card_positions(data: List[Dict], batch_size: int = 30) -> int: + """Upsert CardPosition using (player, variant, position) unique constraint.""" + from app.db_engine import CardPosition + + return upsert_many( + CardPosition, + data, + conflict_fields=["player", "variant", "position"], + batch_size=batch_size, + ) + + +def upsert_strat_plays(data: List[Dict], batch_size: int = 20) -> int: + """Upsert StratPlay using (game, play_num) unique constraint.""" + from app.db_engine import StratPlay + + return upsert_many( + StratPlay, data, conflict_fields=["game", "play_num"], batch_size=batch_size + ) + + +def upsert_decisions(data: List[Dict], batch_size: int = 10) -> int: + """Upsert Decision using (game, pitcher) unique constraint.""" + from app.db_engine import Decision + + return upsert_many( + Decision, data, conflict_fields=["game", "pitcher"], batch_size=batch_size + ) + + +def upsert_gauntlet_rewards(data: List[Dict], batch_size: int = 15) -> int: + """ + Upsert GauntletReward records. + + Note: GauntletReward doesn't have a natural unique key defined. + For PostgreSQL, we use id if provided, otherwise insert-only. 
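+
+    Note: MIG-010 flags the GauntletReward call sites (main.py and
+    app/routers_v2/gauntletrewards.py) to confirm whether an id is always
+    supplied before relying on the id-based upsert path here.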
+ """ + from app.db_engine import GauntletReward + + # Check if any records have 'id' field + has_ids = any("id" in record for record in data) + + if has_ids: + return upsert_by_pk(GauntletReward, data, pk_field="id", batch_size=batch_size) + else: + # No IDs provided - just insert (may fail on duplicates) + total = 0 + for i in range(0, len(data), batch_size): + batch = data[i : i + batch_size] + GauntletReward.insert_many(batch).execute() + total += len(batch) + return total + + +def upsert_mlb_players(data: List[Dict], batch_size: int = 15) -> int: + """ + Upsert MlbPlayer records. + + Note: The calling code already checks for duplicates before insert, + so this is effectively just an insert operation. + """ + from app.db_engine import MlbPlayer + + # MlbPlayer doesn't have a good unique key other than id + # Since duplicates are already checked, just insert + total = 0 + for i in range(0, len(data), batch_size): + batch = data[i : i + batch_size] + MlbPlayer.insert_many(batch).execute() + total += len(batch) + return total diff --git a/app/routers_v2/battingcardratings.py b/app/routers_v2/battingcardratings.py index 96f4a7f..a02a0d1 100644 --- a/app/routers_v2/battingcardratings.py +++ b/app/routers_v2/battingcardratings.py @@ -9,27 +9,34 @@ import pandas as pd import pydantic from pydantic import validator, root_validator -from ..db_engine import db, BattingCardRatings, model_to_dict, chunked, BattingCard, Player, query_to_csv, Team, \ - CardPosition +from ..db_engine import ( + db, + BattingCardRatings, + model_to_dict, + chunked, + BattingCard, + Player, + query_to_csv, + Team, + CardPosition, +) +from ..db_helpers import upsert_batting_card_ratings from ..dependencies import oauth2_scheme, valid_token, LOG_DATA, PRIVATE_IN_SCHEMA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/battingcardratings', - tags=['battingcardratings'] -) -RATINGS_FILE = 'storage/batting-ratings.csv' -BASIC_FILE = 'storage/batting-basic.csv' +router = APIRouter(prefix="/api/v2/battingcardratings", tags=["battingcardratings"]) +RATINGS_FILE = "storage/batting-ratings.csv" +BASIC_FILE = "storage/batting-basic.csv" class BattingCardRatingsModel(pydantic.BaseModel): battingcard_id: int - vs_hand: Literal['R', 'L', 'vR', 'vL'] + vs_hand: Literal["R", "L", "vR", "vL"] homerun: float = 0.0 bp_homerun: float = 0.0 triple: float = 0.0 @@ -61,33 +68,70 @@ class BattingCardRatingsModel(pydantic.BaseModel): @validator("avg", always=True) def avg_validator(cls, v, values, **kwargs): - return (values['homerun'] + values['bp_homerun'] / 2 + values['triple'] + values['double_three'] + - values['double_two'] + values['double_pull'] + values['single_two'] + values['single_one'] + - values['single_center'] + values['bp_single'] / 2) / 108 + return ( + values["homerun"] + + values["bp_homerun"] / 2 + + values["triple"] + + values["double_three"] + + values["double_two"] + + values["double_pull"] + + values["single_two"] + + values["single_one"] + + values["single_center"] + + values["bp_single"] / 2 + ) / 108 @validator("obp", always=True) def obp_validator(cls, v, values, **kwargs): - return ((values['hbp'] + values['walk']) / 108) + values['avg'] + return ((values["hbp"] + values["walk"]) / 108) + values["avg"] @validator("slg", always=True) def slg_validator(cls, v, values, **kwargs): - return (values['homerun'] * 4 + 
values['bp_homerun'] * 2 + values['triple'] * 3 + values['double_three'] * 2 + - values['double_two'] * 2 + values['double_pull'] * 2 + values['single_two'] + values['single_one'] + - values['single_center'] + values['bp_single'] / 2) / 108 + return ( + values["homerun"] * 4 + + values["bp_homerun"] * 2 + + values["triple"] * 3 + + values["double_three"] * 2 + + values["double_two"] * 2 + + values["double_pull"] * 2 + + values["single_two"] + + values["single_one"] + + values["single_center"] + + values["bp_single"] / 2 + ) / 108 @root_validator(skip_on_failure=True) def validate_chance_total(cls, values): total_chances = ( - values['homerun'] + values['bp_homerun'] + values['triple'] + values['double_three'] + - values['double_two'] + values['double_pull'] + values['single_two'] + values['single_one'] + - values['single_center'] + values['bp_single'] + values['hbp'] + values['walk'] + - values['strikeout'] + values['lineout'] + values['popout'] + values['flyout_a'] + - values['flyout_bq'] + values['flyout_lf_b'] + values['flyout_rf_b'] + values['groundout_a'] + - values['groundout_b'] + values['groundout_c']) + values["homerun"] + + values["bp_homerun"] + + values["triple"] + + values["double_three"] + + values["double_two"] + + values["double_pull"] + + values["single_two"] + + values["single_one"] + + values["single_center"] + + values["bp_single"] + + values["hbp"] + + values["walk"] + + values["strikeout"] + + values["lineout"] + + values["popout"] + + values["flyout_a"] + + values["flyout_bq"] + + values["flyout_lf_b"] + + values["flyout_rf_b"] + + values["groundout_a"] + + values["groundout_b"] + + values["groundout_c"] + ) if round(total_chances) != 108: - raise ValueError(f'BC {values["battingcard_id"]} must have exactly 108 chances on the card ' - f'{values["vs_hand"]}; {round(total_chances)} listed') + raise ValueError( + f"BC {values['battingcard_id']} must have exactly 108 chances on the card " + f"{values['vs_hand']}; {round(total_chances)} listed" + ) return values @@ -100,18 +144,23 @@ class RatingsList(pydantic.BaseModel): ratings: List[BattingCardRatingsModel] -@router.get('') +@router.get("") async def get_card_ratings( - team_id: int, ts: str, battingcard_id: list = Query(default=None), cardset_id: list = Query(default=None), - vs_hand: Literal['R', 'L', 'vR', 'vL'] = None, short_output: bool = False, csv: bool = False): + team_id: int, + ts: str, + battingcard_id: list = Query(default=None), + cardset_id: list = Query(default=None), + vs_hand: Literal["R", "L", "vR", "vL"] = None, + short_output: bool = False, + csv: bool = False, +): this_team = Team.get_or_none(Team.id == team_id) - logging.debug(f'Team: {this_team} / has_guide: {this_team.has_guide}') + logging.debug(f"Team: {this_team} / has_guide: {this_team.has_guide}") if this_team is None or ts != this_team.team_hash() or this_team.has_guide != 1: - logging.warning(f'Team_id {team_id} attempted to pull ratings') + logging.warning(f"Team_id {team_id} attempted to pull ratings") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to pull card ratings.' + status_code=401, detail="You are not authorized to pull card ratings." 
) # elif not valid_token(token): # logging.warning(f'Bad Token: {token}') @@ -124,29 +173,40 @@ async def get_card_ratings( all_ratings = BattingCardRatings.select() if battingcard_id is not None: - all_ratings = all_ratings.where(BattingCardRatings.battingcard_id << battingcard_id) + all_ratings = all_ratings.where( + BattingCardRatings.battingcard_id << battingcard_id + ) if vs_hand is not None: all_ratings = all_ratings.where(BattingCardRatings.vs_hand == vs_hand[-1]) if cardset_id is not None: - set_players = Player.select(Player.player_id).where(Player.cardset_id << cardset_id) - set_cards = BattingCard.select(BattingCard.id).where(BattingCard.player << set_players) + set_players = Player.select(Player.player_id).where( + Player.cardset_id << cardset_id + ) + set_cards = BattingCard.select(BattingCard.id).where( + BattingCard.player << set_players + ) all_ratings = all_ratings.where(BattingCardRatings.battingcard << set_cards) if csv: # return_val = query_to_csv(all_ratings) return_vals = [model_to_dict(x) for x in all_ratings] for x in return_vals: - x.update(x['battingcard']) - x['player_id'] = x['battingcard']['player']['player_id'] - del x['battingcard'], x['player'] + x.update(x["battingcard"]) + x["player_id"] = x["battingcard"]["player"]["player_id"] + del x["battingcard"], x["player"] db.close() - return Response(content=pd.DataFrame(return_vals).to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame(return_vals).to_csv(index=False), media_type="text/csv" + ) else: - return_val = {'count': all_ratings.count(), 'ratings': [ - model_to_dict(x, recurse=not short_output) for x in all_ratings - ]} + return_val = { + "count": all_ratings.count(), + "ratings": [ + model_to_dict(x, recurse=not short_output) for x in all_ratings + ], + } db.close() return return_val @@ -154,225 +214,298 @@ async def get_card_ratings( def get_scouting_dfs(cardset_id: list = None): all_ratings = BattingCardRatings.select() if cardset_id is not None: - set_players = Player.select(Player.player_id).where(Player.cardset_id << cardset_id) - set_cards = BattingCard.select(BattingCard.id).where(BattingCard.player << set_players) + set_players = Player.select(Player.player_id).where( + Player.cardset_id << cardset_id + ) + set_cards = BattingCard.select(BattingCard.id).where( + BattingCard.player << set_players + ) all_ratings = all_ratings.where(BattingCardRatings.battingcard << set_cards) - vl_query = all_ratings.where(BattingCardRatings.vs_hand == 'L') - vr_query = all_ratings.where(BattingCardRatings.vs_hand == 'R') + vl_query = all_ratings.where(BattingCardRatings.vs_hand == "L") + vr_query = all_ratings.where(BattingCardRatings.vs_hand == "R") vl_vals = [model_to_dict(x) for x in vl_query] for x in vl_vals: - x.update(x['battingcard']) - x['player_id'] = x['battingcard']['player']['player_id'] - x['player_name'] = x['battingcard']['player']['p_name'] - x['rarity'] = x['battingcard']['player']['rarity']['name'] - x['cardset_id'] = x['battingcard']['player']['cardset']['id'] - x['cardset_name'] = x['battingcard']['player']['cardset']['name'] - del x['battingcard'] - del x['player'] + x.update(x["battingcard"]) + x["player_id"] = x["battingcard"]["player"]["player_id"] + x["player_name"] = x["battingcard"]["player"]["p_name"] + x["rarity"] = x["battingcard"]["player"]["rarity"]["name"] + x["cardset_id"] = x["battingcard"]["player"]["cardset"]["id"] + x["cardset_name"] = x["battingcard"]["player"]["cardset"]["name"] + del x["battingcard"] + del x["player"] vr_vals = 
[model_to_dict(x) for x in vr_query] for x in vr_vals: - x['player_id'] = x['battingcard']['player']['player_id'] - del x['battingcard'] + x["player_id"] = x["battingcard"]["player"]["player_id"] + del x["battingcard"] vl = pd.DataFrame(vl_vals) vr = pd.DataFrame(vr_vals) - bat_df = pd.merge(vl, vr, on='player_id', suffixes=('_vl', '_vr')).set_index('player_id', drop=False) + bat_df = pd.merge(vl, vr, on="player_id", suffixes=("_vl", "_vr")).set_index( + "player_id", drop=False + ) - logging.debug(f'bat_df: {bat_df}') + logging.debug(f"bat_df: {bat_df}") positions = CardPosition.select() if cardset_id is not None: - set_players = Player.select(Player.player_id).where(Player.cardset_id << cardset_id) + set_players = Player.select(Player.player_id).where( + Player.cardset_id << cardset_id + ) positions = positions.where(CardPosition.player << set_players) series_list = [] - for pos_code in ['P', 'C', '1B', '2B', '3B', 'SS', 'LF', 'CF', 'RF']: - series_list.append(pd.Series( - dict([(x.player.player_id, x.range) for x in positions.where(CardPosition.position == pos_code)]), - name=f'Range {pos_code}' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.error) for x in positions.where(CardPosition.position == pos_code)]), - name=f'Error {pos_code}' - )) + for pos_code in ["P", "C", "1B", "2B", "3B", "SS", "LF", "CF", "RF"]: + series_list.append( + pd.Series( + dict( + [ + (x.player.player_id, x.range) + for x in positions.where(CardPosition.position == pos_code) + ] + ), + name=f"Range {pos_code}", + ) + ) + series_list.append( + pd.Series( + dict( + [ + (x.player.player_id, x.error) + for x in positions.where(CardPosition.position == pos_code) + ] + ), + name=f"Error {pos_code}", + ) + ) - series_list.append(pd.Series( - dict([(x.player.player_id, x.arm) for x in positions.where(CardPosition.position << ['LF', 'CF', 'RF'])]), - name=f'Arm OF' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.arm) for x in positions.where(CardPosition.position == 'C')]), - name=f'Arm C' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.pb) for x in positions.where(CardPosition.position == 'C')]), - name=f'PB C' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.overthrow) for x in positions.where(CardPosition.position == 'C')]), - name=f'Throw C' - )) + series_list.append( + pd.Series( + dict( + [ + (x.player.player_id, x.arm) + for x in positions.where( + CardPosition.position << ["LF", "CF", "RF"] + ) + ] + ), + name=f"Arm OF", + ) + ) + series_list.append( + pd.Series( + dict( + [ + (x.player.player_id, x.arm) + for x in positions.where(CardPosition.position == "C") + ] + ), + name=f"Arm C", + ) + ) + series_list.append( + pd.Series( + dict( + [ + (x.player.player_id, x.pb) + for x in positions.where(CardPosition.position == "C") + ] + ), + name=f"PB C", + ) + ) + series_list.append( + pd.Series( + dict( + [ + (x.player.player_id, x.overthrow) + for x in positions.where(CardPosition.position == "C") + ] + ), + name=f"Throw C", + ) + ) db.close() - logging.debug(f'series_list: {series_list}') + logging.debug(f"series_list: {series_list}") return bat_df.join(series_list) -@router.get('/scouting') +@router.get("/scouting") async def get_card_scouting(team_id: int, ts: str): this_team = Team.get_or_none(Team.id == team_id) - logging.debug(f'Team: {this_team} / has_guide: {this_team.has_guide}') + logging.debug(f"Team: {this_team} / has_guide: {this_team.has_guide}") if this_team is None or ts != this_team.team_hash() or this_team.has_guide != 
1: - logging.warning(f'Team_id {team_id} attempted to pull ratings') + logging.warning(f"Team_id {team_id} attempted to pull ratings") db.close() - return 'Your team does not have the ratings guide enabled. If you have purchased a copy ping Cal to ' \ - 'make sure it is enabled on your team. If you are interested you can pick it up here (thank you!): ' \ - 'https://ko-fi.com/manticorum/shop' + return ( + "Your team does not have the ratings guide enabled. If you have purchased a copy ping Cal to " + "make sure it is enabled on your team. If you are interested you can pick it up here (thank you!): " + "https://ko-fi.com/manticorum/shop" + ) - if os.path.isfile(f'storage/batting-ratings.csv'): + if os.path.isfile(f"storage/batting-ratings.csv"): return FileResponse( - path=f'storage/batting-ratings.csv', - media_type='text/csv', + path=f"storage/batting-ratings.csv", + media_type="text/csv", # headers=headers ) - raise HTTPException(status_code=400, detail='Go pester Cal - the scouting file is missing') + raise HTTPException( + status_code=400, detail="Go pester Cal - the scouting file is missing" + ) -@router.post('/calculate/scouting', include_in_schema=PRIVATE_IN_SCHEMA) +@router.post("/calculate/scouting", include_in_schema=PRIVATE_IN_SCHEMA) async def post_calc_scouting(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to calculate card ratings.' + status_code=401, detail="You are not authorized to calculate card ratings." ) - logging.warning(f'Re-calculating batting ratings\n\n') + logging.warning(f"Re-calculating batting ratings\n\n") output = get_scouting_dfs() - first = ['player_id', 'player_name', 'cardset_name', 'rarity', 'hand', 'variant'] - exclude = first + ['id_vl', 'id_vr', 'vs_hand_vl', 'vs_hand_vr'] + first = ["player_id", "player_name", "cardset_name", "rarity", "hand", "variant"] + exclude = first + ["id_vl", "id_vr", "vs_hand_vl", "vs_hand_vr"] output = output[first + [col for col in output.columns if col not in exclude]] csv_file = pd.DataFrame(output).to_csv(index=False) - with open(RATINGS_FILE, 'w') as file: + with open(RATINGS_FILE, "w") as file: file.write(csv_file) - return Response(content=csv_file, media_type='text/csv') + return Response(content=csv_file, media_type="text/csv") -@router.get('/basic') +@router.get("/basic") async def get_basic_scouting(cardset_id: list = Query(default=None)): - if os.path.isfile(f'storage/batting-basic.csv'): + if os.path.isfile(f"storage/batting-basic.csv"): return FileResponse( - path=f'storage/batting-basic.csv', - media_type='text/csv', + path=f"storage/batting-basic.csv", + media_type="text/csv", # headers=headers ) - raise HTTPException(status_code=400, detail='Go pester Cal - the scouting file is missing') + raise HTTPException( + status_code=400, detail="Go pester Cal - the scouting file is missing" + ) -@router.post('/calculate/basic', include_in_schema=PRIVATE_IN_SCHEMA) +@router.post("/calculate/basic", include_in_schema=PRIVATE_IN_SCHEMA) async def post_calc_basic(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to calculate basic ratings.' + status_code=401, detail="You are not authorized to calculate basic ratings." 
) - logging.warning(f'Re-calculating basic batting ratings\n\n') + logging.warning(f"Re-calculating basic batting ratings\n\n") raw_data = get_scouting_dfs() - logging.debug(f'output: {raw_data}') + logging.debug(f"output: {raw_data}") def get_raw_speed(df_data): - speed_raw = df_data['running'] / 20 + df_data['steal_jump'] - if df_data['steal_auto']: + speed_raw = df_data["running"] / 20 + df_data["steal_jump"] + if df_data["steal_auto"]: speed_raw += 0.5 return speed_raw raw_series = raw_data.apply(get_raw_speed, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Speed'] = round(rank_series * 100) + raw_data["Speed"] = round(rank_series * 100) def get_raw_steal(df_data): - return ( - ((df_data['steal_high'] / 20) + (df_data['steal_low'] / 20)) * df_data['steal_jump'] - ) + return ((df_data["steal_high"] / 20) + (df_data["steal_low"] / 20)) * df_data[ + "steal_jump" + ] raw_series = raw_data.apply(get_raw_steal, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Steal'] = round(rank_series * 100) + raw_data["Steal"] = round(rank_series * 100) def get_raw_reaction(df_data): raw_total = 0 - for pos_range in [df_data['Range C'], df_data['Range 1B'], df_data['Range 2B'], df_data['Range 3B'], - df_data['Range SS'], df_data['Range LF'], df_data['Range CF'], df_data['Range RF']]: + for pos_range in [ + df_data["Range C"], + df_data["Range 1B"], + df_data["Range 2B"], + df_data["Range 3B"], + df_data["Range SS"], + df_data["Range LF"], + df_data["Range CF"], + df_data["Range RF"], + ]: if pd.notna(pos_range): raw_total += 10 ** (5 - pos_range) return raw_total raw_series = raw_data.apply(get_raw_reaction, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Reaction'] = round(rank_series * 100) + raw_data["Reaction"] = round(rank_series * 100) def get_raw_arm(df_data): of_arm = None of_pos = None - if pd.notna(df_data['Range RF']): - of_pos = 'RF' - elif pd.notna(df_data['Range CF']): - of_pos = 'CF' - elif pd.notna(df_data['Range LF']): - of_pos = 'LF' + if pd.notna(df_data["Range RF"]): + of_pos = "RF" + elif pd.notna(df_data["Range CF"]): + of_pos = "CF" + elif pd.notna(df_data["Range LF"]): + of_pos = "LF" if of_pos is not None: - if df_data['Arm OF'] < 0: - of_raw = df_data['Arm OF'] * -10 + if df_data["Arm OF"] < 0: + of_raw = df_data["Arm OF"] * -10 else: - of_raw = (5 - df_data['Arm OF']) + of_raw = 5 - df_data["Arm OF"] - if of_pos == 'RF': + if of_pos == "RF": of_raw = of_raw * 1.5 - of_raw += ((6 - df_data['Range RF']) * 4) - elif of_pos == 'CF': - of_raw += ((6 - df_data['Range CF']) * 3) - elif of_pos == 'LF': + of_raw += (6 - df_data["Range RF"]) * 4 + elif of_pos == "CF": + of_raw += (6 - df_data["Range CF"]) * 3 + elif of_pos == "LF": of_raw = of_raw / 2 - of_raw += ((6 - df_data['Range LF']) * 2) + of_raw += (6 - df_data["Range LF"]) * 2 of_arm = of_raw if_arm = None - if pd.notna(df_data['Range 3B']) or pd.notna(df_data['Range 2B']) or pd.notna(df_data['Range 1B']) or \ - pd.notna(df_data['Range SS']): + if ( + pd.notna(df_data["Range 3B"]) + or pd.notna(df_data["Range 2B"]) + or pd.notna(df_data["Range 1B"]) + or pd.notna(df_data["Range SS"]) + ): range_totals = 0 - if pd.notna(df_data['Range 3B']): - range_totals += ((6 - df_data['Range 3B']) * 5) - if pd.notna(df_data['Range SS']): - range_totals += ((6 - df_data['Range SS']) * 4) - if pd.notna(df_data['Range 2B']): - range_totals += ((6 - df_data['Range 2B']) * 3) - if pd.notna(df_data['Range 1B']): - range_totals += (6 - df_data['Range 1B']) + if pd.notna(df_data["Range 3B"]): + range_totals += (6 - 
df_data["Range 3B"]) * 5 + if pd.notna(df_data["Range SS"]): + range_totals += (6 - df_data["Range SS"]) * 4 + if pd.notna(df_data["Range 2B"]): + range_totals += (6 - df_data["Range 2B"]) * 3 + if pd.notna(df_data["Range 1B"]): + range_totals += 6 - df_data["Range 1B"] if_arm = 100 - (50 - range_totals) c_arm = None - if pd.notna(df_data['Arm C']): - if df_data['Arm C'] == -5: + if pd.notna(df_data["Arm C"]): + if df_data["Arm C"] == -5: c_arm = 100 else: - temp_arm = 20 + ((10 - df_data['Arm C']) * 3) + (20 - df_data['PB C']) + (20 - df_data['Throw C']) - \ - df_data['Error C'] + temp_arm = ( + 20 + + ((10 - df_data["Arm C"]) * 3) + + (20 - df_data["PB C"]) + + (20 - df_data["Throw C"]) + - df_data["Error C"] + ) c_arm = min(100, temp_arm) if c_arm is not None: @@ -386,149 +519,192 @@ async def post_calc_basic(token: str = Depends(oauth2_scheme)): raw_series = raw_data.apply(get_raw_arm, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Arm'] = round(rank_series * 100) + raw_data["Arm"] = round(rank_series * 100) def get_raw_fielding(df_data): if_error, of_error, c_error = 0, 0, 0 denom = 0 - if pd.notna(df_data['Error 3B']) or pd.notna(df_data['Error 2B']) or pd.notna(df_data['Error 1B']) or \ - pd.notna(df_data['Error SS']): + if ( + pd.notna(df_data["Error 3B"]) + or pd.notna(df_data["Error 2B"]) + or pd.notna(df_data["Error 1B"]) + or pd.notna(df_data["Error SS"]) + ): raw_if = 100 - if pd.notna(df_data['Error 3B']): - raw_if -= (df_data['Error 3B'] * 2) - if pd.notna(df_data['Error SS']): - raw_if -= (df_data['Error SS'] * .75) - if pd.notna(df_data['Error 2B']): - raw_if -= (df_data['Error 2B'] * 1.25) - if pd.notna(df_data['Error 1B']): - raw_if -= (df_data['Error 1B'] * 2) + if pd.notna(df_data["Error 3B"]): + raw_if -= df_data["Error 3B"] * 2 + if pd.notna(df_data["Error SS"]): + raw_if -= df_data["Error SS"] * 0.75 + if pd.notna(df_data["Error 2B"]): + raw_if -= df_data["Error 2B"] * 1.25 + if pd.notna(df_data["Error 1B"]): + raw_if -= df_data["Error 1B"] * 2 if_error = max(1, raw_if) denom += 1 - if pd.notna(df_data['Error LF']) or pd.notna(df_data['Error CF']) or pd.notna(df_data['Error RF']): + if ( + pd.notna(df_data["Error LF"]) + or pd.notna(df_data["Error CF"]) + or pd.notna(df_data["Error RF"]) + ): raw_of = 100 - if pd.notna(df_data['Error LF']): - raw_of -= (df_data['Error LF'] * 2) - if pd.notna(df_data['Error CF']): - raw_of -= (df_data['Error CF'] * .75) - if pd.notna(df_data['Error RF']): - raw_of -= (df_data['Error RF'] * 1.25) + if pd.notna(df_data["Error LF"]): + raw_of -= df_data["Error LF"] * 2 + if pd.notna(df_data["Error CF"]): + raw_of -= df_data["Error CF"] * 0.75 + if pd.notna(df_data["Error RF"]): + raw_of -= df_data["Error RF"] * 1.25 of_error = max(1, raw_of) denom += 1 - if pd.notna(df_data['Error C']): - c_error = max(100 - (df_data['Error C'] * 5) - df_data['Throw C'] - df_data['PB C'], 1) + if pd.notna(df_data["Error C"]): + c_error = max( + 100 - (df_data["Error C"] * 5) - df_data["Throw C"] - df_data["PB C"], 1 + ) denom += 1 return sum([if_error, of_error, c_error]) / max(denom, 1) raw_series = raw_data.apply(get_raw_fielding, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Fielding'] = round(rank_series * 100) + raw_data["Fielding"] = round(rank_series * 100) - rank_series = raw_data['avg_vl'].rank(pct=True) - raw_data['Contact L'] = round(rank_series * 100) + rank_series = raw_data["avg_vl"].rank(pct=True) + raw_data["Contact L"] = round(rank_series * 100) - rank_series = raw_data['avg_vr'].rank(pct=True) - 
raw_data['Contact R'] = round(rank_series * 100) + rank_series = raw_data["avg_vr"].rank(pct=True) + raw_data["Contact R"] = round(rank_series * 100) - rank_series = raw_data['slg_vl'].rank(pct=True) - raw_data['Power L'] = round(rank_series * 100) + rank_series = raw_data["slg_vl"].rank(pct=True) + raw_data["Power L"] = round(rank_series * 100) - rank_series = raw_data['slg_vr'].rank(pct=True) - raw_data['Power R'] = round(rank_series * 100) + rank_series = raw_data["slg_vr"].rank(pct=True) + raw_data["Power R"] = round(rank_series * 100) def get_raw_vision(df_data): return ( - ((((df_data['obp_vr'] * 0.67) + (df_data['obp_vl'] * 0.33)) - - ((df_data['avg_vr'] * 0.67) + (df_data['avg_vl'] * 0.33))) * 5) - - (((df_data['strikeout_vl'] * 0.33) + (df_data['strikeout_vr'] * 0.67)) / 208) + ( + ((df_data["obp_vr"] * 0.67) + (df_data["obp_vl"] * 0.33)) + - ((df_data["avg_vr"] * 0.67) + (df_data["avg_vl"] * 0.33)) + ) + * 5 + ) - ( + ((df_data["strikeout_vl"] * 0.33) + (df_data["strikeout_vr"] * 0.67)) / 208 ) raw_series = raw_data.apply(get_raw_vision, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Vision'] = round(rank_series * 100) + raw_data["Vision"] = round(rank_series * 100) def get_raw_rating(df_data): return ( - ((df_data['Reaction'] + df_data['Arm'] + df_data['Fielding']) * 2) + - (df_data['Speed'] + df_data['Steal']) + - ((((df_data['Contact R'] + df_data['Power R']) * 0.67) + - ((df_data['Contact L'] + df_data['Power L']) * 0.33) + df_data['Vision'] ) * 6 - ) + ((df_data["Reaction"] + df_data["Arm"] + df_data["Fielding"]) * 2) + + (df_data["Speed"] + df_data["Steal"]) + + ( + ( + ((df_data["Contact R"] + df_data["Power R"]) * 0.67) + + ((df_data["Contact L"] + df_data["Power L"]) * 0.33) + + df_data["Vision"] + ) + * 6 + ) ) raw_series = raw_data.apply(get_raw_rating, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Rating'] = round(rank_series * 100) + raw_data["Rating"] = round(rank_series * 100) - output = raw_data[[ - 'player_id', 'player_name', 'Rating', 'Contact R', 'Contact L', 'Power R', 'Power L', 'Vision', 'Speed', - 'Steal', 'Reaction', 'Arm', 'Fielding', 'hand', 'cardset_name' - ]] + output = raw_data[ + [ + "player_id", + "player_name", + "Rating", + "Contact R", + "Contact L", + "Power R", + "Power L", + "Vision", + "Speed", + "Steal", + "Reaction", + "Arm", + "Fielding", + "hand", + "cardset_name", + ] + ] csv_file = pd.DataFrame(output).to_csv(index=False) - with open(BASIC_FILE, 'w') as file: + with open(BASIC_FILE, "w") as file: file.write(csv_file) - return Response(content=csv_file, media_type='text/csv') + return Response(content=csv_file, media_type="text/csv") -@router.get('/{ratings_id}') +@router.get("/{ratings_id}") async def get_one_rating(ratings_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to pull card ratings.' + status_code=401, detail="You are not authorized to pull card ratings." 
) this_rating = BattingCardRatings.get_or_none(BattingCardRatings.id == ratings_id) if this_rating is None: db.close() - raise HTTPException(status_code=404, detail=f'BattingCardRating id {ratings_id} not found') + raise HTTPException( + status_code=404, detail=f"BattingCardRating id {ratings_id} not found" + ) r_data = model_to_dict(this_rating) db.close() return r_data -@router.get('/player/{player_id}') +@router.get("/player/{player_id}") async def get_player_ratings( - player_id: int, variant: list = Query(default=None), short_output: bool = False, - token: str = Depends(oauth2_scheme)): + player_id: int, + variant: list = Query(default=None), + short_output: bool = False, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to pull card ratings.' + status_code=401, detail="You are not authorized to pull card ratings." ) - all_cards = BattingCard.select().where(BattingCard.player_id == player_id).order_by(BattingCard.variant) + all_cards = ( + BattingCard.select() + .where(BattingCard.player_id == player_id) + .order_by(BattingCard.variant) + ) if variant is not None: all_cards = all_cards.where(BattingCard.variant << variant) - all_ratings = BattingCardRatings.select().where(BattingCardRatings.battingcard << all_cards) + all_ratings = BattingCardRatings.select().where( + BattingCardRatings.battingcard << all_cards + ) - return_val = {'count': all_ratings.count(), 'ratings': [ - model_to_dict(x, recurse=not short_output) for x in all_ratings - ]} + return_val = { + "count": all_ratings.count(), + "ratings": [model_to_dict(x, recurse=not short_output) for x in all_ratings], + } db.close() return return_val -@router.put('', include_in_schema=PRIVATE_IN_SCHEMA) +@router.put("", include_in_schema=PRIVATE_IN_SCHEMA) async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to post card ratings.' + status_code=401, detail="You are not authorized to post card ratings." 
) new_ratings = [] @@ -536,44 +712,50 @@ async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme)) for x in ratings.ratings: try: BattingCardRatings.get( - (BattingCardRatings.battingcard_id == x.battingcard_id) & (BattingCardRatings.vs_hand == x.vs_hand) + (BattingCardRatings.battingcard_id == x.battingcard_id) + & (BattingCardRatings.vs_hand == x.vs_hand) + ) + updates += ( + BattingCardRatings.update(x.dict()) + .where( + (BattingCardRatings.battingcard_id == x.battingcard_id) + & (BattingCardRatings.vs_hand == x.vs_hand) + ) + .execute() ) - updates += BattingCardRatings.update(x.dict()).where( - (BattingCardRatings.battingcard_id == x.battingcard_id) & (BattingCardRatings.vs_hand == x.vs_hand) - ).execute() except BattingCardRatings.DoesNotExist: new_ratings.append(x.dict()) with db.atomic(): - for batch in chunked(new_ratings, 30): - BattingCardRatings.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_batting_card_ratings(new_ratings, batch_size=30) db.close() - return f'Updated ratings: {updates}; new ratings: {len(new_ratings)}' + return f"Updated ratings: {updates}; new ratings: {len(new_ratings)}" -@router.delete('/{ratings_id}', include_in_schema=PRIVATE_IN_SCHEMA) -async def delete_rating( - ratings_id: int, token: str = Depends(oauth2_scheme)): +@router.delete("/{ratings_id}", include_in_schema=PRIVATE_IN_SCHEMA) +async def delete_rating(ratings_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to post card ratings.' + status_code=401, detail="You are not authorized to post card ratings." 
) this_rating = BattingCardRatings.get_or_none(BattingCardRatings.id == ratings_id) if this_rating is None: db.close() - raise HTTPException(status_code=404, detail=f'BattingCardRating id {ratings_id} not found') + raise HTTPException( + status_code=404, detail=f"BattingCardRating id {ratings_id} not found" + ) count = this_rating.delete_instance() db.close() if count == 1: - return f'Rating {this_rating} has been deleted' + return f"Rating {this_rating} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Rating {this_rating} could not be deleted') - - + raise HTTPException( + status_code=500, detail=f"Rating {this_rating} could not be deleted" + ) diff --git a/app/routers_v2/battingcards.py b/app/routers_v2/battingcards.py index d3121cf..5142f1b 100644 --- a/app/routers_v2/battingcards.py +++ b/app/routers_v2/battingcards.py @@ -6,18 +6,16 @@ import logging import pydantic from ..db_engine import db, BattingCard, model_to_dict, fn, chunked, Player, MlbPlayer +from ..db_helpers import upsert_batting_cards from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/battingcards', - tags=['battingcards'] -) +router = APIRouter(prefix="/api/v2/battingcards", tags=["battingcards"]) class BattingCardModel(pydantic.BaseModel): @@ -27,22 +25,26 @@ class BattingCardModel(pydantic.BaseModel): steal_high: int = 20 steal_auto: bool = False steal_jump: float = 0 - bunting: str = 'C' - hit_and_run: str = 'C' + bunting: str = "C" + hit_and_run: str = "C" running: int = 10 offense_col: int = None - hand: Literal['R', 'L', 'S'] = 'R' + hand: Literal["R", "L", "S"] = "R" class BattingCardList(pydantic.BaseModel): cards: List[BattingCardModel] -@router.get('') +@router.get("") async def get_batting_cards( - player_id: list = Query(default=None), player_name: list = Query(default=None), - cardset_id: list = Query(default=None), short_output: bool = False, limit: Optional[int] = None, - variant: list = Query(default=None)): + player_id: list = Query(default=None), + player_name: list = Query(default=None), + cardset_id: list = Query(default=None), + short_output: bool = False, + limit: Optional[int] = None, + variant: list = Query(default=None), +): all_cards = BattingCard.select() if player_id is not None: all_cards = all_cards.where(BattingCard.player_id << player_id) @@ -59,102 +61,134 @@ async def get_batting_cards( if limit is not None: all_cards = all_cards.limit(limit) - return_val = {'count': all_cards.count(), 'cards': [ - model_to_dict(x, recurse=not short_output) for x in all_cards - ]} + return_val = { + "count": all_cards.count(), + "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards], + } db.close() return return_val -@router.get('/{card_id}') +@router.get("/{card_id}") async def get_one_card(card_id: int): this_card = BattingCard.get_or_none(BattingCard.id == card_id) if this_card is None: db.close() - raise HTTPException(status_code=404, detail=f'BattingCard id {card_id} not found') + raise HTTPException( + status_code=404, detail=f"BattingCard id {card_id} not found" + ) r_card = model_to_dict(this_card) db.close() return r_card -@router.get('/player/{player_id}') -async def get_player_cards(player_id: int, variant: list = Query(default=None), short_output: bool = False): - all_cards = 
BattingCard.select().where(BattingCard.player_id == player_id).order_by(BattingCard.variant) +@router.get("/player/{player_id}") +async def get_player_cards( + player_id: int, variant: list = Query(default=None), short_output: bool = False +): + all_cards = ( + BattingCard.select() + .where(BattingCard.player_id == player_id) + .order_by(BattingCard.variant) + ) if variant is not None: all_cards = all_cards.where(BattingCard.variant << variant) - return_val = {'count': all_cards.count(), 'cards': [ - model_to_dict(x, recurse=not short_output) for x in all_cards - ]} + return_val = { + "count": all_cards.count(), + "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards], + } db.close() return return_val -@router.put('') +@router.put("") async def put_cards(cards: BattingCardList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post batting cards. This event has been logged.' + detail="You are not authorized to post batting cards. This event has been logged.", ) new_cards = [] updates = 0 - logging.info(f'here!') + logging.info(f"here!") for x in cards.cards: try: old = BattingCard.get( - (BattingCard.player_id == x.player_id) & (BattingCard.variant == x.variant) + (BattingCard.player_id == x.player_id) + & (BattingCard.variant == x.variant) ) if x.offense_col is None: x.offense_col = old.offense_col - updates += BattingCard.update(x.dict()).where( - (BattingCard.player_id == x.player_id) & (BattingCard.variant == x.variant) - ).execute() + updates += ( + BattingCard.update(x.dict()) + .where( + (BattingCard.player_id == x.player_id) + & (BattingCard.variant == x.variant) + ) + .execute() + ) except BattingCard.DoesNotExist: if x.offense_col is None: this_player = Player.get_or_none(Player.player_id == x.player_id) - mlb_player = MlbPlayer.get_or_none(MlbPlayer.key_bbref == this_player.bbref_id) + mlb_player = MlbPlayer.get_or_none( + MlbPlayer.key_bbref == this_player.bbref_id + ) if mlb_player is not None: - logging.info(f'setting offense_col to {mlb_player.offense_col} for {this_player.p_name}') + logging.info( + f"setting offense_col to {mlb_player.offense_col} for {this_player.p_name}" + ) x.offense_col = mlb_player.offense_col else: - logging.info(f'randomly setting offense_col for {this_player.p_name}') + logging.info( + f"randomly setting offense_col for {this_player.p_name}" + ) x.offense_col = random.randint(1, 3) - logging.debug(f'x.dict(): {x.dict()}') + logging.debug(f"x.dict(): {x.dict()}") new_cards.append(x.dict()) with db.atomic(): - for batch in chunked(new_cards, 30): - BattingCard.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_batting_cards(new_cards, batch_size=30) db.close() - return f'Updated cards: {updates}; new cards: {len(new_cards)}' + return f"Updated cards: {updates}; new cards: {len(new_cards)}" -@router.patch('/{card_id}') +@router.patch("/{card_id}") async def patch_card( - card_id: int, steal_low: Optional[int] = None, steal_high: Optional[int] = None, - steal_auto: Optional[bool] = None, steal_jump: Optional[float] = None, bunting: Optional[str] = None, - hit_and_run: Optional[str] = None, running: Optional[int] = None, offense_col: Optional[int] = None, - hand: Literal['R', 'L', 'S'] = None, token: str = Depends(oauth2_scheme)): + card_id: int, + steal_low: Optional[int] = None, + steal_high: 
Optional[int] = None, + steal_auto: Optional[bool] = None, + steal_jump: Optional[float] = None, + bunting: Optional[str] = None, + hit_and_run: Optional[str] = None, + running: Optional[int] = None, + offense_col: Optional[int] = None, + hand: Literal["R", "L", "S"] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch batting cards. This event has been logged.' + detail="You are not authorized to patch batting cards. This event has been logged.", ) this_card = BattingCard.get_or_none(BattingCard.id == card_id) if this_card is None: db.close() - raise HTTPException(status_code=404, detail=f'BattingCard id {card_id} not found') + raise HTTPException( + status_code=404, detail=f"BattingCard id {card_id} not found" + ) if steal_low is not None: this_card.steal_low = steal_low @@ -183,45 +217,49 @@ async def patch_card( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that card' + detail="Well slap my ass and call me a teapot; I could not save that card", ) -@router.delete('/{card_id}') +@router.delete("/{card_id}") async def delete_card(card_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete batting cards. This event has been logged.' + detail="You are not authorized to delete batting cards. This event has been logged.", ) this_card = BattingCard.get_or_none(BattingCard.id == card_id) if this_card is None: db.close() - raise HTTPException(status_code=404, detail=f'BattingCard id {card_id} not found') + raise HTTPException( + status_code=404, detail=f"BattingCard id {card_id} not found" + ) count = this_card.delete_instance() db.close() if count == 1: - return f'Card {this_card} has been deleted' + return f"Card {this_card} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Card {this_card} could not be deleted') + raise HTTPException( + status_code=500, detail=f"Card {this_card} could not be deleted" + ) -@router.delete('') +@router.delete("") async def delete_all_cards(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete batting cards. This event has been logged.' + detail="You are not authorized to delete batting cards. 
This event has been logged.", ) d_query = BattingCard.delete() d_query.execute() - return f'Deleted {d_query.count()} batting cards' + return f"Deleted {d_query.count()} batting cards" diff --git a/app/routers_v2/cardpositions.py b/app/routers_v2/cardpositions.py index 09707e9..75354b8 100644 --- a/app/routers_v2/cardpositions.py +++ b/app/routers_v2/cardpositions.py @@ -5,24 +5,22 @@ import pydantic from pydantic import root_validator from ..db_engine import db, CardPosition, model_to_dict, chunked, Player, fn +from ..db_helpers import upsert_card_positions from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/cardpositions', - tags=['cardpositions'] -) +router = APIRouter(prefix="/api/v2/cardpositions", tags=["cardpositions"]) class CardPositionModel(pydantic.BaseModel): player_id: int variant: int = 0 - position: Literal['P', 'C', '1B', '2B', '3B', 'SS', 'LF', 'CF', 'RF', 'DH'] + position: Literal["P", "C", "1B", "2B", "3B", "SS", "LF", "CF", "RF", "DH"] innings: int = 1 range: int = 5 error: int = 0 @@ -32,10 +30,12 @@ class CardPositionModel(pydantic.BaseModel): @root_validator(skip_on_failure=True) def position_validator(cls, values): - if values['position'] in ['C', 'LF', 'CF', 'RF'] and values['arm'] is None: - raise ValueError(f'{values["position"]} must have an arm rating') - if values['position'] == 'C' and (values['pb'] is None or values['overthrow'] is None): - raise ValueError('Catchers must have a pb and overthrow rating') + if values["position"] in ["C", "LF", "CF", "RF"] and values["arm"] is None: + raise ValueError(f"{values['position']} must have an arm rating") + if values["position"] == "C" and ( + values["pb"] is None or values["overthrow"] is None + ): + raise ValueError("Catchers must have a pb and overthrow rating") return values @@ -43,14 +43,24 @@ class PositionList(pydantic.BaseModel): positions: List[CardPositionModel] -@router.get('') +@router.get("") async def get_card_positions( - player_id: list = Query(default=None), position: list = Query(default=None), min_innings: Optional[int] = 1, - r: list = Query(default=None), e: list = Query(default=None), arm: list = Query(default=None), - pb: list = Query(default=None), overthrow: list = Query(default=None), cardset_id: list = Query(default=None), - short_output: Optional[bool] = False, sort: Optional[str] = 'innings-desc'): - all_pos = CardPosition.select().where(CardPosition.innings >= min_innings).order_by( - CardPosition.player, CardPosition.position, CardPosition.variant + player_id: list = Query(default=None), + position: list = Query(default=None), + min_innings: Optional[int] = 1, + r: list = Query(default=None), + e: list = Query(default=None), + arm: list = Query(default=None), + pb: list = Query(default=None), + overthrow: list = Query(default=None), + cardset_id: list = Query(default=None), + short_output: Optional[bool] = False, + sort: Optional[str] = "innings-desc", +): + all_pos = ( + CardPosition.select() + .where(CardPosition.innings >= min_innings) + .order_by(CardPosition.player, CardPosition.position, CardPosition.variant) ) if player_id is not None: @@ -72,42 +82,45 @@ async def get_card_positions( all_players = Player.select().where(Player.cardset_id << cardset_id) all_pos = all_pos.where(CardPosition.player << all_players) - if sort == 
'innings-desc': + if sort == "innings-desc": all_pos = all_pos.order_by(CardPosition.innings.desc()) - elif sort == 'innings-asc': + elif sort == "innings-asc": all_pos = all_pos.order_by(CardPosition.innings) - elif sort == 'range-desc': + elif sort == "range-desc": all_pos = all_pos.order_by(CardPosition.range.desc()) - elif sort == 'range-asc': + elif sort == "range-asc": all_pos = all_pos.order_by(CardPosition.range) - return_val = {'count': all_pos.count(), 'positions': [ - model_to_dict(x, recurse=not short_output) for x in all_pos - ]} + return_val = { + "count": all_pos.count(), + "positions": [model_to_dict(x, recurse=not short_output) for x in all_pos], + } db.close() return return_val -@router.get('/{position_id}') +@router.get("/{position_id}") async def get_one_position(position_id: int): this_pos = CardPosition.get_or_none(CardPosition.id == position_id) if this_pos is None: db.close() - raise HTTPException(status_code=404, detail=f'CardPosition id {position_id} not found') + raise HTTPException( + status_code=404, detail=f"CardPosition id {position_id} not found" + ) r_data = model_to_dict(this_pos) db.close() return r_data -@router.put('') +@router.put("") async def put_positions(positions: PositionList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post card positions. This event has been logged.' + detail="You are not authorized to post card positions. This event has been logged.", ) new_cards = [] @@ -116,43 +129,53 @@ async def put_positions(positions: PositionList, token: str = Depends(oauth2_sch for x in positions.positions: try: CardPosition.get( - (CardPosition.player_id == x.player_id) & (CardPosition.variant == x.variant) & - (CardPosition.position == x.position) + (CardPosition.player_id == x.player_id) + & (CardPosition.variant == x.variant) + & (CardPosition.position == x.position) + ) + updates += ( + CardPosition.update(x.dict()) + .where( + (CardPosition.player_id == x.player_id) + & (CardPosition.variant == x.variant) + & (CardPosition.position == x.position) + ) + .execute() ) - updates += CardPosition.update(x.dict()).where( - (CardPosition.player_id == x.player_id) & (CardPosition.variant == x.variant) & - (CardPosition.position == x.position) - ).execute() except CardPosition.DoesNotExist: new_cards.append(x.dict()) with db.atomic(): - for batch in chunked(new_cards, 30): - CardPosition.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_card_positions(new_cards, batch_size=30) db.close() - return f'Updated cards: {updates}; new cards: {len(new_cards)}' + return f"Updated cards: {updates}; new cards: {len(new_cards)}" -@router.delete('/{position_id}') +@router.delete("/{position_id}") async def delete_position(position_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete card positions. This event has been logged.' + detail="You are not authorized to delete card positions. 
This event has been logged.", ) this_pos = CardPosition.get_or_none(CardPosition.id == position_id) if this_pos is None: db.close() - raise HTTPException(status_code=404, detail=f'CardPosition id {position_id} not found') + raise HTTPException( + status_code=404, detail=f"CardPosition id {position_id} not found" + ) count = this_pos.delete_instance() db.close() if count == 1: - return f'Card Position {this_pos} has been deleted' + return f"Card Position {this_pos} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Card Position {this_pos} could not be deleted') + raise HTTPException( + status_code=500, detail=f"Card Position {this_pos} could not be deleted" + ) diff --git a/app/routers_v2/decisions.py b/app/routers_v2/decisions.py index 4966b91..f8f66bd 100644 --- a/app/routers_v2/decisions.py +++ b/app/routers_v2/decisions.py @@ -5,19 +5,28 @@ import logging import pandas as pd import pydantic -from ..db_engine import db, Decision, StratGame, Player, model_to_dict, chunked, fn, Team, Card, StratPlay +from ..db_engine import ( + db, + Decision, + StratGame, + Player, + model_to_dict, + chunked, + fn, + Team, + Card, + StratPlay, +) +from ..db_helpers import upsert_decisions from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/decisions', - tags=['decisions'] -) +router = APIRouter(prefix="/api/v2/decisions", tags=["decisions"]) class DecisionModel(pydantic.BaseModel): @@ -42,14 +51,26 @@ class DecisionList(pydantic.BaseModel): decisions: List[DecisionModel] -@router.get('') +@router.get("") async def get_decisions( - season: list = Query(default=None), week: list = Query(default=None), team_id: list = Query(default=None), - win: Optional[int] = None, loss: Optional[int] = None, hold: Optional[int] = None, save: Optional[int] = None, - b_save: Optional[int] = None, irunners: list = Query(default=None), irunners_scored: list = Query(default=None), - game_type: list = Query(default=None), - game_id: list = Query(default=None), player_id: list = Query(default=None), csv: Optional[bool] = False, - limit: Optional[int] = 100, page_num: Optional[int] = 1, short_output: Optional[bool] = False): + season: list = Query(default=None), + week: list = Query(default=None), + team_id: list = Query(default=None), + win: Optional[int] = None, + loss: Optional[int] = None, + hold: Optional[int] = None, + save: Optional[int] = None, + b_save: Optional[int] = None, + irunners: list = Query(default=None), + irunners_scored: list = Query(default=None), + game_type: list = Query(default=None), + game_id: list = Query(default=None), + player_id: list = Query(default=None), + csv: Optional[bool] = False, + limit: Optional[int] = 100, + page_num: Optional[int] = 1, + short_output: Optional[bool] = False, +): all_dec = Decision.select().order_by(-Decision.season, -Decision.week, -Decision.id) if season is not None: @@ -88,40 +109,50 @@ async def get_decisions( all_dec = all_dec.paginate(page_num, limit) return_dec = { - 'count': all_dec.count(), - 'decisions': [model_to_dict(x, recurse=not short_output) for x in all_dec] + "count": all_dec.count(), + "decisions": [model_to_dict(x, recurse=not short_output) for x in all_dec], } db.close() if csv: - return_vals = return_dec['decisions'] + return_vals = return_dec["decisions"] if 
len(return_vals) == 0: - return Response(content=pd.DataFrame().to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame().to_csv(index=False), media_type="text/csv" + ) for x in return_vals: - x['game_id'] = x['game']['id'] - x['game_type'] = x['game']['game_type'] - x['player_id'] = x['pitcher']['player_id'] - x['player_name'] = x['pitcher']['p_name'] - x['player_cardset'] = x['pitcher']['cardset']['name'] - x['team_id'] = x['pitcher_team']['id'] - x['team_abbrev'] = x['pitcher_team']['abbrev'] - del x['pitcher'], x['pitcher_team'], x['game'] + x["game_id"] = x["game"]["id"] + x["game_type"] = x["game"]["game_type"] + x["player_id"] = x["pitcher"]["player_id"] + x["player_name"] = x["pitcher"]["p_name"] + x["player_cardset"] = x["pitcher"]["cardset"]["name"] + x["team_id"] = x["pitcher_team"]["id"] + x["team_abbrev"] = x["pitcher_team"]["abbrev"] + del x["pitcher"], x["pitcher_team"], x["game"] output = pd.DataFrame(return_vals) - first = ['player_id', 'player_name', 'player_cardset', 'team_id', 'team_abbrev'] - exclude = first + ['lob_all', 'lob_all_rate', 'lob_2outs', 'rbi%'] + first = ["player_id", "player_name", "player_cardset", "team_id", "team_abbrev"] + exclude = first + ["lob_all", "lob_all_rate", "lob_2outs", "rbi%"] output = output[first + [col for col in output.columns if col not in exclude]] db.close() - return Response(content=pd.DataFrame(output).to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame(output).to_csv(index=False), media_type="text/csv" + ) return return_dec -@router.get('/rest') -async def get_decisions_for_rest(team_id: int, season: int = None, limit: int = 80, native_rest: bool = False): - all_dec = Decision.select().order_by(-Decision.season, -Decision.week, -Decision.id).paginate(1, limit) +@router.get("/rest") +async def get_decisions_for_rest( + team_id: int, season: int = None, limit: int = 80, native_rest: bool = False +): + all_dec = ( + Decision.select() + .order_by(-Decision.season, -Decision.week, -Decision.id) + .paginate(1, limit) + ) if season is not None: all_dec = all_dec.where(Decision.season == season) @@ -131,41 +162,61 @@ async def get_decisions_for_rest(team_id: int, season: int = None, limit: int = return_dec = [] for x in all_dec: this_val = [] - this_card = Card.get_or_none(Card.player_id == x.pitcher.player_id, Card.team_id == x.pitcher_team.id) + this_card = Card.get_or_none( + Card.player_id == x.pitcher.player_id, Card.team_id == x.pitcher_team.id + ) this_val.append(x.game.id) this_val.append(x.pitcher.player_id) this_val.append(this_card.id if this_card is not None else -1) this_val.append(1 if x.is_start else 0) if not native_rest: this_line = StratPlay.select( - StratPlay.pitcher, StratPlay.game, fn.SUM(StratPlay.outs).alias('sum_outs') + StratPlay.pitcher, + StratPlay.game, + fn.SUM(StratPlay.outs).alias("sum_outs"), ).where((StratPlay.game == x.game) & (StratPlay.pitcher == x.pitcher)) - logging.info(f'this_line: {this_line[0]}') + logging.info(f"this_line: {this_line[0]}") if this_line[0].sum_outs is None: this_val.append(0.0) else: - this_val.append(float(this_line[0].sum_outs // 3) + (float(this_line[0].sum_outs % 3) * .1)) + this_val.append( + float(this_line[0].sum_outs // 3) + + (float(this_line[0].sum_outs % 3) * 0.1) + ) return_dec.append(this_val) db.close() - return Response(content=pd.DataFrame(return_dec).to_csv(index=False, header=False), media_type='text/csv') + return Response( + content=pd.DataFrame(return_dec).to_csv(index=False, header=False), + 
media_type="text/csv", + ) -@router.patch('/{decision_id}') +@router.patch("/{decision_id}") async def patch_decision( - decision_id: int, win: Optional[int] = None, loss: Optional[int] = None, hold: Optional[int] = None, - save: Optional[int] = None, b_save: Optional[int] = None, irunners: Optional[int] = None, - irunners_scored: Optional[int] = None, rest_ip: Optional[int] = None, rest_required: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + decision_id: int, + win: Optional[int] = None, + loss: Optional[int] = None, + hold: Optional[int] = None, + save: Optional[int] = None, + b_save: Optional[int] = None, + irunners: Optional[int] = None, + irunners_scored: Optional[int] = None, + rest_ip: Optional[int] = None, + rest_required: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'patch_decision - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"patch_decision - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") this_dec = Decision.get_or_none(Decision.id == decision_id) if this_dec is None: db.close() - raise HTTPException(status_code=404, detail=f'Decision ID {decision_id} not found') + raise HTTPException( + status_code=404, detail=f"Decision ID {decision_id} not found" + ) if win is not None: this_dec.win = win @@ -192,72 +243,84 @@ async def patch_decision( return d_result else: db.close() - raise HTTPException(status_code=500, detail=f'Unable to patch decision {decision_id}') + raise HTTPException( + status_code=500, detail=f"Unable to patch decision {decision_id}" + ) -@router.post('') +@router.post("") async def post_decisions(dec_list: DecisionList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'post_decisions - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"post_decisions - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") new_dec = [] for x in dec_list.decisions: if StratGame.get_or_none(StratGame.id == x.game_id) is None: - raise HTTPException(status_code=404, detail=f'Game ID {x.game_id} not found') + raise HTTPException( + status_code=404, detail=f"Game ID {x.game_id} not found" + ) if Player.get_or_none(Player.player_id == x.pitcher_id) is None: - raise HTTPException(status_code=404, detail=f'Player ID {x.pitcher_id} not found') + raise HTTPException( + status_code=404, detail=f"Player ID {x.pitcher_id} not found" + ) if Team.get_or_none(Team.id == x.pitcher_team_id) is None: - raise HTTPException(status_code=404, detail=f'Team ID {x.pitcher_team_id} not found') + raise HTTPException( + status_code=404, detail=f"Team ID {x.pitcher_team_id} not found" + ) new_dec.append(x.dict()) with db.atomic(): - for batch in chunked(new_dec, 10): - Decision.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_decisions(new_dec, batch_size=10) db.close() - return f'Inserted {len(new_dec)} decisions' + return f"Inserted {len(new_dec)} decisions" -@router.delete('/{decision_id}') +@router.delete("/{decision_id}") async def delete_decision(decision_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'delete_decision - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"delete_decision - Bad Token: {token}") + raise HTTPException(status_code=401, 
detail="Unauthorized") this_dec = Decision.get_or_none(Decision.id == decision_id) if this_dec is None: db.close() - raise HTTPException(status_code=404, detail=f'Decision ID {decision_id} not found') + raise HTTPException( + status_code=404, detail=f"Decision ID {decision_id} not found" + ) count = this_dec.delete_instance() db.close() if count == 1: - return f'Decision {decision_id} has been deleted' + return f"Decision {decision_id} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Decision {decision_id} could not be deleted') + raise HTTPException( + status_code=500, detail=f"Decision {decision_id} could not be deleted" + ) -@router.delete('/game/{game_id}') +@router.delete("/game/{game_id}") async def delete_decisions_game(game_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'delete_decisions_game - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"delete_decisions_game - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") this_game = StratGame.get_or_none(StratGame.id == game_id) if not this_game: db.close() - raise HTTPException(status_code=404, detail=f'Game ID {game_id} not found') + raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found") count = Decision.delete().where(Decision.game == this_game).execute() db.close() if count > 0: - return f'Deleted {count} decisions matching Game ID {game_id}' + return f"Deleted {count} decisions matching Game ID {game_id}" else: - raise HTTPException(status_code=500, detail=f'No decisions matching Game ID {game_id} were deleted') - - - + raise HTTPException( + status_code=500, + detail=f"No decisions matching Game ID {game_id} were deleted", + ) diff --git a/app/routers_v2/gauntletrewards.py b/app/routers_v2/gauntletrewards.py index dcbaa72..3b18d1e 100644 --- a/app/routers_v2/gauntletrewards.py +++ b/app/routers_v2/gauntletrewards.py @@ -4,18 +4,16 @@ import logging import pydantic from ..db_engine import db, GauntletReward, model_to_dict, chunked, DatabaseError +from ..db_helpers import upsert_gauntlet_rewards from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/gauntletrewards', - tags=['gauntletrewards'] -) +router = APIRouter(prefix="/api/v2/gauntletrewards", tags=["gauntletrewards"]) class GauntletRewardModel(pydantic.BaseModel): @@ -30,10 +28,14 @@ class GauntletRewardList(pydantic.BaseModel): rewards: List[GauntletRewardModel] -@router.get('') +@router.get("") async def v1_gauntletreward_get( - name: Optional[str] = None, gauntlet_id: Optional[int] = None, reward_id: list = Query(default=None), - win_num: Optional[int] = None, loss_max: Optional[int] = None): + name: Optional[str] = None, + gauntlet_id: Optional[int] = None, + reward_id: list = Query(default=None), + win_num: Optional[int] = None, + loss_max: Optional[int] = None, +): all_rewards = GauntletReward.select() if name is not None: @@ -49,44 +51,52 @@ async def v1_gauntletreward_get( all_rewards = all_rewards.order_by(-GauntletReward.loss_max, GauntletReward.win_num) - return_val = {'count': all_rewards.count(), 'rewards': []} + return_val = {"count": all_rewards.count(), "rewards": []} for x in all_rewards: - 
return_val['rewards'].append(model_to_dict(x)) + return_val["rewards"].append(model_to_dict(x)) db.close() return return_val -@router.get('/{gauntletreward_id}') +@router.get("/{gauntletreward_id}") async def v1_gauntletreward_get_one(gauntletreward_id): try: this_reward = GauntletReward.get_by_id(gauntletreward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No gauntlet reward found with id {gauntletreward_id}') + raise HTTPException( + status_code=404, + detail=f"No gauntlet reward found with id {gauntletreward_id}", + ) return_val = model_to_dict(this_reward) db.close() return return_val -@router.patch('/{gauntletreward_id}') +@router.patch("/{gauntletreward_id}") async def v1_gauntletreward_patch( - gauntletreward_id, name: Optional[str] = None, gauntlet_id: Optional[int] = None, - reward_id: Optional[int] = None, win_num: Optional[int] = None, loss_max: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + gauntletreward_id, + name: Optional[str] = None, + gauntlet_id: Optional[int] = None, + reward_id: Optional[int] = None, + win_num: Optional[int] = None, + loss_max: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch gauntlet rewards. This event has been logged.' + detail="You are not authorized to patch gauntlet rewards. This event has been logged.", ) this_reward = GauntletReward.get_or_none(GauntletReward.id == gauntletreward_id) if this_reward is None: db.close() - raise KeyError(f'Gauntlet Reward ID {gauntletreward_id} not found') + raise KeyError(f"Gauntlet Reward ID {gauntletreward_id} not found") if gauntlet_id is not None: this_reward.gauntlet_id = gauntlet_id @@ -105,17 +115,19 @@ async def v1_gauntletreward_patch( return r_curr else: db.close() - raise DatabaseError(f'Unable to patch gauntlet reward {gauntletreward_id}') + raise DatabaseError(f"Unable to patch gauntlet reward {gauntletreward_id}") -@router.post('') -async def v1_gauntletreward_post(gauntletreward: GauntletRewardList, token: str = Depends(oauth2_scheme)): +@router.post("") +async def v1_gauntletreward_post( + gauntletreward: GauntletRewardList, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post gauntlets. This event has been logged.' + detail="You are not authorized to post gauntlets. 
This event has been logged.", ) all_rewards = [] @@ -123,17 +135,16 @@ async def v1_gauntletreward_post(gauntletreward: GauntletRewardList, token: str all_rewards.append(x.dict()) with db.atomic(): - for batch in chunked(all_rewards, 15): - GauntletReward.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_gauntlet_rewards(all_rewards, batch_size=15) db.close() - return f'Inserted {len(all_rewards)} gauntlet rewards' + return f"Inserted {len(all_rewards)} gauntlet rewards" -@router.delete('/{gauntletreward_id}') +@router.delete("/{gauntletreward_id}") async def v1_gauntletreward_delete(gauntletreward_id): if GauntletReward.delete_by_id(gauntletreward_id) == 1: - return f'Deleted gauntlet reward ID {gauntletreward_id}' - - raise DatabaseError(f'Unable to delete gauntlet run {gauntletreward_id}') + return f"Deleted gauntlet reward ID {gauntletreward_id}" + raise DatabaseError(f"Unable to delete gauntlet run {gauntletreward_id}") diff --git a/app/routers_v2/mlbplayers.py b/app/routers_v2/mlbplayers.py index 02a6b01..e013208 100644 --- a/app/routers_v2/mlbplayers.py +++ b/app/routers_v2/mlbplayers.py @@ -7,19 +7,27 @@ import logging import pydantic from pandas import DataFrame -from ..db_engine import db, MlbPlayer, Player, BattingCard, PitchingCard, model_to_dict, fn, chunked, query_to_csv +from ..db_engine import ( + db, + MlbPlayer, + Player, + BattingCard, + PitchingCard, + model_to_dict, + fn, + chunked, + query_to_csv, +) +from ..db_helpers import upsert_mlb_players from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/mlbplayers', - tags=['mlbplayers'] -) +router = APIRouter(prefix="/api/v2/mlbplayers", tags=["mlbplayers"]) class PlayerModel(pydantic.BaseModel): @@ -37,35 +45,50 @@ class PlayerList(pydantic.BaseModel): def update_card_urls(mlbplayer: MlbPlayer): - logging.info(f'Updating cards for mlbplayer: {mlbplayer.first_name} {mlbplayer.last_name} ({mlbplayer.key_bbref})') + logging.info( + f"Updating cards for mlbplayer: {mlbplayer.first_name} {mlbplayer.last_name} ({mlbplayer.key_bbref})" + ) now = datetime.datetime.now() - c1_update = Player.update({ - Player.image: Player.image.name.split('?d=')[0] + f'?d={now.year}-{now.month}-{now.day}' - }).where(Player.mlbplayer_id == mlbplayer.id) + c1_update = Player.update( + { + Player.image: Player.image.name.split("?d=")[0] + + f"?d={now.year}-{now.month}-{now.day}" + } + ).where(Player.mlbplayer_id == mlbplayer.id) count = c1_update.execute() - logging.info(f'Updated {count} image1s') + logging.info(f"Updated {count} image1s") - c2_update = Player.update({ - Player.image2: Player.image2.name.split('?d=')[0] + f'{now.year}-{now.month}-{now.day}' - }).where((Player.mlbplayer_id == mlbplayer.id) & (Player.image2.is_null(False))) + c2_update = Player.update( + { + Player.image2: Player.image2.name.split("?d=")[0] + + f"{now.year}-{now.month}-{now.day}" + } + ).where((Player.mlbplayer_id == mlbplayer.id) & (Player.image2.is_null(False))) count2 = c2_update.execute() - logging.info(f'Updated {count2} image2s') + logging.info(f"Updated {count2} image2s") return count + count2 -@router.get('') +@router.get("") async def get_players( - full_name: list = Query(default=None), first_name: list = Query(default=None), - last_name: list = 
Query(default=None), key_fangraphs: list = Query(default=None), - key_bbref: list = Query(default=None), key_retro: list = Query(default=None), - key_mlbam: list = Query(default=None), offense_col: list = Query(default=None), csv: Optional[bool] = False): + full_name: list = Query(default=None), + first_name: list = Query(default=None), + last_name: list = Query(default=None), + key_fangraphs: list = Query(default=None), + key_bbref: list = Query(default=None), + key_retro: list = Query(default=None), + key_mlbam: list = Query(default=None), + offense_col: list = Query(default=None), + csv: Optional[bool] = False, +): all_players = MlbPlayer.select() if full_name is not None: name_list = [x.lower() for x in full_name] all_players = all_players.where( - fn.lower(MlbPlayer.first_name) + ' ' + fn.lower(MlbPlayer.last_name) << name_list + fn.lower(MlbPlayer.first_name) + " " + fn.lower(MlbPlayer.last_name) + << name_list ) if first_name is not None: name_list = [x.lower() for x in first_name] @@ -89,44 +112,56 @@ async def get_players( if csv: return_val = query_to_csv(all_players) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") - return_val = {'count': all_players.count(), 'players': [ - model_to_dict(x) for x in all_players - ]} + return_val = { + "count": all_players.count(), + "players": [model_to_dict(x) for x in all_players], + } db.close() return return_val -@router.get('/{player_id}') +@router.get("/{player_id}") async def get_one_player(player_id: int): this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id) if this_player is None: db.close() - raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found') + raise HTTPException( + status_code=404, detail=f"MlbPlayer id {player_id} not found" + ) r_data = model_to_dict(this_player) db.close() return r_data -@router.patch('/{player_id}') +@router.patch("/{player_id}") async def patch_player( - player_id: int, first_name: Optional[str] = None, last_name: Optional[str] = None, - key_fangraphs: Optional[str] = None, key_bbref: Optional[str] = None, key_retro: Optional[str] = None, - key_mlbam: Optional[str] = None, offense_col: Optional[str] = None, token: str = Depends(oauth2_scheme)): + player_id: int, + first_name: Optional[str] = None, + last_name: Optional[str] = None, + key_fangraphs: Optional[str] = None, + key_bbref: Optional[str] = None, + key_retro: Optional[str] = None, + key_mlbam: Optional[str] = None, + offense_col: Optional[str] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch mlb players. This event has been logged.' + detail="You are not authorized to patch mlb players. 
This event has been logged.", ) this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id) if this_player is None: db.close() - raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found') + raise HTTPException( + status_code=404, detail=f"MlbPlayer id {player_id} not found" + ) if first_name is not None: this_player.first_name = first_name @@ -151,65 +186,69 @@ async def patch_player( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that player' + detail="Well slap my ass and call me a teapot; I could not save that player", ) -@router.post('') +@router.post("") async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post mlb players. This event has been logged.' + detail="You are not authorized to post mlb players. This event has been logged.", ) new_players = [] for x in players.players: dupes = MlbPlayer.select().where( - (MlbPlayer.key_fangraphs == x.key_fangraphs) | (MlbPlayer.key_mlbam == x.key_mlbam) | - (MlbPlayer.key_retro == x.key_retro) | (MlbPlayer.key_bbref == x.key_bbref) + (MlbPlayer.key_fangraphs == x.key_fangraphs) + | (MlbPlayer.key_mlbam == x.key_mlbam) + | (MlbPlayer.key_retro == x.key_retro) + | (MlbPlayer.key_bbref == x.key_bbref) ) if dupes.count() > 0: db.close() raise HTTPException( status_code=400, - detail=f'{x.first_name} {x.last_name} has a key already in the database' + detail=f"{x.first_name} {x.last_name} has a key already in the database", ) new_players.append(x.dict()) with db.atomic(): - for batch in chunked(new_players, 15): - MlbPlayer.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + # Note: Duplicate check is already done above, so this is effectively just insert + upsert_mlb_players(new_players, batch_size=15) db.close() - return f'Inserted {len(new_players)} new MLB players' + return f"Inserted {len(new_players)} new MLB players" -@router.post('/one') +@router.post("/one") async def post_one_player(player: PlayerModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post mlb players. This event has been logged.' + detail="You are not authorized to post mlb players. 
This event has been logged.", ) dupes = MlbPlayer.select().where( - (MlbPlayer.key_fangraphs == player.key_fangraphs) | (MlbPlayer.key_mlbam == player.key_mlbam) | - (MlbPlayer.key_bbref == player.key_bbref) + (MlbPlayer.key_fangraphs == player.key_fangraphs) + | (MlbPlayer.key_mlbam == player.key_mlbam) + | (MlbPlayer.key_bbref == player.key_bbref) ) if dupes.count() > 0: - logging.info(f'POST /mlbplayers/one - dupes found:') + logging.info(f"POST /mlbplayers/one - dupes found:") for x in dupes: - logging.info(f'{x}') + logging.info(f"{x}") db.close() raise HTTPException( status_code=400, - detail=f'{player.first_name} {player.last_name} has a key already in the database' + detail=f"{player.first_name} {player.last_name} has a key already in the database", ) new_player = MlbPlayer(**player.dict()) @@ -221,43 +260,51 @@ async def post_one_player(player: PlayerModel, token: str = Depends(oauth2_schem else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that player' + detail="Well slap my ass and call me a teapot; I could not save that player", ) -@router.delete('/{player_id}') +@router.delete("/{player_id}") async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete mlb players. This event has been logged.' + detail="You are not authorized to delete mlb players. This event has been logged.", ) this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id) if this_player is None: db.close() - raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found') + raise HTTPException( + status_code=404, detail=f"MlbPlayer id {player_id} not found" + ) count = this_player.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Player {player_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Player {player_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Player {player_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Player {player_id} was not deleted" + ) -@router.post('/update-cols') -@router.post('/update-cols/{mlbplayer_id}') -async def update_columns(mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme)): +@router.post("/update-cols") +@router.post("/update-cols/{mlbplayer_id}") +async def update_columns( + mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to update mlb players. This event has been logged.' + detail="You are not authorized to update mlb players. 
This event has been logged.", ) p_query = MlbPlayer.select() @@ -267,27 +314,34 @@ async def update_columns(mlbplayer_id: Optional[int] = None, token: str = Depend total_count = 0 for x in p_query: all_players = Player.select().where(Player.mlbplayer == x) - bc_update = BattingCard.update({ - BattingCard.offense_col: x.offense_col, - }).where((BattingCard.player << all_players) & (BattingCard.offense_col != x.offense_col)) + bc_update = BattingCard.update( + { + BattingCard.offense_col: x.offense_col, + } + ).where( + (BattingCard.player << all_players) + & (BattingCard.offense_col != x.offense_col) + ) count = bc_update.execute() total_count += count - logging.info(f'Updated {count} batting cards for {x.first_name} {x.last_name}') + logging.info(f"Updated {count} batting cards for {x.first_name} {x.last_name}") update_card_urls(x) db.close() - return f'Updated {total_count} batting cards' + return f"Updated {total_count} batting cards" -@router.post('/update-names') -@router.post('/update-names/{mlbplayer_id}') -async def update_names(mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme)): +@router.post("/update-names") +@router.post("/update-names/{mlbplayer_id}") +async def update_names( + mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to update mlb players. This event has been logged.' + detail="You are not authorized to update mlb players. This event has been logged.", ) p_query = MlbPlayer.select() @@ -296,16 +350,18 @@ async def update_names(mlbplayer_id: Optional[int] = None, token: str = Depends( total_count = 0 for x in p_query: - p_update = Player.update({ - Player.p_name: f'{x.first_name} {x.last_name}' - }).where((Player.mlbplayer == x) & (Player.p_name != f'{x.first_name} {x.last_name}')) + p_update = Player.update( + {Player.p_name: f"{x.first_name} {x.last_name}"} + ).where( + (Player.mlbplayer == x) & (Player.p_name != f"{x.first_name} {x.last_name}") + ) count = p_update.execute() total_count += count - logging.info(f'Update {count} player records for {x.first_name} {x.last_name}') + logging.info(f"Update {count} player records for {x.first_name} {x.last_name}") update_card_urls(x) db.close() - return f'Updated {total_count} names' + return f"Updated {total_count} names" # @router.post('/link-players') diff --git a/app/routers_v2/pitchingcardratings.py b/app/routers_v2/pitchingcardratings.py index 656f256..822f9fc 100644 --- a/app/routers_v2/pitchingcardratings.py +++ b/app/routers_v2/pitchingcardratings.py @@ -8,27 +8,34 @@ import pandas as pd import pydantic from pydantic import validator, root_validator -from ..db_engine import db, PitchingCardRatings, model_to_dict, chunked, PitchingCard, Player, query_to_csv, Team, \ - CardPosition +from ..db_engine import ( + db, + PitchingCardRatings, + model_to_dict, + chunked, + PitchingCard, + Player, + query_to_csv, + Team, + CardPosition, +) +from ..db_helpers import upsert_pitching_card_ratings from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/pitchingcardratings', - tags=['pitchingcardratings'] -) -RATINGS_FILE = 
'storage/pitching-ratings.csv' -BASIC_FILE = 'storage/pitching-basic.csv' +router = APIRouter(prefix="/api/v2/pitchingcardratings", tags=["pitchingcardratings"]) +RATINGS_FILE = "storage/pitching-ratings.csv" +BASIC_FILE = "storage/pitching-basic.csv" class PitchingCardRatingsModel(pydantic.BaseModel): pitchingcard_id: int - vs_hand: Literal['R', 'L', 'vR', 'vL'] + vs_hand: Literal["R", "L", "vR", "vL"] homerun: float = 0.0 bp_homerun: float = 0.0 triple: float = 0.0 @@ -62,30 +69,69 @@ class PitchingCardRatingsModel(pydantic.BaseModel): @validator("avg", always=True) def avg_validator(cls, v, values, **kwargs): - return (values['homerun'] + values['bp_homerun'] / 2 + values['triple'] + values['double_three'] + - values['double_two'] + values['double_cf'] + values['single_two'] + values['single_one'] + - values['single_center'] + values['bp_single'] / 2) / 108 + return ( + values["homerun"] + + values["bp_homerun"] / 2 + + values["triple"] + + values["double_three"] + + values["double_two"] + + values["double_cf"] + + values["single_two"] + + values["single_one"] + + values["single_center"] + + values["bp_single"] / 2 + ) / 108 @validator("obp", always=True) def obp_validator(cls, v, values, **kwargs): - return ((values['hbp'] + values['walk']) / 108) + values['avg'] + return ((values["hbp"] + values["walk"]) / 108) + values["avg"] @validator("slg", always=True) def slg_validator(cls, v, values, **kwargs): - return (values['homerun'] * 4 + values['bp_homerun'] * 2 + values['triple'] * 3 + values['double_three'] * 2 + - values['double_two'] * 2 + values['double_cf'] * 2 + values['single_two'] + values['single_one'] + - values['single_center'] + values['bp_single'] / 2) / 108 + return ( + values["homerun"] * 4 + + values["bp_homerun"] * 2 + + values["triple"] * 3 + + values["double_three"] * 2 + + values["double_two"] * 2 + + values["double_cf"] * 2 + + values["single_two"] + + values["single_one"] + + values["single_center"] + + values["bp_single"] / 2 + ) / 108 @root_validator(skip_on_failure=True) def validate_chance_total(cls, values): total_chances = ( - values['homerun'] + values['bp_homerun'] + values['triple'] + values['double_three'] + - values['double_two'] + values['double_cf'] + values['single_two'] + values['single_one'] + - values['single_center'] + values['bp_single'] + values['hbp'] + values['walk'] + - values['strikeout'] + values['flyout_lf_b'] + values['flyout_cf_b'] + values['flyout_rf_b'] + - values['groundout_a'] + values['groundout_b'] + values['xcheck_p'] + values['xcheck_c'] + - values['xcheck_1b'] + values['xcheck_2b'] + values['xcheck_3b'] + values['xcheck_ss'] + - values['xcheck_lf'] + values['xcheck_cf'] + values['xcheck_rf']) + values["homerun"] + + values["bp_homerun"] + + values["triple"] + + values["double_three"] + + values["double_two"] + + values["double_cf"] + + values["single_two"] + + values["single_one"] + + values["single_center"] + + values["bp_single"] + + values["hbp"] + + values["walk"] + + values["strikeout"] + + values["flyout_lf_b"] + + values["flyout_cf_b"] + + values["flyout_rf_b"] + + values["groundout_a"] + + values["groundout_b"] + + values["xcheck_p"] + + values["xcheck_c"] + + values["xcheck_1b"] + + values["xcheck_2b"] + + values["xcheck_3b"] + + values["xcheck_ss"] + + values["xcheck_lf"] + + values["xcheck_cf"] + + values["xcheck_rf"] + ) if round(total_chances) != 108: raise ValueError("Must have exactly 108 chances on the card") @@ -96,39 +142,51 @@ class RatingsList(pydantic.BaseModel): ratings: List[PitchingCardRatingsModel] 
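The avg/obp/slg validators and the 108-chance root validator above encode the card math used throughout this router. A quick worked check of that math, with made-up chance counts rather than values from any real card (bp_homerun and bp_single count at half weight, exactly as in avg_validator):

# Hit results on a hypothetical pitching card, in the order used by avg_validator.
hit_chances = 2 + (1 / 2) + 1 + 3 + 4 + 3 + 5 + 4 + 4 + (1 / 2)  # = 27.0 of the 108 chances
print(round(hit_chances / 108, 3))  # 0.25 -> the avg validator would compute 0.250 for this card
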
-@router.get('') +@router.get("") async def get_card_ratings( - pitchingcard_id: list = Query(default=None), vs_hand: Literal['R', 'L', 'vR', 'vL'] = None, - short_output: bool = False, csv: bool = False, cardset_id: list = Query(default=None), - token: str = Depends(oauth2_scheme)): + pitchingcard_id: list = Query(default=None), + vs_hand: Literal["R", "L", "vR", "vL"] = None, + short_output: bool = False, + csv: bool = False, + cardset_id: list = Query(default=None), + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to pull card ratings.' + status_code=401, detail="You are not authorized to pull card ratings." ) all_ratings = PitchingCardRatings.select() if pitchingcard_id is not None: - all_ratings = all_ratings.where(PitchingCardRatings.pitchingcard_id << pitchingcard_id) + all_ratings = all_ratings.where( + PitchingCardRatings.pitchingcard_id << pitchingcard_id + ) if vs_hand is not None: all_ratings = all_ratings.where(PitchingCardRatings.vs_hand == vs_hand[-1]) if cardset_id is not None: - set_players = Player.select(Player.player_id).where(Player.cardset_id << cardset_id) - set_cards = PitchingCard.select(PitchingCard.id).where(PitchingCard.player << set_players) + set_players = Player.select(Player.player_id).where( + Player.cardset_id << cardset_id + ) + set_cards = PitchingCard.select(PitchingCard.id).where( + PitchingCard.player << set_players + ) all_ratings = all_ratings.where(PitchingCardRatings.pitchingcard << set_cards) if csv: return_val = query_to_csv(all_ratings) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_ratings.count(), 'ratings': [ - model_to_dict(x, recurse=not short_output) for x in all_ratings - ]} + return_val = { + "count": all_ratings.count(), + "ratings": [ + model_to_dict(x, recurse=not short_output) for x in all_ratings + ], + } db.close() return return_val @@ -136,282 +194,358 @@ async def get_card_ratings( def get_scouting_dfs(cardset_id: list = None): all_ratings = PitchingCardRatings.select() if cardset_id is not None: - set_players = Player.select(Player.player_id).where(Player.cardset_id << cardset_id) - set_cards = PitchingCard.select(PitchingCard.id).where(PitchingCard.player << set_players) + set_players = Player.select(Player.player_id).where( + Player.cardset_id << cardset_id + ) + set_cards = PitchingCard.select(PitchingCard.id).where( + PitchingCard.player << set_players + ) all_ratings = all_ratings.where(PitchingCardRatings.pitchingcard << set_cards) - vl_query = all_ratings.where(PitchingCardRatings.vs_hand == 'L') - vr_query = all_ratings.where(PitchingCardRatings.vs_hand == 'R') + vl_query = all_ratings.where(PitchingCardRatings.vs_hand == "L") + vr_query = all_ratings.where(PitchingCardRatings.vs_hand == "R") vl_vals = [model_to_dict(x) for x in vl_query] for x in vl_vals: - x.update(x['pitchingcard']) - x['player_id'] = x['pitchingcard']['player']['player_id'] - x['player_name'] = x['pitchingcard']['player']['p_name'] - x['rarity'] = x['pitchingcard']['player']['rarity']['name'] - x['cardset_id'] = x['pitchingcard']['player']['cardset']['id'] - x['cardset_name'] = x['pitchingcard']['player']['cardset']['name'] - x['starter_rating'] = x['pitchingcard']['starter_rating'] - x['relief_rating'] = x['pitchingcard']['relief_rating'] - 
x['closer_rating'] = x['pitchingcard']['closer_rating'] - del x['pitchingcard'], x['player'] + x.update(x["pitchingcard"]) + x["player_id"] = x["pitchingcard"]["player"]["player_id"] + x["player_name"] = x["pitchingcard"]["player"]["p_name"] + x["rarity"] = x["pitchingcard"]["player"]["rarity"]["name"] + x["cardset_id"] = x["pitchingcard"]["player"]["cardset"]["id"] + x["cardset_name"] = x["pitchingcard"]["player"]["cardset"]["name"] + x["starter_rating"] = x["pitchingcard"]["starter_rating"] + x["relief_rating"] = x["pitchingcard"]["relief_rating"] + x["closer_rating"] = x["pitchingcard"]["closer_rating"] + del x["pitchingcard"], x["player"] vr_vals = [model_to_dict(x) for x in vr_query] for x in vr_vals: - x['player_id'] = x['pitchingcard']['player']['player_id'] - del x['pitchingcard'] + x["player_id"] = x["pitchingcard"]["player"]["player_id"] + del x["pitchingcard"] vl = pd.DataFrame(vl_vals) vr = pd.DataFrame(vr_vals) - pit_df = pd.merge(vl, vr, on='player_id', suffixes=('_vl', '_vr')).set_index('player_id', drop=False) - logging.debug(f'pit_df: {pit_df}') + pit_df = pd.merge(vl, vr, on="player_id", suffixes=("_vl", "_vr")).set_index( + "player_id", drop=False + ) + logging.debug(f"pit_df: {pit_df}") - positions = CardPosition.select().where(CardPosition.position == 'P') + positions = CardPosition.select().where(CardPosition.position == "P") if cardset_id is not None: - set_players = Player.select(Player.player_id).where(Player.cardset_id << cardset_id) + set_players = Player.select(Player.player_id).where( + Player.cardset_id << cardset_id + ) positions = positions.where(CardPosition.player << set_players) - series_list = [pd.Series( - dict([(x.player.player_id, x.range) for x in positions]), - name=f'Range P' - ), pd.Series( - dict([(x.player.player_id, x.error) for x in positions]), - name=f'Error P' - )] + series_list = [ + pd.Series( + dict([(x.player.player_id, x.range) for x in positions]), name=f"Range P" + ), + pd.Series( + dict([(x.player.player_id, x.error) for x in positions]), name=f"Error P" + ), + ] db.close() - logging.debug(f'series_list: {series_list}') + logging.debug(f"series_list: {series_list}") return pit_df.join(series_list) -@router.get('/scouting') +@router.get("/scouting") async def get_card_scouting(team_id: int, ts: str): this_team = Team.get_or_none(Team.id == team_id) - logging.debug(f'Team: {this_team} / has_guide: {this_team.has_guide}') + logging.debug(f"Team: {this_team} / has_guide: {this_team.has_guide}") if this_team is None or ts != this_team.team_hash() or this_team.has_guide != 1: - logging.warning(f'Team_id {team_id} attempted to pull ratings') + logging.warning(f"Team_id {team_id} attempted to pull ratings") db.close() - return 'Your team does not have the ratings guide enabled. If you have purchased a copy ping Cal to ' \ - 'make sure it is enabled on your team. If you are interested you can pick it up here (thank you!): ' \ - 'https://ko-fi.com/manticorum/shop' + return ( + "Your team does not have the ratings guide enabled. If you have purchased a copy ping Cal to " + "make sure it is enabled on your team. 
If you are interested you can pick it up here (thank you!): " + "https://ko-fi.com/manticorum/shop" + ) if os.path.isfile(RATINGS_FILE): return FileResponse( path=RATINGS_FILE, - media_type='text/csv', + media_type="text/csv", # headers=headers ) - raise HTTPException(status_code=400, detail='Go pester Cal - the scouting file is missing') + raise HTTPException( + status_code=400, detail="Go pester Cal - the scouting file is missing" + ) -@router.post('/calculate/scouting') +@router.post("/calculate/scouting") async def post_calc_scouting(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to calculate card ratings.' + status_code=401, detail="You are not authorized to calculate card ratings." ) - logging.warning(f'Re-calculating pitching ratings\n\n') + logging.warning(f"Re-calculating pitching ratings\n\n") output = get_scouting_dfs() - first = ['player_id', 'player_name', 'cardset_name', 'rarity', 'hand', 'variant'] - exclude = first + ['id_vl', 'id_vr', 'vs_hand_vl', 'vs_hand_vr'] + first = ["player_id", "player_name", "cardset_name", "rarity", "hand", "variant"] + exclude = first + ["id_vl", "id_vr", "vs_hand_vl", "vs_hand_vr"] output = output[first + [col for col in output.columns if col not in exclude]] csv_file = pd.DataFrame(output).to_csv(index=False) - with open(RATINGS_FILE, 'w') as file: + with open(RATINGS_FILE, "w") as file: file.write(csv_file) - return Response(content=csv_file, media_type='text/csv') + return Response(content=csv_file, media_type="text/csv") -@router.get('/basic') +@router.get("/basic") async def get_basic_scouting(): if os.path.isfile(BASIC_FILE): return FileResponse( path=BASIC_FILE, - media_type='text/csv', + media_type="text/csv", # headers=headers ) - raise HTTPException(status_code=400, detail='Go pester Cal - the scouting file is missing') + raise HTTPException( + status_code=400, detail="Go pester Cal - the scouting file is missing" + ) -@router.post('/calculate/basic') +@router.post("/calculate/basic") async def post_calc_basic(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to calculate basic ratings.' + status_code=401, detail="You are not authorized to calculate basic ratings." 
) - logging.warning(f'Re-calculating basic pitching ratings\n\n') + logging.warning(f"Re-calculating basic pitching ratings\n\n") raw_data = get_scouting_dfs() - logging.debug(f'output: {raw_data}') + logging.debug(f"output: {raw_data}") def get_raw_leftcontrol(df_data): - return ((1 - (df_data['obp_vl'] - df_data['avg_vl'])) * 100) + (1 - (df_data['wild_pitch'] / 20)) + return ((1 - (df_data["obp_vl"] - df_data["avg_vl"])) * 100) + ( + 1 - (df_data["wild_pitch"] / 20) + ) raw_series = raw_data.apply(get_raw_leftcontrol, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Control L'] = round(rank_series * 100) + raw_data["Control L"] = round(rank_series * 100) def get_raw_rightcontrol(df_data): - return ((1 - (df_data['obp_vr'] - df_data['avg_vr'])) * 100) + (1 - (df_data['wild_pitch'] / 20)) + return ((1 - (df_data["obp_vr"] - df_data["avg_vr"])) * 100) + ( + 1 - (df_data["wild_pitch"] / 20) + ) raw_series = raw_data.apply(get_raw_rightcontrol, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Control R'] = round(rank_series * 100) + raw_data["Control R"] = round(rank_series * 100) def get_raw_leftstuff(df_data): - return 10 - (df_data['slg_vl'] + df_data['slg_vl'] + ((df_data['homerun_vl'] + df_data['bp_homerun_vl']) / 108)) + return 10 - ( + df_data["slg_vl"] + + df_data["slg_vl"] + + ((df_data["homerun_vl"] + df_data["bp_homerun_vl"]) / 108) + ) raw_series = raw_data.apply(get_raw_leftstuff, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Stuff L'] = round(rank_series * 100) + raw_data["Stuff L"] = round(rank_series * 100) def get_raw_rightstuff(df_data): - return 10 - (df_data['slg_vr'] + df_data['slg_vr'] + ((df_data['homerun_vr'] + df_data['bp_homerun_vr']) / 108)) + return 10 - ( + df_data["slg_vr"] + + df_data["slg_vr"] + + ((df_data["homerun_vr"] + df_data["bp_homerun_vr"]) / 108) + ) raw_series = raw_data.apply(get_raw_rightstuff, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Stuff R'] = round(rank_series * 100) + raw_data["Stuff R"] = round(rank_series * 100) def get_raw_fielding(df_data): - return ((6 - df_data['Range P']) * 10) + (50 - df_data['Error P']) + return ((6 - df_data["Range P"]) * 10) + (50 - df_data["Error P"]) raw_series = raw_data.apply(get_raw_fielding, axis=1) rank_series = raw_series.rank(pct=True) - logging.debug(f'max fld: {raw_series.max()} / min fld: {raw_series.min()}') - raw_data['Fielding'] = round(rank_series * 100) + logging.debug(f"max fld: {raw_series.max()} / min fld: {raw_series.min()}") + raw_data["Fielding"] = round(rank_series * 100) def get_raw_stamina(df_data): - spow = df_data['starter_rating'] if pd.isna(df_data['starter_rating']) else -1 - rpow = df_data['relief_rating'] if pd.isna(df_data['relief_rating']) else -1 + spow = df_data["starter_rating"] if pd.isna(df_data["starter_rating"]) else -1 + rpow = df_data["relief_rating"] if pd.isna(df_data["relief_rating"]) else -1 this_pow = spow if spow > rpow else rpow - return (((this_pow * (df_data['obp_vr'] * (2 / 3))) + (this_pow * (df_data['obp_vl'] / 3))) * 4.5) + this_pow + return ( + ( + (this_pow * (df_data["obp_vr"] * (2 / 3))) + + (this_pow * (df_data["obp_vl"] / 3)) + ) + * 4.5 + ) + this_pow raw_series = raw_data.apply(get_raw_stamina, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Stamina'] = round(rank_series * 100) + raw_data["Stamina"] = round(rank_series * 100) def get_raw_hit(df_data): - return 1 - (df_data['avg_vr'] * (2 / 3)) + (df_data['avg_vl'] / 3) + return 1 - (df_data["avg_vr"] * (2 / 3)) + (df_data["avg_vl"] / 3) 
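Each basic rating in this endpoint follows the same two-step pattern: compute a raw per-pitcher score, then convert it to a 0-100 scale with a percentile rank (using ascending=False when a higher raw value should map to a lower rating, as for BB/9). A standalone sketch of that pattern with placeholder player ids and scores, purely for illustration:

import pandas as pd

raw = pd.Series({101: 0.42, 102: 0.55, 103: 0.31})  # raw score keyed by player_id (made-up values)
scaled = round(raw.rank(pct=True) * 100)             # best raw score maps to 100
print(scaled)                                        # 101 -> 67.0, 102 -> 100.0, 103 -> 33.0
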
raw_series = raw_data.apply(get_raw_hit, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['H/9'] = round(rank_series * 100) + raw_data["H/9"] = round(rank_series * 100) def get_raw_k(df_data): - return ((df_data['strikeout_vr'] / 108) * (2 / 3)) + ((df_data['strikeout_vl'] / 108) / 3) + return ((df_data["strikeout_vr"] / 108) * (2 / 3)) + ( + (df_data["strikeout_vl"] / 108) / 3 + ) raw_series = raw_data.apply(get_raw_k, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['K/9'] = round(rank_series * 100) + raw_data["K/9"] = round(rank_series * 100) def get_raw_bb(df_data): - return ((df_data['walk_vr'] / 108) * (2 / 3)) + ((df_data['walk_vl'] / 108) / 3) + return ((df_data["walk_vr"] / 108) * (2 / 3)) + ((df_data["walk_vl"] / 108) / 3) raw_series = raw_data.apply(get_raw_bb, axis=1) rank_series = raw_series.rank(pct=True, ascending=False) - raw_data['BB/9'] = round(rank_series * 100) + raw_data["BB/9"] = round(rank_series * 100) def get_raw_hr(df_data): return 1 - ( - (((df_data['homerun_vr'] + df_data['bp_homerun_vr']) / 108) * (2 / 3)) + - (((df_data['homerun_vl'] + df_data['bp_homerun_vl']) / 108) / 3)) + (((df_data["homerun_vr"] + df_data["bp_homerun_vr"]) / 108) * (2 / 3)) + + (((df_data["homerun_vl"] + df_data["bp_homerun_vl"]) / 108) / 3) + ) raw_series = raw_data.apply(get_raw_hr, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['HR/9'] = round(rank_series * 100) + raw_data["HR/9"] = round(rank_series * 100) def get_raw_rating(df_data): - spow = df_data['starter_rating'] if pd.isna(df_data['starter_rating']) else -1 - rpow = df_data['relief_rating'] if pd.isna(df_data['relief_rating']) else -1 + spow = df_data["starter_rating"] if pd.isna(df_data["starter_rating"]) else -1 + rpow = df_data["relief_rating"] if pd.isna(df_data["relief_rating"]) else -1 if spow > rpow and spow >= 4: return ( - ((df_data['H/9'] + df_data['K/9'] + df_data['BB/9'] + df_data['HR/9']) * 5) + - (df_data['Fielding']) + (df_data['Stamina'] * 5) + - (((df_data['Stuff L'] / 3) + (df_data['Stuff R'] * (2 / 3))) * 4) + - (((df_data['Control L'] / 3) + (df_data['Control R'] * (2 / 3))) * 2) + ( + ( + df_data["H/9"] + + df_data["K/9"] + + df_data["BB/9"] + + df_data["HR/9"] + ) + * 5 + ) + + (df_data["Fielding"]) + + (df_data["Stamina"] * 5) + + (((df_data["Stuff L"] / 3) + (df_data["Stuff R"] * (2 / 3))) * 4) + + (((df_data["Control L"] / 3) + (df_data["Control R"] * (2 / 3))) * 2) ) else: return ( - ((df_data['H/9'] + df_data['K/9'] + df_data['BB/9'] + df_data['HR/9']) * 5) + - (df_data['Fielding']) + (df_data['Stamina'] * 5) + - (((df_data['Stuff L'] / 3) + (df_data['Stuff R'] * (2 / 3))) * 4) + - (((df_data['Control L'] / 3) + (df_data['Control R'] * (2 / 3))) * 2) + ( + ( + df_data["H/9"] + + df_data["K/9"] + + df_data["BB/9"] + + df_data["HR/9"] + ) + * 5 + ) + + (df_data["Fielding"]) + + (df_data["Stamina"] * 5) + + (((df_data["Stuff L"] / 3) + (df_data["Stuff R"] * (2 / 3))) * 4) + + (((df_data["Control L"] / 3) + (df_data["Control R"] * (2 / 3))) * 2) ) raw_series = raw_data.apply(get_raw_rating, axis=1) rank_series = raw_series.rank(pct=True) - raw_data['Rating'] = round(rank_series * 100) + raw_data["Rating"] = round(rank_series * 100) - output = raw_data[[ - 'player_id', 'player_name', 'Rating', 'Control R', 'Control L', 'Stuff R', 'Stuff L', 'Stamina', 'Fielding', - 'H/9', 'K/9', 'BB/9', 'HR/9', 'hand', 'cardset_name' - ]] + output = raw_data[ + [ + "player_id", + "player_name", + "Rating", + "Control R", + "Control L", + "Stuff R", + "Stuff L", + "Stamina", + "Fielding", + 
"H/9", + "K/9", + "BB/9", + "HR/9", + "hand", + "cardset_name", + ] + ] csv_file = pd.DataFrame(output).to_csv(index=False) - with open(BASIC_FILE, 'w') as file: + with open(BASIC_FILE, "w") as file: file.write(csv_file) - return Response(content=csv_file, media_type='text/csv') + return Response(content=csv_file, media_type="text/csv") -@router.get('/{ratings_id}') +@router.get("/{ratings_id}") async def get_one_rating(ratings_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to pull card ratings.' + status_code=401, detail="You are not authorized to pull card ratings." ) this_rating = PitchingCardRatings.get_or_none(PitchingCardRatings.id == ratings_id) if this_rating is None: db.close() - raise HTTPException(status_code=404, detail=f'PitchingCardRating id {ratings_id} not found') + raise HTTPException( + status_code=404, detail=f"PitchingCardRating id {ratings_id} not found" + ) r_data = model_to_dict(this_rating) db.close() return r_data -@router.get('/player/{player_id}') -async def get_player_ratings(player_id: int, variant: list = Query(default=None), short_output: bool = False): - all_cards = PitchingCard.select().where(PitchingCard.player_id == player_id).order_by(PitchingCard.variant) +@router.get("/player/{player_id}") +async def get_player_ratings( + player_id: int, variant: list = Query(default=None), short_output: bool = False +): + all_cards = ( + PitchingCard.select() + .where(PitchingCard.player_id == player_id) + .order_by(PitchingCard.variant) + ) if variant is not None: all_cards = all_cards.where(PitchingCard.variant << variant) - all_ratings = PitchingCardRatings.select().where(PitchingCardRatings.pitchingcard << all_cards) + all_ratings = PitchingCardRatings.select().where( + PitchingCardRatings.pitchingcard << all_cards + ) - return_val = {'count': all_ratings.count(), 'ratings': [ - model_to_dict(x, recurse=not short_output) for x in all_ratings - ]} + return_val = { + "count": all_ratings.count(), + "ratings": [model_to_dict(x, recurse=not short_output) for x in all_ratings], + } db.close() return return_val -@router.put('') +@router.put("") async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to post card ratings.' + status_code=401, detail="You are not authorized to post card ratings." 
) new_ratings = [] @@ -419,43 +553,50 @@ async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme)) for x in ratings.ratings: try: PitchingCardRatings.get( - (PitchingCardRatings.pitchingcard_id == x.pitchingcard_id) & (PitchingCardRatings.vs_hand == x.vs_hand) + (PitchingCardRatings.pitchingcard_id == x.pitchingcard_id) + & (PitchingCardRatings.vs_hand == x.vs_hand) + ) + updates += ( + PitchingCardRatings.update(x.dict()) + .where( + (PitchingCardRatings.pitchingcard_id == x.pitchingcard_id) + & (PitchingCardRatings.vs_hand == x.vs_hand) + ) + .execute() ) - updates += PitchingCardRatings.update(x.dict()).where( - (PitchingCardRatings.pitchingcard_id == x.pitchingcard_id) & (PitchingCardRatings.vs_hand == x.vs_hand) - ).execute() except PitchingCardRatings.DoesNotExist: new_ratings.append(x.dict()) with db.atomic(): - for batch in chunked(new_ratings, 30): - PitchingCardRatings.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_pitching_card_ratings(new_ratings, batch_size=30) db.close() - return f'Updated ratings: {updates}; new ratings: {len(new_ratings)}' + return f"Updated ratings: {updates}; new ratings: {len(new_ratings)}" -@router.delete('/{ratings_id}') -async def delete_rating( - ratings_id: int, token: str = Depends(oauth2_scheme)): +@router.delete("/{ratings_id}") +async def delete_rating(ratings_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( - status_code=401, - detail='You are not authorized to post card ratings.' + status_code=401, detail="You are not authorized to post card ratings." ) this_rating = PitchingCardRatings.get_or_none(PitchingCardRatings.id == ratings_id) if this_rating is None: db.close() - raise HTTPException(status_code=404, detail=f'PitchingCardRating id {ratings_id} not found') + raise HTTPException( + status_code=404, detail=f"PitchingCardRating id {ratings_id} not found" + ) count = this_rating.delete_instance() db.close() if count == 1: - return f'Rating {this_rating} has been deleted' + return f"Rating {this_rating} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Rating {this_rating} could not be deleted') - + raise HTTPException( + status_code=500, detail=f"Rating {this_rating} could not be deleted" + ) diff --git a/app/routers_v2/pitchingcards.py b/app/routers_v2/pitchingcards.py index 77afd6c..784f347 100644 --- a/app/routers_v2/pitchingcards.py +++ b/app/routers_v2/pitchingcards.py @@ -6,18 +6,16 @@ import logging import pydantic from ..db_engine import db, PitchingCard, model_to_dict, chunked, Player, fn, MlbPlayer +from ..db_helpers import upsert_pitching_cards from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/pitchingcards', - tags=['pitchingcards'] -) +router = APIRouter(prefix="/api/v2/pitchingcards", tags=["pitchingcards"]) class PitchingCardModel(pydantic.BaseModel): @@ -31,17 +29,21 @@ class PitchingCardModel(pydantic.BaseModel): closer_rating: int = None batting: str = "#1WR-C" offense_col: int = None - hand: Literal['R', 'L', 'S'] = 'R' + hand: Literal["R", "L", "S"] = "R" class PitchingCardList(pydantic.BaseModel): cards: List[PitchingCardModel] 
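Several routers in this patch replace insert_many(...).on_conflict_replace() with helpers imported from db_helpers (upsert_mlb_players, upsert_pitching_card_ratings, upsert_pitching_cards above). The helpers' implementation does not appear in this part of the diff, so the following is only a sketch of the kind of Peewee upsert such a helper could wrap; the function name, the (player_id, variant) conflict target with its assumed unique index, and the preserved columns are illustrative assumptions, not the actual code:

from peewee import chunked

from app.db_engine import PitchingCard  # same model imported at the top of this router


def upsert_pitching_cards_sketch(rows, batch_size=30):
    # Batched upsert: insert each row of dicts; on a (player_id, variant)
    # collision, overwrite the listed columns with the incoming values
    # (ON CONFLICT ... DO UPDATE SET col = EXCLUDED.col on PostgreSQL).
    for batch in chunked(rows, batch_size):
        (
            PitchingCard.insert_many(batch)
            .on_conflict(
                conflict_target=[PitchingCard.player_id, PitchingCard.variant],
                preserve=[
                    PitchingCard.balk,
                    PitchingCard.wild_pitch,
                    PitchingCard.hold,
                    PitchingCard.batting,
                ],
            )
            .execute()
        )
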
-@router.get('') +@router.get("") async def get_pitching_cards( - player_id: list = Query(default=None), player_name: list = Query(default=None), - cardset_id: list = Query(default=None), short_output: bool = False, limit: Optional[int] = None): + player_id: list = Query(default=None), + player_name: list = Query(default=None), + cardset_id: list = Query(default=None), + short_output: bool = False, + limit: Optional[int] = None, +): all_cards = PitchingCard.select() if player_id is not None: all_cards = all_cards.where(PitchingCard.player_id << player_id) @@ -56,46 +58,56 @@ async def get_pitching_cards( if limit is not None: all_cards = all_cards.limit(limit) - return_val = {'count': all_cards.count(), 'cards': [ - model_to_dict(x, recurse=not short_output) for x in all_cards - ]} + return_val = { + "count": all_cards.count(), + "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards], + } db.close() return return_val -@router.get('/{card_id}') +@router.get("/{card_id}") async def get_one_card(card_id: int): this_card = PitchingCard.get_or_none(PitchingCard.id == card_id) if this_card is None: db.close() - raise HTTPException(status_code=404, detail=f'PitchingCard id {card_id} not found') + raise HTTPException( + status_code=404, detail=f"PitchingCard id {card_id} not found" + ) r_card = model_to_dict(this_card) db.close() return r_card -@router.get('/player/{player_id}') -async def get_player_cards(player_id: int, variant: list = Query(default=None), short_output: bool = False): - all_cards = PitchingCard.select().where(PitchingCard.player_id == player_id).order_by(PitchingCard.variant) +@router.get("/player/{player_id}") +async def get_player_cards( + player_id: int, variant: list = Query(default=None), short_output: bool = False +): + all_cards = ( + PitchingCard.select() + .where(PitchingCard.player_id == player_id) + .order_by(PitchingCard.variant) + ) if variant is not None: all_cards = all_cards.where(PitchingCard.variant << variant) - return_val = {'count': all_cards.count(), 'cards': [ - model_to_dict(x, recurse=not short_output) for x in all_cards - ]} + return_val = { + "count": all_cards.count(), + "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards], + } db.close() return return_val -@router.put('') +@router.put("") async def put_cards(cards: PitchingCardList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post pitching cards. This event has been logged.' + detail="You are not authorized to post pitching cards. 
This event has been logged.", ) new_cards = [] @@ -104,52 +116,73 @@ async def put_cards(cards: PitchingCardList, token: str = Depends(oauth2_scheme) for x in cards.cards: try: old = PitchingCard.get( - (PitchingCard.player_id == x.player_id) & (PitchingCard.variant == x.variant) + (PitchingCard.player_id == x.player_id) + & (PitchingCard.variant == x.variant) ) if x.offense_col is None: x.offense_col = old.offense_col - updates += PitchingCard.update(x.dict()).where( - (PitchingCard.player_id == x.player_id) & (PitchingCard.variant == x.variant) - ).execute() + updates += ( + PitchingCard.update(x.dict()) + .where( + (PitchingCard.player_id == x.player_id) + & (PitchingCard.variant == x.variant) + ) + .execute() + ) except PitchingCard.DoesNotExist: if x.offense_col is None: this_player = Player.get_or_none(Player.player_id == x.player_id) - mlb_player = MlbPlayer.get_or_none(MlbPlayer.key_bbref == this_player.bbref_id) + mlb_player = MlbPlayer.get_or_none( + MlbPlayer.key_bbref == this_player.bbref_id + ) if mlb_player is not None: - logging.info(f'setting offense_col to {mlb_player.offense_col} for {this_player.p_name}') + logging.info( + f"setting offense_col to {mlb_player.offense_col} for {this_player.p_name}" + ) x.offense_col = mlb_player.offense_col else: - logging.info(f'randomly setting offense_col for {this_player.p_name}') + logging.info( + f"randomly setting offense_col for {this_player.p_name}" + ) x.offense_col = random.randint(1, 3) - logging.debug(f'x.dict(): {x.dict()}') + logging.debug(f"x.dict(): {x.dict()}") new_cards.append(x.dict()) with db.atomic(): - for batch in chunked(new_cards, 30): - PitchingCard.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_pitching_cards(new_cards, batch_size=30) db.close() - return f'Updated cards: {updates}; new cards: {len(new_cards)}' + return f"Updated cards: {updates}; new cards: {len(new_cards)}" -@router.patch('/{card_id}') +@router.patch("/{card_id}") async def patch_card( - card_id: int, balk: Optional[int] = None, wild_pitch: Optional[int] = None, hold: Optional[int] = None, - starter_rating: Optional[int] = None, relief_rating: Optional[int] = None, closer_rating: Optional[int] = None, - batting: Optional[int] = None, token: str = Depends(oauth2_scheme)): + card_id: int, + balk: Optional[int] = None, + wild_pitch: Optional[int] = None, + hold: Optional[int] = None, + starter_rating: Optional[int] = None, + relief_rating: Optional[int] = None, + closer_rating: Optional[int] = None, + batting: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch pitching cards. This event has been logged.' + detail="You are not authorized to patch pitching cards. 
This event has been logged.", ) this_card = PitchingCard.get_or_none(PitchingCard.id == card_id) if this_card is None: db.close() - raise HTTPException(status_code=404, detail=f'PitchingCard id {card_id} not found') + raise HTTPException( + status_code=404, detail=f"PitchingCard id {card_id} not found" + ) if balk is not None: this_card.balk = balk @@ -174,45 +207,47 @@ async def patch_card( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that card' + detail="Well slap my ass and call me a teapot; I could not save that card", ) -@router.delete('/{card_id}') +@router.delete("/{card_id}") async def delete_card(card_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete pitching cards. This event has been logged.' + detail="You are not authorized to delete pitching cards. This event has been logged.", ) this_card = PitchingCard.get_or_none(PitchingCard.id == card_id) if this_card is None: db.close() - raise HTTPException(status_code=404, detail=f'Pitching id {card_id} not found') + raise HTTPException(status_code=404, detail=f"Pitching id {card_id} not found") count = this_card.delete_instance() db.close() if count == 1: - return f'Card {this_card} has been deleted' + return f"Card {this_card} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Card {this_card} could not be deleted') + raise HTTPException( + status_code=500, detail=f"Card {this_card} could not be deleted" + ) -@router.delete('') +@router.delete("") async def delete_all_cards(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete pitching cards. This event has been logged.' + detail="You are not authorized to delete pitching cards. 
This event has been logged.", ) d_query = PitchingCard.delete() d_query.execute() - return f'Deleted {d_query.count()} pitching cards' + return f"Deleted {d_query.count()} pitching cards" diff --git a/app/routers_v2/players.py b/app/routers_v2/players.py index bd81765..13dca54 100644 --- a/app/routers_v2/players.py +++ b/app/routers_v2/players.py @@ -14,44 +14,59 @@ from pandas import DataFrame from playwright.async_api import async_playwright from ..card_creation import get_batter_card_data, get_pitcher_card_data -from ..db_engine import db, Player, model_to_dict, fn, chunked, Paperdex, Cardset, Rarity, BattingCard, \ - BattingCardRatings, PitchingCard, PitchingCardRatings, CardPosition, MlbPlayer +from ..db_engine import ( + db, + Player, + model_to_dict, + fn, + chunked, + Paperdex, + Cardset, + Rarity, + BattingCard, + BattingCardRatings, + PitchingCard, + PitchingCardRatings, + CardPosition, + MlbPlayer, +) +from ..db_helpers import upsert_players from ..dependencies import oauth2_scheme, valid_token, LOG_DATA # Franchise normalization: Convert city+team names to city-agnostic team names # This enables cross-era player matching (e.g., 'Oakland Athletics' -> 'Athletics') FRANCHISE_NORMALIZE = { - 'Arizona Diamondbacks': 'Diamondbacks', - 'Atlanta Braves': 'Braves', - 'Baltimore Orioles': 'Orioles', - 'Boston Red Sox': 'Red Sox', - 'Chicago Cubs': 'Cubs', - 'Chicago White Sox': 'White Sox', - 'Cincinnati Reds': 'Reds', - 'Cleveland Guardians': 'Guardians', - 'Colorado Rockies': 'Rockies', - 'Detroit Tigers': 'Tigers', - 'Houston Astros': 'Astros', - 'Kansas City Royals': 'Royals', - 'Los Angeles Angels': 'Angels', - 'Los Angeles Dodgers': 'Dodgers', - 'Miami Marlins': 'Marlins', - 'Milwaukee Brewers': 'Brewers', - 'Minnesota Twins': 'Twins', - 'New York Mets': 'Mets', - 'New York Yankees': 'Yankees', - 'Oakland Athletics': 'Athletics', - 'Philadelphia Phillies': 'Phillies', - 'Pittsburgh Pirates': 'Pirates', - 'San Diego Padres': 'Padres', - 'San Francisco Giants': 'Giants', - 'Seattle Mariners': 'Mariners', - 'St Louis Cardinals': 'Cardinals', - 'St. Louis Cardinals': 'Cardinals', - 'Tampa Bay Rays': 'Rays', - 'Texas Rangers': 'Rangers', - 'Toronto Blue Jays': 'Blue Jays', - 'Washington Nationals': 'Nationals', + "Arizona Diamondbacks": "Diamondbacks", + "Atlanta Braves": "Braves", + "Baltimore Orioles": "Orioles", + "Boston Red Sox": "Red Sox", + "Chicago Cubs": "Cubs", + "Chicago White Sox": "White Sox", + "Cincinnati Reds": "Reds", + "Cleveland Guardians": "Guardians", + "Colorado Rockies": "Rockies", + "Detroit Tigers": "Tigers", + "Houston Astros": "Astros", + "Kansas City Royals": "Royals", + "Los Angeles Angels": "Angels", + "Los Angeles Dodgers": "Dodgers", + "Miami Marlins": "Marlins", + "Milwaukee Brewers": "Brewers", + "Minnesota Twins": "Twins", + "New York Mets": "Mets", + "New York Yankees": "Yankees", + "Oakland Athletics": "Athletics", + "Philadelphia Phillies": "Phillies", + "Pittsburgh Pirates": "Pirates", + "San Diego Padres": "Padres", + "San Francisco Giants": "Giants", + "Seattle Mariners": "Mariners", + "St Louis Cardinals": "Cardinals", + "St. 
Louis Cardinals": "Cardinals", + "Tampa Bay Rays": "Rays", + "Texas Rangers": "Rangers", + "Toronto Blue Jays": "Blue Jays", + "Washington Nationals": "Nationals", } @@ -62,15 +77,12 @@ def normalize_franchise(franchise: str) -> str: logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/players', - tags=['players'] -) +router = APIRouter(prefix="/api/v2/players", tags=["players"]) templates = Jinja2Templates(directory="storage/templates") @@ -109,21 +121,39 @@ class PlayerModel(pydantic.BaseModel): players: List[PlayerPydantic] -@router.get('') +@router.get("") async def get_players( - name: Optional[str] = None, value: Optional[int] = None, min_cost: Optional[int] = None, - max_cost: Optional[int] = None, has_image2: Optional[bool] = None, mlbclub: Optional[str] = None, - franchise: Optional[str] = None, cardset_id: list = Query(default=None), rarity_id: list = Query(default=None), - pos_include: list = Query(default=None), pos_exclude: list = Query(default=None), has_headshot: Optional[bool] = None, - has_vanity_card: Optional[bool] = None, strat_code: Optional[str] = None, bbref_id: Optional[str] = None, - fangr_id: Optional[str] = None, inc_dex: Optional[bool] = True, in_desc: Optional[str] = None, - flat: Optional[bool] = False, sort_by: Optional[str] = False, cardset_id_exclude: list = Query(default=None), - limit: Optional[int] = None, csv: Optional[bool] = None, short_output: Optional[bool] = False, mlbplayer_id: Optional[int] = None, - inc_keys: Optional[bool] = False): + name: Optional[str] = None, + value: Optional[int] = None, + min_cost: Optional[int] = None, + max_cost: Optional[int] = None, + has_image2: Optional[bool] = None, + mlbclub: Optional[str] = None, + franchise: Optional[str] = None, + cardset_id: list = Query(default=None), + rarity_id: list = Query(default=None), + pos_include: list = Query(default=None), + pos_exclude: list = Query(default=None), + has_headshot: Optional[bool] = None, + has_vanity_card: Optional[bool] = None, + strat_code: Optional[str] = None, + bbref_id: Optional[str] = None, + fangr_id: Optional[str] = None, + inc_dex: Optional[bool] = True, + in_desc: Optional[str] = None, + flat: Optional[bool] = False, + sort_by: Optional[str] = False, + cardset_id_exclude: list = Query(default=None), + limit: Optional[int] = None, + csv: Optional[bool] = None, + short_output: Optional[bool] = False, + mlbplayer_id: Optional[int] = None, + inc_keys: Optional[bool] = False, +): all_players = Player.select() if all_players.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no players to filter') + raise HTTPException(status_code=404, detail=f"There are no players to filter") if name is not None: all_players = all_players.where(fn.Lower(Player.p_name) == name.lower()) @@ -148,8 +178,14 @@ async def get_players( if pos_include is not None: p_list = [x.upper() for x in pos_include] all_players = all_players.where( - (Player.pos_1 << p_list) | (Player.pos_2 << p_list) | (Player.pos_3 << p_list) | (Player.pos_4 << p_list) | - (Player.pos_5 << p_list) | (Player.pos_6 << p_list) | (Player.pos_7 << p_list) | (Player.pos_8 << p_list) + (Player.pos_1 << p_list) + | (Player.pos_2 << p_list) + | (Player.pos_3 << p_list) + | (Player.pos_4 << p_list) + | (Player.pos_5 << p_list) + | (Player.pos_6 << p_list) + | (Player.pos_7 << p_list) + | 
(Player.pos_8 << p_list) ) if has_headshot is not None: all_players = all_players.where(Player.headshot.is_null(not has_headshot)) @@ -164,26 +200,30 @@ async def get_players( if mlbplayer_id is not None: all_players = all_players.where(Player.mlbplayer_id == mlbplayer_id) if in_desc is not None: - all_players = all_players.where(fn.Lower(Player.description).contains(in_desc.lower())) + all_players = all_players.where( + fn.Lower(Player.description).contains(in_desc.lower()) + ) if sort_by is not None: - if sort_by == 'cost-desc': + if sort_by == "cost-desc": all_players = all_players.order_by(-Player.cost) - elif sort_by == 'cost-asc': + elif sort_by == "cost-asc": all_players = all_players.order_by(Player.cost) - elif sort_by == 'name-asc': + elif sort_by == "name-asc": all_players = all_players.order_by(Player.p_name) - elif sort_by == 'name-desc': + elif sort_by == "name-desc": all_players = all_players.order_by(-Player.p_name) - elif sort_by == 'rarity-desc': + elif sort_by == "rarity-desc": all_players = all_players.order_by(Player.rarity) - elif sort_by == 'rarity-asc': + elif sort_by == "rarity-asc": all_players = all_players.order_by(-Player.rarity) final_players = [] # logging.info(f'pos_exclude: {type(pos_exclude)} - {pos_exclude} - is None: {pos_exclude is None}') for x in all_players: - if pos_exclude is not None and set([x.upper() for x in pos_exclude]).intersection(x.get_all_pos()): + if pos_exclude is not None and set( + [x.upper() for x in pos_exclude] + ).intersection(x.get_all_pos()): pass else: final_players.append(x) @@ -200,21 +240,46 @@ async def get_players( db.close() for x in card_vals: - x['player_name'] = x['p_name'] - x['cardset_name'] = x['cardset']['name'] - x['rarity'] = x['rarity']['name'] - x['for_purchase'] = x['cardset']['for_purchase'] - x['ranked_legal'] = x['cardset']['ranked_legal'] - if x['player_name'] not in x['description']: - x['description'] = f'{x["description"]} {x["player_name"]}' + x["player_name"] = x["p_name"] + x["cardset_name"] = x["cardset"]["name"] + x["rarity"] = x["rarity"]["name"] + x["for_purchase"] = x["cardset"]["for_purchase"] + x["ranked_legal"] = x["cardset"]["ranked_legal"] + if x["player_name"] not in x["description"]: + x["description"] = f"{x['description']} {x['player_name']}" card_df = pd.DataFrame(card_vals) - output = card_df[[ - 'player_id', 'player_name', 'cost', 'image', 'image2', 'mlbclub', 'franchise', 'cardset_name', 'rarity', - 'pos_1', 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', 'pos_7', 'pos_8', 'headshot', 'vanity_card', - 'fangr_id', 'bbref_id', 'description', 'for_purchase', 'ranked_legal' - ]] - return Response(content=pd.DataFrame(output).to_csv(index=False), media_type='text/csv') + output = card_df[ + [ + "player_id", + "player_name", + "cost", + "image", + "image2", + "mlbclub", + "franchise", + "cardset_name", + "rarity", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "headshot", + "vanity_card", + "fangr_id", + "bbref_id", + "description", + "for_purchase", + "ranked_legal", + ] + ] + return Response( + content=pd.DataFrame(output).to_csv(index=False), media_type="text/csv" + ) # all_players.order_by(-Player.rarity.value, Player.p_name) # data_list = [['id', 'name', 'value', 'image', 'image2', 'mlbclub', 'franchise', 'cardset', 'rarity', 'pos_1', @@ -236,27 +301,28 @@ async def get_players( # return Response(content=return_val, media_type='text/csv') else: - return_val = {'count': len(final_players), 'players': []} + return_val = {"count": 
len(final_players), "players": []} for x in final_players: - this_record = model_to_dict(x, recurse=not (flat or short_output)) if inc_dex: this_dex = Paperdex.select().where(Paperdex.player == x) - this_record['paperdex'] = {'count': this_dex.count(), 'paperdex': []} + this_record["paperdex"] = {"count": this_dex.count(), "paperdex": []} for y in this_dex: - this_record['paperdex']['paperdex'].append(model_to_dict(y, recurse=False)) + this_record["paperdex"]["paperdex"].append( + model_to_dict(y, recurse=False) + ) if inc_keys and (flat or short_output): - if this_record['mlbplayer'] is not None: - this_mlb = MlbPlayer.get_by_id(this_record['mlbplayer']) - this_record['key_mlbam'] = this_mlb.key_mlbam - this_record['key_fangraphs'] = this_mlb.key_fangraphs - this_record['key_bbref'] = this_mlb.key_bbref - this_record['key_retro'] = this_mlb.key_retro - this_record['offense_col'] = this_mlb.offense_col + if this_record["mlbplayer"] is not None: + this_mlb = MlbPlayer.get_by_id(this_record["mlbplayer"]) + this_record["key_mlbam"] = this_mlb.key_mlbam + this_record["key_fangraphs"] = this_mlb.key_fangraphs + this_record["key_bbref"] = this_mlb.key_bbref + this_record["key_retro"] = this_mlb.key_retro + this_record["offense_col"] = this_mlb.offense_col - return_val['players'].append(this_record) + return_val["players"].append(this_record) # return_val['players'].append(model_to_dict(x, recurse=not flat)) @@ -264,19 +330,26 @@ async def get_players( return return_val -@router.get('/random') +@router.get("/random") async def get_random_player( - min_cost: Optional[int] = None, max_cost: Optional[int] = None, in_packs: Optional[bool] = None, - min_rarity: Optional[int] = None, max_rarity: Optional[int] = None, limit: Optional[int] = None, - pos_include: Optional[str] = None, pos_exclude: Optional[str] = None, franchise: Optional[str] = None, - mlbclub: Optional[str] = None, cardset_id: list = Query(default=None), pos_inc: list = Query(default=None), - pos_exc: list = Query(default=None), csv: Optional[bool] = None): - all_players = (Player - .select() - .join(Cardset) - .switch(Player) - .join(Rarity) - .order_by(fn.Random())) + min_cost: Optional[int] = None, + max_cost: Optional[int] = None, + in_packs: Optional[bool] = None, + min_rarity: Optional[int] = None, + max_rarity: Optional[int] = None, + limit: Optional[int] = None, + pos_include: Optional[str] = None, + pos_exclude: Optional[str] = None, + franchise: Optional[str] = None, + mlbclub: Optional[str] = None, + cardset_id: list = Query(default=None), + pos_inc: list = Query(default=None), + pos_exc: list = Query(default=None), + csv: Optional[bool] = None, +): + all_players = ( + Player.select().join(Cardset).switch(Player).join(Rarity).order_by(fn.Random()) + ) if min_cost is not None: all_players = all_players.where(Player.cost >= min_cost) @@ -291,10 +364,14 @@ async def get_random_player( all_players = all_players.where(Player.rarity.value <= max_rarity) if pos_include is not None: all_players = all_players.where( - (fn.lower(Player.pos_1) == pos_include.lower()) | (fn.lower(Player.pos_2) == pos_include.lower()) | - (fn.lower(Player.pos_3) == pos_include.lower()) | (fn.lower(Player.pos_4) == pos_include.lower()) | - (fn.lower(Player.pos_5) == pos_include.lower()) | (fn.lower(Player.pos_6) == pos_include.lower()) | - (fn.lower(Player.pos_7) == pos_include.lower()) | (fn.lower(Player.pos_8) == pos_include.lower()) + (fn.lower(Player.pos_1) == pos_include.lower()) + | (fn.lower(Player.pos_2) == pos_include.lower()) + | 
(fn.lower(Player.pos_3) == pos_include.lower()) + | (fn.lower(Player.pos_4) == pos_include.lower()) + | (fn.lower(Player.pos_5) == pos_include.lower()) + | (fn.lower(Player.pos_6) == pos_include.lower()) + | (fn.lower(Player.pos_7) == pos_include.lower()) + | (fn.lower(Player.pos_8) == pos_include.lower()) ) if franchise is not None: all_players = all_players.where(fn.Lower(Player.franchise) == franchise.lower()) @@ -305,8 +382,14 @@ async def get_random_player( if pos_inc is not None: p_list = [x.upper() for x in pos_inc] all_players = all_players.where( - (Player.pos_1 << p_list) | (Player.pos_2 << p_list) | (Player.pos_3 << p_list) | (Player.pos_4 << p_list) | - (Player.pos_5 << p_list) | (Player.pos_6 << p_list) | (Player.pos_7 << p_list) | (Player.pos_8 << p_list) + (Player.pos_1 << p_list) + | (Player.pos_2 << p_list) + | (Player.pos_3 << p_list) + | (Player.pos_4 << p_list) + | (Player.pos_5 << p_list) + | (Player.pos_6 << p_list) + | (Player.pos_7 << p_list) + | (Player.pos_8 << p_list) ) # if pos_exc is not None: # p_list = [x.upper() for x in pos_exc] @@ -339,49 +422,96 @@ async def get_random_player( # raise HTTPException(status_code=404, detail=f'No players found') if csv: - data_list = [['id', 'name', 'cost', 'image', 'image2', 'mlbclub', 'franchise', 'cardset', 'rarity', 'pos_1', - 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', 'pos_7', 'pos_8', 'headshot', 'vanity_card', - 'strat_code', 'bbref_id', 'description']] + data_list = [ + [ + "id", + "name", + "cost", + "image", + "image2", + "mlbclub", + "franchise", + "cardset", + "rarity", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "headshot", + "vanity_card", + "strat_code", + "bbref_id", + "description", + ] + ] for line in final_players: data_list.append( [ - line.id, line.p_name, line.cost, line.image, line.image2, - line.mlbclub, line.franchise, line.cardset.name, line.rarity.name, - line.pos_1, line.pos_2, line.pos_3, line.pos_4, line.pos_5, - line.pos_6, line.pos_7, line.pos_8, line.headshot, line.vanity_card, - line.strat_code, line.bbref_id, line.description + line.id, + line.p_name, + line.cost, + line.image, + line.image2, + line.mlbclub, + line.franchise, + line.cardset.name, + line.rarity.name, + line.pos_1, + line.pos_2, + line.pos_3, + line.pos_4, + line.pos_5, + line.pos_6, + line.pos_7, + line.pos_8, + line.headshot, + line.vanity_card, + line.strat_code, + line.bbref_id, + line.description, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': len(final_players), 'players': []} + return_val = {"count": len(final_players), "players": []} for x in final_players: this_record = model_to_dict(x) this_dex = Paperdex.select().where(Paperdex.player == x) - this_record['paperdex'] = {'count': this_dex.count(), 'paperdex': []} + this_record["paperdex"] = {"count": this_dex.count(), "paperdex": []} for y in this_dex: - this_record['paperdex']['paperdex'].append(model_to_dict(y, recurse=False)) + this_record["paperdex"]["paperdex"].append( + model_to_dict(y, recurse=False) + ) - return_val['players'].append(this_record) + return_val["players"].append(this_record) # return_val['players'].append(model_to_dict(x)) db.close() return return_val -@router.get('/search') +@router.get("/search") async def search_players( - q: str = Query(..., description="Search query for player name"), - cardset_id: list = 
Query(default=None), - rarity_id: list = Query(default=None), - limit: int = Query(default=25, ge=1, le=100, description="Maximum number of results to return"), - unique_names: bool = Query(default=False, description="Return only unique player names (highest player_id)"), - short_output: bool = False): + q: str = Query(..., description="Search query for player name"), + cardset_id: list = Query(default=None), + rarity_id: list = Query(default=None), + limit: int = Query( + default=25, ge=1, le=100, description="Maximum number of results to return" + ), + unique_names: bool = Query( + default=False, description="Return only unique player names (highest player_id)" + ), + short_output: bool = False, +): """ Real-time fuzzy search for players by name. @@ -432,7 +562,10 @@ async def search_players( seen_names = {} for player in results: name_lower = player.p_name.lower() - if name_lower not in seen_names or player.player_id > seen_names[name_lower].player_id: + if ( + name_lower not in seen_names + or player.player_id > seen_names[name_lower].player_id + ): seen_names[name_lower] = player results = list(seen_names.values()) @@ -441,9 +574,9 @@ async def search_players( # Build response return_val = { - 'count': len(limited_results), - 'total_matches': total_matches, - 'players': [] + "count": len(limited_results), + "total_matches": total_matches, + "players": [], } for x in limited_results: @@ -454,111 +587,202 @@ async def search_players( # for y in this_dex: # this_record['paperdex']['paperdex'].append(model_to_dict(y, recurse=False)) - return_val['players'].append(this_record) + return_val["players"].append(this_record) db.close() return return_val -@router.get('/{player_id}') +@router.get("/{player_id}") async def get_one_player(player_id, csv: Optional[bool] = False): try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) if csv: - data_list = [['id', 'name', 'cost', 'image', 'image2', 'mlbclub', 'franchise', 'cardset', 'rarity', 'pos_1', - 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', 'pos_7', 'pos_8', 'headshot', 'vanity_card', - 'strat_code', 'bbref_id', 'description']] + data_list = [ + [ + "id", + "name", + "cost", + "image", + "image2", + "mlbclub", + "franchise", + "cardset", + "rarity", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "headshot", + "vanity_card", + "strat_code", + "bbref_id", + "description", + ] + ] return_val = DataFrame(data_list).to_csv(header=False, index=False) data_list.append( [ - this_player.id, this_player.p_name, this_player.cost, this_player.image, this_player.image2, - this_player.mlbclub, this_player.franchise, this_player.cardset.name, this_player.rarity.name, - this_player.pos_1, this_player.pos_2, this_player.pos_3, this_player.pos_4, this_player.pos_5, - this_player.pos_6, this_player.pos_7, this_player.pos_8, this_player.headshot, this_player.vanity_card, - this_player.strat_code, this_player.bbref_id, this_player.description + this_player.id, + this_player.p_name, + this_player.cost, + this_player.image, + this_player.image2, + this_player.mlbclub, + this_player.franchise, + this_player.cardset.name, + this_player.rarity.name, + this_player.pos_1, + this_player.pos_2, + this_player.pos_3, + this_player.pos_4, + this_player.pos_5, + this_player.pos_6, + this_player.pos_7, + this_player.pos_8, + 
this_player.headshot, + this_player.vanity_card, + this_player.strat_code, + this_player.bbref_id, + this_player.description, ] ) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_player) this_dex = Paperdex.select().where(Paperdex.player == this_player) - return_val['paperdex'] = {'count': this_dex.count(), 'paperdex': []} + return_val["paperdex"] = {"count": this_dex.count(), "paperdex": []} for x in this_dex: - return_val['paperdex']['paperdex'].append(model_to_dict(x, recurse=False)) + return_val["paperdex"]["paperdex"].append(model_to_dict(x, recurse=False)) db.close() return return_val -@router.get('/{player_id}/{card_type}card') -@router.get('/{player_id}/{card_type}card/{d}') -@router.get('/{player_id}/{card_type}card/{d}/{variant}') +@router.get("/{player_id}/{card_type}card") +@router.get("/{player_id}/{card_type}card/{d}") +@router.get("/{player_id}/{card_type}card/{d}/{variant}") async def get_batter_card( - request: Request, player_id: int, card_type: Literal['batting', 'pitching'], variant: int = 0, d: str = None, - html: Optional[bool] = False): + request: Request, + player_id: int, + card_type: Literal["batting", "pitching"], + variant: int = 0, + d: str = None, + html: Optional[bool] = False, +): try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') - - headers = {'Cache-Control': 'public, max-age=86400'} - filename = f'{this_player.description} {this_player.p_name} {card_type} {d}-v{variant}' - if os.path.isfile(f'storage/cards/cardset-{this_player.cardset.id}/{card_type}/{player_id}-{d}-v{variant}.png') and html is False: - db.close() - return FileResponse( - path=f'storage/cards/cardset-{this_player.cardset.id}/{card_type}/{player_id}-{d}-v{variant}.png', - media_type='image/png', - headers=headers + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" ) - all_pos = CardPosition.select().where(CardPosition.player == this_player).order_by(CardPosition.innings.desc()) + headers = {"Cache-Control": "public, max-age=86400"} + filename = ( + f"{this_player.description} {this_player.p_name} {card_type} {d}-v{variant}" + ) + if ( + os.path.isfile( + f"storage/cards/cardset-{this_player.cardset.id}/{card_type}/{player_id}-{d}-v{variant}.png" + ) + and html is False + ): + db.close() + return FileResponse( + path=f"storage/cards/cardset-{this_player.cardset.id}/{card_type}/{player_id}-{d}-v{variant}.png", + media_type="image/png", + headers=headers, + ) - if card_type == 'batting': - this_bc = BattingCard.get_or_none(BattingCard.player == this_player, BattingCard.variant == variant) + all_pos = ( + CardPosition.select() + .where(CardPosition.player == this_player) + .order_by(CardPosition.innings.desc()) + ) + + if card_type == "batting": + this_bc = BattingCard.get_or_none( + BattingCard.player == this_player, BattingCard.variant == variant + ) if this_bc is None: - raise HTTPException(status_code=404, detail=f'Batting card not found for id {player_id}, variant {variant}') + raise HTTPException( + status_code=404, + detail=f"Batting card not found for id {player_id}, variant {variant}", + ) rating_vl = BattingCardRatings.get_or_none( - BattingCardRatings.battingcard == this_bc, BattingCardRatings.vs_hand == 'L') + BattingCardRatings.battingcard == this_bc, BattingCardRatings.vs_hand == "L" + ) rating_vr = 
BattingCardRatings.get_or_none( - BattingCardRatings.battingcard == this_bc, BattingCardRatings.vs_hand == 'R') + BattingCardRatings.battingcard == this_bc, BattingCardRatings.vs_hand == "R" + ) if None in [rating_vr, rating_vl]: - raise HTTPException(status_code=404, detail=f'Ratings not found for batting card {this_bc.id}') + raise HTTPException( + status_code=404, + detail=f"Ratings not found for batting card {this_bc.id}", + ) - card_data = get_batter_card_data(this_player, this_bc, rating_vl, rating_vr, all_pos) + card_data = get_batter_card_data( + this_player, this_bc, rating_vl, rating_vr, all_pos + ) # Include Pokemon cardsets here to remove "Pokemon" from cardset name on card - if this_player.description in this_player.cardset.name and this_player.cardset.id not in [23]: - card_data['cardset_name'] = this_player.cardset.name + if ( + this_player.description in this_player.cardset.name + and this_player.cardset.id not in [23] + ): + card_data["cardset_name"] = this_player.cardset.name else: - card_data['cardset_name'] = this_player.description - card_data['request'] = request + card_data["cardset_name"] = this_player.description + card_data["request"] = request html_response = templates.TemplateResponse("player_card.html", card_data) else: - this_pc = PitchingCard.get_or_none(PitchingCard.player == this_player, PitchingCard.variant == variant) + this_pc = PitchingCard.get_or_none( + PitchingCard.player == this_player, PitchingCard.variant == variant + ) if this_pc is None: raise HTTPException( - status_code=404, detail=f'Pitching card not found for id {player_id}, variant {variant}') + status_code=404, + detail=f"Pitching card not found for id {player_id}, variant {variant}", + ) rating_vl = PitchingCardRatings.get_or_none( - PitchingCardRatings.pitchingcard == this_pc, PitchingCardRatings.vs_hand == 'L') + PitchingCardRatings.pitchingcard == this_pc, + PitchingCardRatings.vs_hand == "L", + ) rating_vr = PitchingCardRatings.get_or_none( - PitchingCardRatings.pitchingcard == this_pc, PitchingCardRatings.vs_hand == 'R') + PitchingCardRatings.pitchingcard == this_pc, + PitchingCardRatings.vs_hand == "R", + ) if None in [rating_vr, rating_vl]: - raise HTTPException(status_code=404, detail=f'Ratings not found for pitching card {this_pc.id}') + raise HTTPException( + status_code=404, + detail=f"Ratings not found for pitching card {this_pc.id}", + ) - card_data = get_pitcher_card_data(this_player, this_pc, rating_vl, rating_vr, all_pos) - if this_player.description in this_player.cardset.name and this_player.cardset.id not in [23]: - card_data['cardset_name'] = this_player.cardset.name + card_data = get_pitcher_card_data( + this_player, this_pc, rating_vl, rating_vr, all_pos + ) + if ( + this_player.description in this_player.cardset.name + and this_player.cardset.id not in [23] + ): + card_data["cardset_name"] = this_player.cardset.name else: - card_data['cardset_name'] = this_player.description - card_data['request'] = request + card_data["cardset_name"] = this_player.description + card_data["request"] = request html_response = templates.TemplateResponse("player_card.html", card_data) if html: @@ -566,30 +790,42 @@ async def get_batter_card( return html_response updates = 0 - if card_type == 'batting': - updates += BattingCardRatings.update(card_data['new_ratings_vl'].dict()).where( - (BattingCardRatings.id == rating_vl.id) - ).execute() - updates += BattingCardRatings.update(card_data['new_ratings_vr'].dict()).where( - (BattingCardRatings.id == rating_vr.id) - ).execute() + if 
card_type == "batting": + updates += ( + BattingCardRatings.update(card_data["new_ratings_vl"].dict()) + .where((BattingCardRatings.id == rating_vl.id)) + .execute() + ) + updates += ( + BattingCardRatings.update(card_data["new_ratings_vr"].dict()) + .where((BattingCardRatings.id == rating_vr.id)) + .execute() + ) else: - updates += PitchingCardRatings.update(card_data['new_ratings_vl'].dict()).where( - (PitchingCardRatings.id == rating_vl.id) - ).execute() - updates += PitchingCardRatings.update(card_data['new_ratings_vr'].dict()).where( - (PitchingCardRatings.id == rating_vr.id) - ).execute() + updates += ( + PitchingCardRatings.update(card_data["new_ratings_vl"].dict()) + .where((PitchingCardRatings.id == rating_vl.id)) + .execute() + ) + updates += ( + PitchingCardRatings.update(card_data["new_ratings_vr"].dict()) + .where((PitchingCardRatings.id == rating_vr.id)) + .execute() + ) - logging.debug(f'Rating updates: {updates}') - logging.debug(f'body:\n{html_response.body.decode("UTF-8")}') + logging.debug(f"Rating updates: {updates}") + logging.debug(f"body:\n{html_response.body.decode('UTF-8')}") - file_path = f'storage/cards/cardset-{this_player.cardset.id}/{card_type}/{player_id}-{d}-v{variant}.png' + file_path = f"storage/cards/cardset-{this_player.cardset.id}/{card_type}/{player_id}-{d}-v{variant}.png" async with async_playwright() as p: browser = await p.chromium.launch() page = await browser.new_page() await page.set_content(html_response.body.decode("UTF-8")) - await page.screenshot(path=file_path, type='png', clip={'x': 0.0, 'y': 0, 'width': 1200, 'height': 600}) + await page.screenshot( + path=file_path, + type="png", + clip={"x": 0.0, "y": 0, "width": 1200, "height": 600}, + ) await browser.close() # hti = Html2Image( @@ -606,7 +842,7 @@ async def get_batter_card( # ) db.close() - return FileResponse(path=file_path, media_type='image/png', headers=headers) + return FileResponse(path=file_path, media_type="image/png", headers=headers) # @router.get('/{player_id}/pitchingcard') @@ -614,29 +850,49 @@ async def get_batter_card( # request: Request, player_id: int, variant: int = 0, d: str = None, html: Optional[bool] = False) -@router.patch('/{player_id}') +@router.patch("/{player_id}") async def v1_players_patch( - player_id, name: Optional[str] = None, image: Optional[str] = None, image2: Optional[str] = None, - mlbclub: Optional[str] = None, franchise: Optional[str] = None, cardset_id: Optional[int] = None, - rarity_id: Optional[int] = None, pos_1: Optional[str] = None, pos_2: Optional[str] = None, - pos_3: Optional[str] = None, pos_4: Optional[str] = None, pos_5: Optional[str] = None, mlbplayer_id: Optional[int] = None, - pos_6: Optional[str] = None, pos_7: Optional[str] = None, pos_8: Optional[str] = None, - headshot: Optional[str] = None, vanity_card: Optional[str] = None, strat_code: Optional[str] = None, - bbref_id: Optional[str] = None, description: Optional[str] = None, cost: Optional[int] = None, - fangr_id: Optional[str] = None, token: str = Depends(oauth2_scheme)): + player_id, + name: Optional[str] = None, + image: Optional[str] = None, + image2: Optional[str] = None, + mlbclub: Optional[str] = None, + franchise: Optional[str] = None, + cardset_id: Optional[int] = None, + rarity_id: Optional[int] = None, + pos_1: Optional[str] = None, + pos_2: Optional[str] = None, + pos_3: Optional[str] = None, + pos_4: Optional[str] = None, + pos_5: Optional[str] = None, + mlbplayer_id: Optional[int] = None, + pos_6: Optional[str] = None, + pos_7: Optional[str] = None, + pos_8: 
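# Illustrative sketch, not part of the patch: the render path above is "serve the cached
# PNG if it exists, otherwise render the template and screenshot it with Playwright".
# Condensed into one helper; render_card_png() and its arguments are hypothetical, but
# the Playwright calls mirror the ones used in this endpoint.
import os
from playwright.async_api import async_playwright

async def render_card_png(html: str, file_path: str) -> str:
    """Render pre-built card HTML to a PNG once; later requests hit the file cache."""
    if os.path.isfile(file_path):  # cache hit: skip the headless browser entirely
        return file_path
    os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True)
    async with async_playwright() as p:
        browser = await p.chromium.launch()
        page = await browser.new_page()
        await page.set_content(html)
        await page.screenshot(
            path=file_path,
            type="png",
            clip={"x": 0, "y": 0, "width": 1200, "height": 600},
        )
        await browser.close()
    return file_path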
Optional[str] = None, + headshot: Optional[str] = None, + vanity_card: Optional[str] = None, + strat_code: Optional[str] = None, + bbref_id: Optional[str] = None, + description: Optional[str] = None, + cost: Optional[int] = None, + fangr_id: Optional[str] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch players. This event has been logged.' + detail="You are not authorized to patch players. This event has been logged.", ) try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) if cost is not None: this_player.cost = cost @@ -645,7 +901,7 @@ async def v1_players_patch( if image is not None: this_player.image = image if image2 is not None: - if image2.lower() == 'false': + if image2.lower() == "false": this_player.image2 = None else: this_player.image2 = image2 @@ -658,52 +914,56 @@ async def v1_players_patch( this_cardset = Cardset.get_by_id(cardset_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cardset found with id {cardset_id}') + raise HTTPException( + status_code=404, detail=f"No cardset found with id {cardset_id}" + ) this_player.cardset = this_cardset if rarity_id is not None: try: this_rarity = Rarity.get_by_id(rarity_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No rarity found with id {rarity_id}') + raise HTTPException( + status_code=404, detail=f"No rarity found with id {rarity_id}" + ) this_player.rarity = this_rarity if pos_1 is not None: - if pos_1 in ['None', 'False', '']: + if pos_1 in ["None", "False", ""]: this_player.pos_1 = None else: this_player.pos_1 = pos_1 if pos_2 is not None: - if pos_2 in ['None', 'False', '']: + if pos_2 in ["None", "False", ""]: this_player.pos_2 = None else: this_player.pos_2 = pos_2 if pos_3 is not None: - if pos_3 in ['None', 'False', '']: + if pos_3 in ["None", "False", ""]: this_player.pos_3 = None else: this_player.pos_3 = pos_3 if pos_4 is not None: - if pos_4 in ['None', 'False', '']: + if pos_4 in ["None", "False", ""]: this_player.pos_4 = None else: this_player.pos_4 = pos_4 if pos_5 is not None: - if pos_5 in ['None', 'False', '']: + if pos_5 in ["None", "False", ""]: this_player.pos_5 = None else: this_player.pos_5 = pos_5 if pos_6 is not None: - if pos_6 in ['None', 'False', '']: + if pos_6 in ["None", "False", ""]: this_player.pos_6 = None else: this_player.pos_6 = pos_6 if pos_7 is not None: - if pos_7 in ['None', 'False', '']: + if pos_7 in ["None", "False", ""]: this_player.pos_7 = None else: this_player.pos_7 = pos_7 if pos_8 is not None: - if pos_8 in ['None', 'False', '']: + if pos_8 in ["None", "False", ""]: this_player.pos_8 = None else: this_player.pos_8 = pos_8 @@ -729,18 +989,18 @@ async def v1_players_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@router.put('') +@router.put("") async def put_players(players: PlayerModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise 
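# Illustrative sketch, not part of the patch: the eight pos_N branches above apply the
# same normalisation rule; a hypothetical helper expressing it once:
from typing import Optional

def clean_pos(value: Optional[str]) -> Optional[str]:
    """Treat 'None', 'False' and '' as an explicit request to clear the position slot."""
    if value in ("None", "False", ""):
        return None
    return value

# e.g. if pos_3 is not None: this_player.pos_3 = clean_pos(pos_3)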
HTTPException( status_code=401, - detail='You are not authorized to post players. This event has been logged.' + detail="You are not authorized to post players. This event has been logged.", ) new_players = [] @@ -772,64 +1032,67 @@ async def put_players(players: PlayerModel, token: str = Depends(oauth2_scheme)) # description=x.description # ) # new_players.append(this_player) - new_players.append({ - 'player_id': x.player_id, - 'p_name': x.p_name, - 'cost': x.cost, - 'image': x.image, - 'image2': x.image2, - 'mlbclub': x.mlbclub.title(), - 'franchise': normalize_franchise(x.franchise), - 'cardset_id': x.cardset_id, - 'rarity_id': x.rarity_id, - 'set_num': x.set_num, - 'pos_1': x.pos_1, - 'pos_2': x.pos_2, - 'pos_3': x.pos_3, - 'pos_4': x.pos_4, - 'pos_5': x.pos_5, - 'pos_6': x.pos_6, - 'pos_7': x.pos_7, - 'pos_8': x.pos_8, - 'headshot': x.headshot, - 'vanity_card': x.vanity_card, - 'strat_code': x.strat_code, - 'fangr_id': x.fangr_id, - 'bbref_id': x.bbref_id, - 'description': x.description - }) + new_players.append( + { + "player_id": x.player_id, + "p_name": x.p_name, + "cost": x.cost, + "image": x.image, + "image2": x.image2, + "mlbclub": x.mlbclub.title(), + "franchise": normalize_franchise(x.franchise), + "cardset_id": x.cardset_id, + "rarity_id": x.rarity_id, + "set_num": x.set_num, + "pos_1": x.pos_1, + "pos_2": x.pos_2, + "pos_3": x.pos_3, + "pos_4": x.pos_4, + "pos_5": x.pos_5, + "pos_6": x.pos_6, + "pos_7": x.pos_7, + "pos_8": x.pos_8, + "headshot": x.headshot, + "vanity_card": x.vanity_card, + "strat_code": x.strat_code, + "fangr_id": x.fangr_id, + "bbref_id": x.bbref_id, + "description": x.description, + } + ) - logging.debug(f'new_players: {new_players}') + logging.debug(f"new_players: {new_players}") with db.atomic(): - # Player.bulk_create(new_players, batch_size=15) - for batch in chunked(new_players, 15): - logging.debug(f'batch: {batch}') - Player.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper (preserves SQLite compatibility) + upsert_players(new_players, batch_size=15) db.close() # sheets.update_all_players(SHEETS_AUTH) - raise HTTPException(status_code=200, detail=f'{len(new_players)} players have been added') + raise HTTPException( + status_code=200, detail=f"{len(new_players)} players have been added" + ) -@router.post('') +@router.post("") async def post_players(new_player: PlayerPydantic, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post players. This event has been logged.' + detail="You are not authorized to post players. 
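# Illustrative sketch, not part of the patch: upsert_players() lives in app/db_helpers.py
# and its body is not shown in this diff. One plausible shape, assuming Peewee's
# ON CONFLICT support, that player_id is the conflict target, and that db is bound to a
# concrete PostgresqlDatabase/SqliteDatabase instance:
from peewee import PostgresqlDatabase, chunked

def upsert_players(rows, batch_size=15):
    """Batch insert-or-update that behaves the same on SQLite and PostgreSQL."""
    # the real helper would list every non-key column here
    preserve = [Player.p_name, Player.cost, Player.image, Player.franchise]
    for batch in chunked(rows, batch_size):
        query = Player.insert_many(batch)
        if isinstance(db, PostgresqlDatabase):
            # PostgreSQL: INSERT ... ON CONFLICT (player_id) DO UPDATE SET col = EXCLUDED.col
            query = query.on_conflict(conflict_target=[Player.player_id], preserve=preserve)
        else:
            # SQLite keeps the previous INSERT OR REPLACE behaviour
            query = query.on_conflict_replace()
        query.execute()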
This event has been logged.", ) dupe_query = Player.select().where( - (Player.bbref_id == new_player.bbref_id) & (Player.cardset_id == new_player.cardset_id) + (Player.bbref_id == new_player.bbref_id) + & (Player.cardset_id == new_player.cardset_id) ) if dupe_query.count() != 0: db.close() raise HTTPException( status_code=400, - detail=f'This appears to be a duplicate with player {dupe_query[0].player_id}' + detail=f"This appears to be a duplicate with player {dupe_query[0].player_id}", ) p_query = Player.select(Player.player_id).order_by(-Player.player_id).limit(1) @@ -843,31 +1106,37 @@ async def post_players(new_player: PlayerPydantic, token: str = Depends(oauth2_s return return_val -@router.post('/{player_id}/image-reset') -async def post_image_reset(player_id: int, dev: bool = False, token: str = Depends(oauth2_scheme)): +@router.post("/{player_id}/image-reset") +async def post_image_reset( + player_id: int, dev: bool = False, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to modify players. This event has been logged.' + detail="You are not authorized to modify players. This event has been logged.", ) this_player = Player.get_or_none(Player.player_id == player_id) if this_player is None: db.close() - raise HTTPException(status_code=404, detail=f'Player ID {player_id} not found') + raise HTTPException(status_code=404, detail=f"Player ID {player_id} not found") now = datetime.datetime.now() - today_url = f'https://pd{"dev" if dev else ""}.manticorum.com/api/v2/players/{player_id}/' \ - f'{"pitch" if "pitch" in this_player.image else "batt"}ingcard?d={now.year}-{now.month}-{now.day}' - logging.debug(f'image1 url: {today_url}') + today_url = ( + f"https://pd{'dev' if dev else ''}.manticorum.com/api/v2/players/{player_id}/" + f"{'pitch' if 'pitch' in this_player.image else 'batt'}ingcard?d={now.year}-{now.month}-{now.day}" + ) + logging.debug(f"image1 url: {today_url}") this_player.image = today_url if this_player.image2 is not None: - today_url = f'https://pd{"dev" if dev else ""}.manticorum.com/api/v2/players/{player_id}/' \ - f'{"pitch" if "pitch" in this_player.image2 else "batt"}ingcard?d={now.year}-{now.month}-{now.day}' - logging.debug(f'image2 url: {today_url}') + today_url = ( + f"https://pd{'dev' if dev else ''}.manticorum.com/api/v2/players/{player_id}/" + f"{'pitch' if 'pitch' in this_player.image2 else 'batt'}ingcard?d={now.year}-{now.month}-{now.day}" + ) + logging.debug(f"image2 url: {today_url}") this_player.image2 = today_url this_player.save() @@ -876,26 +1145,32 @@ async def post_image_reset(player_id: int, dev: bool = False, token: str = Depen return r_player -@router.delete('/{player_id}') +@router.delete("/{player_id}") async def delete_player(player_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete players. This event has been logged.' + detail="You are not authorized to delete players. 
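# Illustrative sketch, not part of the patch: this endpoint looks up the current highest
# player_id, presumably to assign the next id explicitly. After ID-preserving inserts a
# PostgreSQL sequence backing that column would fall behind; the usual remedy is a
# setval() re-sync. Table and column names below are assumptions.
def reset_player_sequence():
    """Re-align the serial sequence (if any) with the current MAX(player_id)."""
    db.execute_sql(
        "SELECT setval(pg_get_serial_sequence('player', 'player_id'), "
        "COALESCE((SELECT MAX(player_id) FROM player), 1))"
    )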
This event has been logged.", ) try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) count = this_player.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Player {player_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Player {player_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Player {player_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Player {player_id} was not deleted" + ) diff --git a/app/routers_v2/stratplays.py b/app/routers_v2/stratplays.py index 324eae4..ef58a8f 100644 --- a/app/routers_v2/stratplays.py +++ b/app/routers_v2/stratplays.py @@ -6,22 +6,33 @@ import logging import pandas as pd from pydantic import BaseModel, validator -from ..db_engine import db, StratPlay, StratGame, Team, Player, model_to_dict, chunked, fn, SQL, \ - complex_data_to_csv, Decision +from ..db_engine import ( + db, + StratPlay, + StratGame, + Team, + Player, + model_to_dict, + chunked, + fn, + SQL, + complex_data_to_csv, + Decision, +) +from ..db_helpers import upsert_strat_plays from ..dependencies import oauth2_scheme, valid_token, LOG_DATA logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/plays', - tags=['plays'] -) +router = APIRouter(prefix="/api/v2/plays", tags=["plays"]) -POS_LIST = Literal['C', '1B', '2B', '3B', 'SS', 'LF', 'CF', 'RF', 'P', 'DH', 'PH', 'PR', 'GHOST'] +POS_LIST = Literal[ + "C", "1B", "2B", "3B", "SS", "LF", "CF", "RF", "P", "DH", "PH", "PR", "GHOST" +] class PlayModel(BaseModel): @@ -32,7 +43,7 @@ class PlayModel(BaseModel): pitcher_id: int pitcher_team_id: int = None on_base_code: str - inning_half: Literal['top', 'bot', 'Top', 'Bot'] + inning_half: Literal["top", "bot", "Top", "Bot"] inning_num: int batting_order: int starting_outs: int @@ -90,27 +101,27 @@ class PlayModel(BaseModel): is_tied: bool = False is_new_inning: bool = False - @validator('on_first_final') + @validator("on_first_final") def no_final_if_no_runner_one(cls, v, values): - if values['on_first_id'] is None: + if values["on_first_id"] is None: return None return v - @validator('on_second_final') + @validator("on_second_final") def no_final_if_no_runner_two(cls, v, values): - if values['on_second_id'] is None: + if values["on_second_id"] is None: return None return v - @validator('on_third_final') + @validator("on_third_final") def no_final_if_no_runner_three(cls, v, values): - if values['on_third_id'] is None: + if values["on_third_id"] is None: return None return v - @validator('batter_final') + @validator("batter_final") def no_final_if_no_batter(cls, v, values): - if values['batter_id'] is None: + if values["batter_id"] is None: return None return v @@ -119,24 +130,52 @@ class PlayList(BaseModel): plays: List[PlayModel] -@router.get('') +@router.get("") async def get_plays( - game_id: list = Query(default=None), batter_id: list = Query(default=None), season: list = Query(default=None), - week: list = Query(default=None), has_defender: Optional[bool] = None, has_catcher: Optional[bool] = None, - has_defender_or_catcher: Optional[bool] = None, is_scoring_play: Optional[bool] = None, - 
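# Illustrative sketch, not part of the patch: a self-contained toy showing the validator
# pattern above -- when the matching runner id is absent, the *_final value is cleared on
# parse (pydantic v1 @validator, as used in this module).
from typing import Optional
from pydantic import BaseModel, validator

class BaseState(BaseModel):
    on_first_id: Optional[int] = None
    on_first_final: Optional[int] = None

    @validator("on_first_final")
    def clear_final_without_runner(cls, v, values):
        # no runner on first -> a final base for that runner is meaningless
        if values.get("on_first_id") is None:
            return None
        return v

assert BaseState(on_first_final=4).on_first_final is None
assert BaseState(on_first_id=12, on_first_final=4).on_first_final == 4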
pitcher_id: list = Query(default=None), obc: list = Query(default=None), inning: list = Query(default=None), - batting_order: list = Query(default=None), starting_outs: list = Query(default=None), - batter_pos: list = Query(default=None), catcher_id: list = Query(default=None), - defender_id: list = Query(default=None), runner_id: list = Query(default=None), - offense_team_id: list = Query(default=None), defense_team_id: list = Query(default=None), - hit: Optional[int] = None, double: Optional[int] = None, triple: Optional[int] = None, - homerun: Optional[int] = None, play_num: list = Query(default=None), game_type: list = Query(default=None), - sb: Optional[int] = None, cs: Optional[int] = None, csv: Optional[bool] = False, - run: Optional[int] = None, e_run: Optional[int] = None, rbi: list = Query(default=None), - outs: list = Query(default=None), wild_pitch: Optional[int] = None, is_final_out: Optional[bool] = None, - is_go_ahead: Optional[bool] = None, is_tied: Optional[bool] = None, is_new_inning: Optional[bool] = None, - min_wpa: Optional[float] = None, max_wpa: Optional[float] = None, sort: Optional[str] = None, - short_output: Optional[bool] = False, limit: Optional[int] = 200, page_num: Optional[int] = 1): + game_id: list = Query(default=None), + batter_id: list = Query(default=None), + season: list = Query(default=None), + week: list = Query(default=None), + has_defender: Optional[bool] = None, + has_catcher: Optional[bool] = None, + has_defender_or_catcher: Optional[bool] = None, + is_scoring_play: Optional[bool] = None, + pitcher_id: list = Query(default=None), + obc: list = Query(default=None), + inning: list = Query(default=None), + batting_order: list = Query(default=None), + starting_outs: list = Query(default=None), + batter_pos: list = Query(default=None), + catcher_id: list = Query(default=None), + defender_id: list = Query(default=None), + runner_id: list = Query(default=None), + offense_team_id: list = Query(default=None), + defense_team_id: list = Query(default=None), + hit: Optional[int] = None, + double: Optional[int] = None, + triple: Optional[int] = None, + homerun: Optional[int] = None, + play_num: list = Query(default=None), + game_type: list = Query(default=None), + sb: Optional[int] = None, + cs: Optional[int] = None, + csv: Optional[bool] = False, + run: Optional[int] = None, + e_run: Optional[int] = None, + rbi: list = Query(default=None), + outs: list = Query(default=None), + wild_pitch: Optional[int] = None, + is_final_out: Optional[bool] = None, + is_go_ahead: Optional[bool] = None, + is_tied: Optional[bool] = None, + is_new_inning: Optional[bool] = None, + min_wpa: Optional[float] = None, + max_wpa: Optional[float] = None, + sort: Optional[str] = None, + short_output: Optional[bool] = False, + limit: Optional[int] = 200, + page_num: Optional[int] = 1, +): all_plays = StratPlay.select() if season is not None: @@ -183,7 +222,8 @@ async def get_plays( if defense_team_id is not None: all_teams = Team.select().where(Team.id << defense_team_id) all_plays = all_plays.where( - (StratPlay.catcher_team << all_teams) | (StratPlay.defender_team << all_teams) + (StratPlay.catcher_team << all_teams) + | (StratPlay.defender_team << all_teams) ) if hit is not None: all_plays = all_plays.where(StratPlay.hit == hit) @@ -217,8 +257,10 @@ async def get_plays( all_plays = all_plays.where(StratPlay.is_new_inning == is_new_inning) if is_scoring_play is not None: all_plays = all_plays.where( - (StratPlay.on_first_final == 4) | (StratPlay.on_second_final == 4) | 
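# Illustrative sketch, not part of the patch: Peewee's "<<" operator used in these filters
# compiles to SQL IN, and boolean flags are compared directly. A reduced example of how
# the endpoint composes filters, ordering and paging (StratPlay and fn come from this
# module's existing imports):
late_inning_scores = (
    StratPlay.select()
    .where(
        (StratPlay.inning_num << [8, 9])        # IN (8, 9)
        & (StratPlay.batter_final == 4)         # batter ended the play at home
    )
    .order_by(-fn.ABS(StratPlay.wpa))           # biggest win-probability swings first
    .paginate(1, 25)                            # page 1, 25 rows per page
)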
(StratPlay.on_third_final == 4) | - (StratPlay.batter_final == 4) + (StratPlay.on_first_final == 4) + | (StratPlay.on_second_final == 4) + | (StratPlay.on_third_final == 4) + | (StratPlay.batter_final == 4) ) if min_wpa is not None: all_plays = all_plays.where(StratPlay.wpa >= min_wpa) @@ -238,17 +280,19 @@ async def get_plays( if page_num < 1: page_num = 1 - if sort == 'wpa-desc': + if sort == "wpa-desc": all_plays = all_plays.order_by(-fn.ABS(StratPlay.wpa)) - elif sort == 'wpa-asc': + elif sort == "wpa-asc": all_plays = all_plays.order_by(fn.ABS(StratPlay.wpa)) - elif sort == 're24-desc': + elif sort == "re24-desc": all_plays = all_plays.order_by(-fn.ABS(StratPlay.re24)) - elif sort == 're24-asc': + elif sort == "re24-asc": all_plays = all_plays.order_by(fn.ABS(StratPlay.re24)) - elif sort == 'newest': - all_plays = all_plays.order_by(StratPlay.game_id.desc(), StratPlay.play_num.desc()) - elif sort == 'oldest': + elif sort == "newest": + all_plays = all_plays.order_by( + StratPlay.game_id.desc(), StratPlay.play_num.desc() + ) + elif sort == "oldest": all_plays = all_plays.order_by(StratPlay.game_id, StratPlay.play_num) all_plays = all_plays.paginate(page_num, limit) @@ -256,83 +300,117 @@ async def get_plays( if csv: return_vals = [model_to_dict(x) for x in all_plays] for x in return_vals: - x['game_id'] = x['game']['id'] - x['game_type'] = x['game']['game_type'] - x['batter_id'] = x['batter']['player_id'] - x['batter_name'] = x['batter']['p_name'] - x['batter_cardset'] = x['batter']['cardset']['name'] - x['batter_team_id'] = x['batter_team']['id'] - x['batter_team_abbrev'] = x['batter_team']['abbrev'] - x['pitcher_id'] = x['pitcher']['player_id'] - x['pitcher_name'] = x['pitcher']['p_name'] - x['pitcher_cardset'] = x['pitcher']['cardset']['name'] - x['pitcher_team_id'] = x['pitcher_team']['id'] - x['pitcher_team_abbrev'] = x['pitcher_team']['abbrev'] + x["game_id"] = x["game"]["id"] + x["game_type"] = x["game"]["game_type"] + x["batter_id"] = x["batter"]["player_id"] + x["batter_name"] = x["batter"]["p_name"] + x["batter_cardset"] = x["batter"]["cardset"]["name"] + x["batter_team_id"] = x["batter_team"]["id"] + x["batter_team_abbrev"] = x["batter_team"]["abbrev"] + x["pitcher_id"] = x["pitcher"]["player_id"] + x["pitcher_name"] = x["pitcher"]["p_name"] + x["pitcher_cardset"] = x["pitcher"]["cardset"]["name"] + x["pitcher_team_id"] = x["pitcher_team"]["id"] + x["pitcher_team_abbrev"] = x["pitcher_team"]["abbrev"] - if x['catcher'] is not None: - x['catcher_id'] = x['catcher']['player_id'] - x['catcher_name'] = x['catcher']['p_name'] - x['catcher_cardset'] = x['catcher']['cardset']['name'] - x['catcher_team_id'] = x['catcher_team']['id'] - x['catcher_team_abbrev'] = x['catcher_team']['abbrev'] + if x["catcher"] is not None: + x["catcher_id"] = x["catcher"]["player_id"] + x["catcher_name"] = x["catcher"]["p_name"] + x["catcher_cardset"] = x["catcher"]["cardset"]["name"] + x["catcher_team_id"] = x["catcher_team"]["id"] + x["catcher_team_abbrev"] = x["catcher_team"]["abbrev"] else: - x['catcher_id'] = None - x['catcher_name'] = None - x['catcher_cardset'] = None - x['catcher_team_id'] = None - x['catcher_team_abbrev'] = None + x["catcher_id"] = None + x["catcher_name"] = None + x["catcher_cardset"] = None + x["catcher_team_id"] = None + x["catcher_team_abbrev"] = None - if x['defender'] is not None: - x['defender_id'] = x['defender']['player_id'] - x['defender_name'] = x['defender']['p_name'] - x['defender_cardset'] = x['defender']['cardset']['name'] - x['defender_team_id'] = 
x['defender_team']['id'] - x['defender_team_abbrev'] = x['defender_team']['abbrev'] + if x["defender"] is not None: + x["defender_id"] = x["defender"]["player_id"] + x["defender_name"] = x["defender"]["p_name"] + x["defender_cardset"] = x["defender"]["cardset"]["name"] + x["defender_team_id"] = x["defender_team"]["id"] + x["defender_team_abbrev"] = x["defender_team"]["abbrev"] else: - x['defender_id'] = None - x['defender_name'] = None - x['defender_cardset'] = None - x['defender_team_id'] = None - x['defender_team_abbrev'] = None + x["defender_id"] = None + x["defender_name"] = None + x["defender_cardset"] = None + x["defender_team_id"] = None + x["defender_team_abbrev"] = None - if x['runner'] is not None: - x['runner_id'] = x['runner']['player_id'] - x['runner_name'] = x['runner']['p_name'] - x['runner_cardset'] = x['runner']['cardset']['name'] - x['runner_team_id'] = x['runner_team']['id'] - x['runner_team_abbrev'] = x['runner_team']['abbrev'] + if x["runner"] is not None: + x["runner_id"] = x["runner"]["player_id"] + x["runner_name"] = x["runner"]["p_name"] + x["runner_cardset"] = x["runner"]["cardset"]["name"] + x["runner_team_id"] = x["runner_team"]["id"] + x["runner_team_abbrev"] = x["runner_team"]["abbrev"] else: - x['runner_id'] = None - x['runner_name'] = None - x['runner_cardset'] = None - x['runner_team_id'] = None - x['runner_team_abbrev'] = None + x["runner_id"] = None + x["runner_name"] = None + x["runner_cardset"] = None + x["runner_team_id"] = None + x["runner_team_abbrev"] = None - del x['game'], x['batter'], x['batter_team'], x['pitcher'], x['pitcher_team'], x['catcher'], \ - x['catcher_team'], x['defender'], x['defender_team'], x['runner'], x['runner_team'] + del ( + x["game"], + x["batter"], + x["batter_team"], + x["pitcher"], + x["pitcher_team"], + x["catcher"], + x["catcher_team"], + x["defender"], + x["defender_team"], + x["runner"], + x["runner_team"], + ) db.close() - return Response(content=pd.DataFrame(return_vals).to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame(return_vals).to_csv(index=False), media_type="text/csv" + ) return_plays = { - 'count': all_plays.count(), - 'plays': [model_to_dict(x, recurse=not short_output) for x in all_plays] + "count": all_plays.count(), + "plays": [model_to_dict(x, recurse=not short_output) for x in all_plays], } db.close() return return_plays -@router.get('/batting') +@router.get("/batting") async def get_batting_totals( - season: list = Query(default=None), week: list = Query(default=None), position: list = Query(default=None), - player_id: list = Query(default=None), min_wpa: Optional[float] = -999, max_wpa: Optional[float] = 999, - group_by: Literal[ - 'team', 'player', 'playerteam', 'playergame', 'teamgame', 'league', 'gmtype', 'playergtype', - 'playerteamgtype'] = 'player', is_gauntlet: Optional[bool] = None, - min_pa: Optional[int] = 1, team_id: list = Query(default=None), inning: list = Query(default=None), - obc: list = Query(default=None), risp: Optional[bool] = None, game_type: list = Query(default=None), - page_num: Optional[int] = 1, sort: Optional[str] = 'pa-desc', limit: Optional[int] = 500, - short_output: Optional[bool] = False, csv: Optional[bool] = False): + season: list = Query(default=None), + week: list = Query(default=None), + position: list = Query(default=None), + player_id: list = Query(default=None), + min_wpa: Optional[float] = -999, + max_wpa: Optional[float] = 999, + group_by: Literal[ + "team", + "player", + "playerteam", + "playergame", + "teamgame", + 
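# Illustrative sketch, not part of the patch: the CSV branch above flattens nested
# model_to_dict() payloads into scalar columns before handing them to pandas. A compact,
# self-contained version with made-up keys (the endpoint flattens many more):
import pandas as pd

def flatten_play(row: dict) -> dict:
    """Reduce one nested play dict to flat, CSV-friendly columns."""
    flat = {k: v for k, v in row.items() if not isinstance(v, dict)}
    flat["game_id"] = row["game"]["id"]
    batter = row.get("batter") or {}
    flat["batter_id"] = batter.get("player_id")
    flat["batter_name"] = batter.get("p_name")
    return flat

sample = {"pa": 1, "hit": 1, "game": {"id": 42}, "batter": {"player_id": 7, "p_name": "A. Example"}}
print(pd.DataFrame([flatten_play(sample)]).to_csv(index=False))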
"league", + "gmtype", + "playergtype", + "playerteamgtype", + ] = "player", + is_gauntlet: Optional[bool] = None, + min_pa: Optional[int] = 1, + team_id: list = Query(default=None), + inning: list = Query(default=None), + obc: list = Query(default=None), + risp: Optional[bool] = None, + game_type: list = Query(default=None), + page_num: Optional[int] = 1, + sort: Optional[str] = "pa-desc", + limit: Optional[int] = 500, + short_output: Optional[bool] = False, + csv: Optional[bool] = False, +): season_games = StratGame.select() if season is not None: season_games = season_games.where(StratGame.season << season) @@ -342,83 +420,119 @@ async def get_batting_totals( # Build SELECT fields conditionally based on group_by to satisfy PostgreSQL's # strict GROUP BY requirement (all non-aggregated SELECT fields must be in GROUP BY) base_select_fields = [ - fn.SUM(StratPlay.pa).alias('sum_pa'), - fn.SUM(StratPlay.ab).alias('sum_ab'), - fn.SUM(StratPlay.run).alias('sum_run'), - fn.SUM(StratPlay.hit).alias('sum_hit'), - fn.SUM(StratPlay.rbi).alias('sum_rbi'), - fn.SUM(StratPlay.double).alias('sum_double'), - fn.SUM(StratPlay.triple).alias('sum_triple'), - fn.SUM(StratPlay.homerun).alias('sum_hr'), - fn.SUM(StratPlay.bb).alias('sum_bb'), - fn.SUM(StratPlay.so).alias('sum_so'), - fn.SUM(StratPlay.hbp).alias('sum_hbp'), - fn.SUM(StratPlay.sac).alias('sum_sac'), - fn.SUM(StratPlay.ibb).alias('sum_ibb'), - fn.SUM(StratPlay.gidp).alias('sum_gidp'), - fn.SUM(StratPlay.sb).alias('sum_sb'), - fn.SUM(StratPlay.cs).alias('sum_cs'), - fn.SUM(StratPlay.bphr).alias('sum_bphr'), - fn.SUM(StratPlay.bpfo).alias('sum_bpfo'), - fn.SUM(StratPlay.bp1b).alias('sum_bp1b'), - fn.SUM(StratPlay.bplo).alias('sum_bplo'), - fn.SUM(StratPlay.wpa).alias('sum_wpa'), - fn.SUM(StratPlay.re24).alias('sum_re24'), - fn.COUNT(StratPlay.on_first_final).filter( - StratPlay.on_first_final.is_null(False) & (StratPlay.on_first_final != 4)).alias('count_lo1'), - fn.COUNT(StratPlay.on_second_final).filter( - StratPlay.on_second_final.is_null(False) & (StratPlay.on_second_final != 4)).alias('count_lo2'), - fn.COUNT(StratPlay.on_third_final).filter( - StratPlay.on_third_final.is_null(False) & (StratPlay.on_third_final != 4)).alias('count_lo3'), - fn.COUNT(StratPlay.on_first).filter(StratPlay.on_first.is_null(False)).alias('count_runner1'), - fn.COUNT(StratPlay.on_second).filter(StratPlay.on_second.is_null(False)).alias('count_runner2'), - fn.COUNT(StratPlay.on_third).filter(StratPlay.on_third.is_null(False)).alias('count_runner3'), - fn.COUNT(StratPlay.on_first_final).filter( - StratPlay.on_first_final.is_null(False) & (StratPlay.on_first_final != 4) & - (StratPlay.starting_outs + StratPlay.outs == 3)).alias('count_lo1_3out'), - fn.COUNT(StratPlay.on_second_final).filter( - StratPlay.on_second_final.is_null(False) & (StratPlay.on_second_final != 4) & - (StratPlay.starting_outs + StratPlay.outs == 3)).alias('count_lo2_3out'), - fn.COUNT(StratPlay.on_third_final).filter( - StratPlay.on_third_final.is_null(False) & (StratPlay.on_third_final != 4) & - (StratPlay.starting_outs + StratPlay.outs == 3)).alias('count_lo3_3out') + fn.SUM(StratPlay.pa).alias("sum_pa"), + fn.SUM(StratPlay.ab).alias("sum_ab"), + fn.SUM(StratPlay.run).alias("sum_run"), + fn.SUM(StratPlay.hit).alias("sum_hit"), + fn.SUM(StratPlay.rbi).alias("sum_rbi"), + fn.SUM(StratPlay.double).alias("sum_double"), + fn.SUM(StratPlay.triple).alias("sum_triple"), + fn.SUM(StratPlay.homerun).alias("sum_hr"), + fn.SUM(StratPlay.bb).alias("sum_bb"), + fn.SUM(StratPlay.so).alias("sum_so"), + 
fn.SUM(StratPlay.hbp).alias("sum_hbp"), + fn.SUM(StratPlay.sac).alias("sum_sac"), + fn.SUM(StratPlay.ibb).alias("sum_ibb"), + fn.SUM(StratPlay.gidp).alias("sum_gidp"), + fn.SUM(StratPlay.sb).alias("sum_sb"), + fn.SUM(StratPlay.cs).alias("sum_cs"), + fn.SUM(StratPlay.bphr).alias("sum_bphr"), + fn.SUM(StratPlay.bpfo).alias("sum_bpfo"), + fn.SUM(StratPlay.bp1b).alias("sum_bp1b"), + fn.SUM(StratPlay.bplo).alias("sum_bplo"), + fn.SUM(StratPlay.wpa).alias("sum_wpa"), + fn.SUM(StratPlay.re24).alias("sum_re24"), + fn.COUNT(StratPlay.on_first_final) + .filter( + StratPlay.on_first_final.is_null(False) & (StratPlay.on_first_final != 4) + ) + .alias("count_lo1"), + fn.COUNT(StratPlay.on_second_final) + .filter( + StratPlay.on_second_final.is_null(False) & (StratPlay.on_second_final != 4) + ) + .alias("count_lo2"), + fn.COUNT(StratPlay.on_third_final) + .filter( + StratPlay.on_third_final.is_null(False) & (StratPlay.on_third_final != 4) + ) + .alias("count_lo3"), + fn.COUNT(StratPlay.on_first) + .filter(StratPlay.on_first.is_null(False)) + .alias("count_runner1"), + fn.COUNT(StratPlay.on_second) + .filter(StratPlay.on_second.is_null(False)) + .alias("count_runner2"), + fn.COUNT(StratPlay.on_third) + .filter(StratPlay.on_third.is_null(False)) + .alias("count_runner3"), + fn.COUNT(StratPlay.on_first_final) + .filter( + StratPlay.on_first_final.is_null(False) + & (StratPlay.on_first_final != 4) + & (StratPlay.starting_outs + StratPlay.outs == 3) + ) + .alias("count_lo1_3out"), + fn.COUNT(StratPlay.on_second_final) + .filter( + StratPlay.on_second_final.is_null(False) + & (StratPlay.on_second_final != 4) + & (StratPlay.starting_outs + StratPlay.outs == 3) + ) + .alias("count_lo2_3out"), + fn.COUNT(StratPlay.on_third_final) + .filter( + StratPlay.on_third_final.is_null(False) + & (StratPlay.on_third_final != 4) + & (StratPlay.starting_outs + StratPlay.outs == 3) + ) + .alias("count_lo3_3out"), ] # Add non-aggregated fields based on grouping type - if group_by in ['player', 'playerteam', 'playergame', 'playergtype', 'playerteamgtype']: + if group_by in [ + "player", + "playerteam", + "playergame", + "playergtype", + "playerteamgtype", + ]: base_select_fields.insert(0, StratPlay.batter) - if group_by in ['team', 'playerteam', 'teamgame', 'playerteamgtype']: + if group_by in ["team", "playerteam", "teamgame", "playerteamgtype"]: base_select_fields.append(StratPlay.batter_team) - if group_by in ['playergame', 'teamgame']: + if group_by in ["playergame", "teamgame"]: base_select_fields.append(StratPlay.game) bat_plays = ( - StratPlay - .select(*base_select_fields) - .where((StratPlay.game << season_games) & (StratPlay.batter.is_null(False))) - .having(fn.SUM(StratPlay.pa) >= min_pa) + StratPlay.select(*base_select_fields) + .where((StratPlay.game << season_games) & (StratPlay.batter.is_null(False))) + .having(fn.SUM(StratPlay.pa) >= min_pa) ) # Build run_plays SELECT fields conditionally run_select_fields = [ - fn.SUM(StratPlay.sb).alias('sum_sb'), - fn.SUM(StratPlay.cs).alias('sum_cs'), - fn.SUM(StratPlay.pick_off).alias('sum_pick'), - fn.SUM(StratPlay.wpa).alias('sum_wpa'), - fn.SUM(StratPlay.re24).alias('sum_re24') + fn.SUM(StratPlay.sb).alias("sum_sb"), + fn.SUM(StratPlay.cs).alias("sum_cs"), + fn.SUM(StratPlay.pick_off).alias("sum_pick"), + fn.SUM(StratPlay.wpa).alias("sum_wpa"), + fn.SUM(StratPlay.re24).alias("sum_re24"), ] - if group_by in ['player', 'playerteam', 'playergame', 'playergtype', 'playerteamgtype']: + if group_by in [ + "player", + "playerteam", + "playergame", + "playergtype", + 
"playerteamgtype", + ]: run_select_fields.insert(0, StratPlay.runner) - if group_by in ['team', 'playerteam', 'teamgame', 'playerteamgtype']: + if group_by in ["team", "playerteam", "teamgame", "playerteamgtype"]: run_select_fields.append(StratPlay.runner_team) - if group_by in ['playergame', 'teamgame']: + if group_by in ["playergame", "teamgame"]: run_select_fields.append(StratPlay.game) - run_plays = ( - StratPlay - .select(*run_select_fields) - .where((StratPlay.game << season_games) & (StratPlay.runner.is_null(False))) + run_plays = StratPlay.select(*run_select_fields).where( + (StratPlay.game << season_games) & (StratPlay.runner.is_null(False)) ) if player_id is not None: @@ -435,7 +549,9 @@ async def get_batting_totals( if obc is not None: bat_plays = bat_plays.where(StratPlay.on_base_code << obc) if risp is not None: - bat_plays = bat_plays.where(StratPlay.on_base_code << ['100', '101', '110', '111', '010', '011']) + bat_plays = bat_plays.where( + StratPlay.on_base_code << ["100", "101", "110", "111", "010", "011"] + ) if inning is not None: bat_plays = bat_plays.where(StratPlay.inning_num << inning) if game_type is not None: @@ -444,71 +560,79 @@ async def get_batting_totals( bat_plays = bat_plays.where(StratPlay.game << all_games) run_plays = run_plays.where(StratPlay.game << all_games) if is_gauntlet is not None: - all_games = StratGame.select().where(fn.Lower(StratGame.game_type).contains('gauntlet')) + all_games = StratGame.select().where( + fn.Lower(StratGame.game_type).contains("gauntlet") + ) bat_plays = bat_plays.where(StratPlay.game << all_games) if group_by is not None: - if group_by == 'player': + if group_by == "player": bat_plays = bat_plays.group_by(StratPlay.batter) run_plays = run_plays.group_by(StratPlay.runner) - elif group_by == 'team': + elif group_by == "team": bat_plays = bat_plays.group_by(StratPlay.batter_team) run_plays = run_plays.group_by(StratPlay.runner_team) - elif group_by == 'playerteam': + elif group_by == "playerteam": bat_plays = bat_plays.group_by(StratPlay.batter, StratPlay.batter_team) run_plays = run_plays.group_by(StratPlay.runner, StratPlay.runner_team) - elif group_by == 'playergame': + elif group_by == "playergame": bat_plays = bat_plays.group_by(StratPlay.batter, StratPlay.game) run_plays = run_plays.group_by(StratPlay.runner, StratPlay.game) - elif group_by == 'teamgame': + elif group_by == "teamgame": bat_plays = bat_plays.group_by(StratPlay.batter_team, StratPlay.game) run_plays = run_plays.group_by(StratPlay.runner_team, StratPlay.game) - elif group_by == 'league': + elif group_by == "league": bat_plays = bat_plays.join(StratGame) bat_plays = bat_plays.group_by(StratPlay.game.season) run_plays = run_plays.join(StratGame) run_plays = run_plays.group_by(StratPlay.game.season) - elif group_by == 'gtype': + elif group_by == "gtype": bat_plays = bat_plays.join(StratGame) bat_plays = bat_plays.group_by(StratPlay.game.game_type) run_plays = run_plays.join(StratGame) run_plays = run_plays.group_by(StratPlay.game.game_type) - elif group_by == 'playergtype': + elif group_by == "playergtype": bat_plays = bat_plays.join(StratGame) bat_plays = bat_plays.group_by(StratPlay.batter, StratPlay.game.game_type) run_plays = run_plays.join(StratGame) run_plays = run_plays.group_by(StratPlay.runner, StratPlay.game.game_type) - elif group_by == 'playerteamgtype': + elif group_by == "playerteamgtype": bat_plays = bat_plays.join(StratGame) bat_plays = bat_plays.group_by( - StratPlay.batter, StratPlay.batter_team, StratPlay.game.game_type) + StratPlay.batter, 
StratPlay.batter_team, StratPlay.game.game_type + ) run_plays = run_plays.join(StratGame) run_plays = run_plays.group_by( - StratPlay.runner, StratPlay.runner_team, StratPlay.game.game_type) + StratPlay.runner, StratPlay.runner_team, StratPlay.game.game_type + ) if sort is not None: - if sort == 'player': + if sort == "player": bat_plays = bat_plays.order_by(StratPlay.batter) run_plays = run_plays.order_by(StratPlay.runner) - elif sort == 'team': + elif sort == "team": bat_plays = bat_plays.order_by(StratPlay.batter_team) run_plays = run_plays.order_by(StratPlay.runner_team) - elif sort == 'wpa-desc': - bat_plays = bat_plays.order_by(SQL('sum_wpa').desc()) - elif sort == 'wpa-asc': - bat_plays = bat_plays.order_by(SQL('sum_wpa').asc()) - elif sort == 'pa-desc': - bat_plays = bat_plays.order_by(SQL('sum_pa').desc()) - elif sort == 'pa-asc': - bat_plays = bat_plays.order_by(SQL('sum_pa').asc()) - elif sort == 're24-desc': - bat_plays = bat_plays.order_by(SQL('sum_re24').desc()) - elif sort == 're24-asc': - bat_plays = bat_plays.order_by(SQL('sum_re24').asc()) - elif sort == 'newest': - bat_plays = bat_plays.order_by(StratPlay.game_id.desc(), StratPlay.play_num.desc()) - run_plays = run_plays.order_by(StratPlay.game_id.desc(), StratPlay.play_num.desc()) - elif sort == 'oldest': + elif sort == "wpa-desc": + bat_plays = bat_plays.order_by(SQL("sum_wpa").desc()) + elif sort == "wpa-asc": + bat_plays = bat_plays.order_by(SQL("sum_wpa").asc()) + elif sort == "pa-desc": + bat_plays = bat_plays.order_by(SQL("sum_pa").desc()) + elif sort == "pa-asc": + bat_plays = bat_plays.order_by(SQL("sum_pa").asc()) + elif sort == "re24-desc": + bat_plays = bat_plays.order_by(SQL("sum_re24").desc()) + elif sort == "re24-asc": + bat_plays = bat_plays.order_by(SQL("sum_re24").asc()) + elif sort == "newest": + bat_plays = bat_plays.order_by( + StratPlay.game_id.desc(), StratPlay.play_num.desc() + ) + run_plays = run_plays.order_by( + StratPlay.game_id.desc(), StratPlay.play_num.desc() + ) + elif sort == "oldest": bat_plays = bat_plays.order_by(StratPlay.game_id, StratPlay.play_num) run_plays = run_plays.order_by(StratPlay.game_id, StratPlay.play_num) @@ -518,23 +642,20 @@ async def get_batting_totals( limit = 500 bat_plays = bat_plays.paginate(page_num, limit) - logging.debug(f'bat_plays query: {bat_plays}') - logging.debug(f'run_plays query: {run_plays}') + logging.debug(f"bat_plays query: {bat_plays}") + logging.debug(f"run_plays query: {run_plays}") - return_stats = { - 'count': bat_plays.count(), - 'stats': [] - } + return_stats = {"count": bat_plays.count(), "stats": []} for x in bat_plays: this_run = run_plays.order_by(StratPlay.id) - if 'player' in group_by: + if "player" in group_by: this_run = this_run.where(StratPlay.runner == x.batter) - if 'game' in group_by: + if "game" in group_by: this_run = this_run.where(StratPlay.game == x.game) - if 'team' in group_by: + if "team" in group_by: this_run = this_run.where(StratPlay.runner_team == x.batter_team) - if 'gtype' in group_by: + if "gtype" in group_by: this_run = this_run.where(StratPlay.game.game_type == x.game.game_type) if this_run.count() > 0: @@ -549,7 +670,9 @@ async def get_batting_totals( run_wpa = 0 run_re24 = 0 this_wpa = bat_plays.where( - (StratPlay.wpa >= min_wpa) & (StratPlay.wpa <= max_wpa) & (StratPlay.batter == x.batter) + (StratPlay.wpa >= min_wpa) + & (StratPlay.wpa <= max_wpa) + & (StratPlay.batter == x.batter) ) if this_wpa.count() > 0: sum_wpa = this_wpa[0].sum_wpa @@ -558,58 +681,80 @@ async def get_batting_totals( tot_ab = x.sum_ab 
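# Illustrative sketch, not part of the patch: PostgreSQL rejects any selected column that
# is neither aggregated nor named in GROUP BY, while SQLite silently accepts it -- which
# is why the select list above is built to match group_by. A reduced version of that
# pattern for the two simplest groupings (StratPlay and fn come from this module's
# existing imports; the function name is hypothetical):
def batting_totals_query(group_by: str):
    fields = [
        fn.SUM(StratPlay.pa).alias("sum_pa"),
        fn.SUM(StratPlay.hit).alias("sum_hit"),
    ]
    if group_by == "player":
        fields.insert(0, StratPlay.batter)        # grouped, so safe to select
        return StratPlay.select(*fields).group_by(StratPlay.batter)
    if group_by == "team":
        fields.insert(0, StratPlay.batter_team)
        return StratPlay.select(*fields).group_by(StratPlay.batter_team)
    raise ValueError(f"unsupported group_by: {group_by}")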
if x.sum_ab > 0 else 1 obp = (x.sum_hit + x.sum_bb + x.sum_hbp + x.sum_ibb) / x.sum_pa - slg = (x.sum_hr * 4 + x.sum_triple * 3 + x.sum_double * 2 + - (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr)) / tot_ab + slg = ( + x.sum_hr * 4 + + x.sum_triple * 3 + + x.sum_double * 2 + + (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + ) / tot_ab - this_game = 'TOT' - if group_by in ['playergame', 'teamgame']: - this_game = x.game_id if short_output else model_to_dict(x.game, recurse=False) - elif 'gtype' in group_by: + this_game = "TOT" + if group_by in ["playergame", "teamgame"]: + this_game = ( + x.game_id if short_output else model_to_dict(x.game, recurse=False) + ) + elif "gtype" in group_by: this_game = x.game.game_type lob_all_rate, lob_2outs_rate, rbi_rate = 0, 0, 0 if x.count_runner1 + x.count_runner2 + x.count_runner3 > 0: - lob_all_rate = (x.count_lo1 + x.count_lo2 + x.count_lo3) / \ - (x.count_runner1 + x.count_runner2 + x.count_runner3) - rbi_rate = (x.sum_rbi - x.sum_hr) / (x.count_runner1 + x.count_runner2 + x.count_runner3) + lob_all_rate = (x.count_lo1 + x.count_lo2 + x.count_lo3) / ( + x.count_runner1 + x.count_runner2 + x.count_runner3 + ) + rbi_rate = (x.sum_rbi - x.sum_hr) / ( + x.count_runner1 + x.count_runner2 + x.count_runner3 + ) - return_stats['stats'].append({ - 'player': x.batter_id if short_output else model_to_dict(x.batter, recurse=True, max_depth=1), - 'team': x.batter_team_id if short_output else model_to_dict(x.batter_team, recurse=True, max_depth=1), - 'pa': x.sum_pa, - 'ab': x.sum_ab, - 'run': x.sum_run, - 'hit': x.sum_hit, - 'rbi': x.sum_rbi, - 'double': x.sum_double, - 'triple': x.sum_triple, - 'hr': x.sum_hr, - 'bb': x.sum_bb, - 'so': x.sum_so, - 'hbp': x.sum_hbp, - 'sac': x.sum_sac, - 'ibb': x.sum_ibb, - 'gidp': x.sum_gidp, - 'sb': sum_sb, - 'cs': sum_cs, - 'bphr': x.sum_bphr, - 'bpfo': x.sum_bpfo, - 'bp1b': x.sum_bp1b, - 'bplo': x.sum_bplo, - 'wpa': sum_wpa + run_wpa, - 're24': x.sum_re24 + run_re24, - 'avg': x.sum_hit / tot_ab, - 'obp': obp, - 'slg': slg, - 'ops': obp + slg, - 'woba': (.69 * x.sum_bb + .72 * x.sum_hbp + .89 * (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + - 1.27 * x.sum_double + 1.62 * x.sum_triple + 2.1 * x.sum_hr) / max(x.sum_pa - x.sum_ibb, 1), - 'game': this_game, - 'lob_all': x.count_lo1 + x.count_lo2 + x.count_lo3, - 'lob_all_rate': lob_all_rate, - 'lob_2outs': x.count_lo1_3out + x.count_lo2_3out + x.count_lo3_3out, - 'rbi%': rbi_rate - }) + return_stats["stats"].append( + { + "player": x.batter_id + if short_output + else model_to_dict(x.batter, recurse=True, max_depth=1), + "team": x.batter_team_id + if short_output + else model_to_dict(x.batter_team, recurse=True, max_depth=1), + "pa": x.sum_pa, + "ab": x.sum_ab, + "run": x.sum_run, + "hit": x.sum_hit, + "rbi": x.sum_rbi, + "double": x.sum_double, + "triple": x.sum_triple, + "hr": x.sum_hr, + "bb": x.sum_bb, + "so": x.sum_so, + "hbp": x.sum_hbp, + "sac": x.sum_sac, + "ibb": x.sum_ibb, + "gidp": x.sum_gidp, + "sb": sum_sb, + "cs": sum_cs, + "bphr": x.sum_bphr, + "bpfo": x.sum_bpfo, + "bp1b": x.sum_bp1b, + "bplo": x.sum_bplo, + "wpa": sum_wpa + run_wpa, + "re24": x.sum_re24 + run_re24, + "avg": x.sum_hit / tot_ab, + "obp": obp, + "slg": slg, + "ops": obp + slg, + "woba": ( + 0.69 * x.sum_bb + + 0.72 * x.sum_hbp + + 0.89 * (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + + 1.27 * x.sum_double + + 1.62 * x.sum_triple + + 2.1 * x.sum_hr + ) + / max(x.sum_pa - x.sum_ibb, 1), + "game": this_game, + "lob_all": x.count_lo1 + x.count_lo2 + x.count_lo3, + 
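# Illustrative sketch, not part of the patch: a worked example of the rate stats computed
# above, using made-up season totals.
pa, ab, h, dbl, trp, hr, bb, hbp, ibb = 100, 90, 27, 5, 1, 4, 8, 1, 1
singles = h - dbl - trp - hr                              # 17
avg = h / ab                                              # 0.300
obp = (h + bb + hbp + ibb) / pa                           # 0.370
slg = (hr * 4 + trp * 3 + dbl * 2 + singles) / ab         # 46 / 90 ~= 0.511
woba = (
    0.69 * bb + 0.72 * hbp + 0.89 * singles
    + 1.27 * dbl + 1.62 * trp + 2.1 * hr
) / max(pa - ibb, 1)                                      # ~= 0.381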
"lob_all_rate": lob_all_rate, + "lob_2outs": x.count_lo1_3out + x.count_lo2_3out + x.count_lo3_3out, + "rbi%": rbi_rate, + } + ) # if group_by == 're24-desc': # return_stats['stats'].sort(key=lambda x: x['re24'], reverse=True) @@ -617,135 +762,198 @@ async def get_batting_totals( # return_stats['stats'].sort(key=lambda x: x['re24']) if csv: - return_vals = return_stats['stats'] + return_vals = return_stats["stats"] if len(return_vals) == 0: - return Response(content=pd.DataFrame().to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame().to_csv(index=False), media_type="text/csv" + ) for x in return_vals: - x['player_id'] = x['player']['player_id'] - x['player_name'] = x['player']['p_name'] - x['player_cardset'] = x['player']['cardset']['name'] - x['team_id'] = x['team']['id'] - x['team_abbrev'] = x['team']['abbrev'] - if 'id' in x['game']: - x['game_id'] = x['game']['id'] - if 'game_type' in x['game']: - x['game_type'] = x['game']['game_type'] - del x['game'] - del x['player'], x['team'] + x["player_id"] = x["player"]["player_id"] + x["player_name"] = x["player"]["p_name"] + x["player_cardset"] = x["player"]["cardset"]["name"] + x["team_id"] = x["team"]["id"] + x["team_abbrev"] = x["team"]["abbrev"] + if "id" in x["game"]: + x["game_id"] = x["game"]["id"] + if "game_type" in x["game"]: + x["game_type"] = x["game"]["game_type"] + del x["game"] + del x["player"], x["team"] output = pd.DataFrame(return_vals) - first = ['player_id', 'player_name', 'player_cardset', 'team_id', 'team_abbrev'] - exclude = first + ['lob_all', 'lob_all_rate', 'lob_2outs', 'rbi%'] + first = ["player_id", "player_name", "player_cardset", "team_id", "team_abbrev"] + exclude = first + ["lob_all", "lob_all_rate", "lob_2outs", "rbi%"] output = output[first + [col for col in output.columns if col not in exclude]] db.close() - return Response(content=pd.DataFrame(output).to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame(output).to_csv(index=False), media_type="text/csv" + ) db.close() return return_stats -@router.get('/pitching') +@router.get("/pitching") async def get_pitching_totals( - season: list = Query(default=None), week: list = Query(default=None), - s_type: Literal['regular', 'post', 'total', None] = None, player_id: list = Query(default=None), - group_by: Literal[ - 'team', 'player', 'playerteam', 'playergame', 'teamgame', 'league', 'gmtype', 'playergtype', - 'playerteamgtype'] = 'player', is_gauntlet: Optional[bool] = None, - min_pa: Optional[int] = 1, team_id: list = Query(default=None), manager_id: list = Query(default=None), - obc: list = Query(default=None), risp: Optional[bool] = None, inning: list = Query(default=None), - page_num: Optional[int] = 1, game_type: list = Query(default=None), sort: Optional[str] = 'ip-desc', - limit: Optional[int] = 500, short_output: Optional[bool] = False, csv: Optional[bool] = False): + season: list = Query(default=None), + week: list = Query(default=None), + s_type: Literal["regular", "post", "total", None] = None, + player_id: list = Query(default=None), + group_by: Literal[ + "team", + "player", + "playerteam", + "playergame", + "teamgame", + "league", + "gmtype", + "playergtype", + "playerteamgtype", + ] = "player", + is_gauntlet: Optional[bool] = None, + min_pa: Optional[int] = 1, + team_id: list = Query(default=None), + manager_id: list = Query(default=None), + obc: list = Query(default=None), + risp: Optional[bool] = None, + inning: list = Query(default=None), + page_num: Optional[int] = 1, + 
game_type: list = Query(default=None), + sort: Optional[str] = "ip-desc", + limit: Optional[int] = 500, + short_output: Optional[bool] = False, + csv: Optional[bool] = False, +): season_games = StratGame.select() if season is not None: season_games = season_games.where(StratGame.season << season) if week is not None and s_type is not None: - raise HTTPException(status_code=400, detail=f'Week and s_type parameters cannot be used in the same query') + raise HTTPException( + status_code=400, + detail=f"Week and s_type parameters cannot be used in the same query", + ) if week is not None: season_games = season_games.where(StratGame.week << week) if s_type is not None: - if s_type == 'regular': + if s_type == "regular": season_games = season_games.where(StratGame.week <= 18) - elif s_type == 'post': + elif s_type == "post": season_games = season_games.where(StratGame.week > 18) if manager_id is not None: season_games = season_games.where( - (StratGame.away_manager_id << manager_id) | (StratGame.home_manager_id << manager_id) + (StratGame.away_manager_id << manager_id) + | (StratGame.home_manager_id << manager_id) ) # Build SELECT fields conditionally based on group_by to satisfy PostgreSQL's # strict GROUP BY requirement (all non-aggregated SELECT fields must be in GROUP BY) pit_select_fields = [ - fn.SUM(StratPlay.pa).alias('sum_pa'), - fn.SUM(StratPlay.ab).alias('sum_ab'), - fn.SUM(StratPlay.run).alias('sum_run'), - fn.SUM(StratPlay.hit).alias('sum_hit'), - fn.SUM(StratPlay.rbi).alias('sum_rbi'), - fn.SUM(StratPlay.double).alias('sum_double'), - fn.SUM(StratPlay.triple).alias('sum_triple'), - fn.SUM(StratPlay.homerun).alias('sum_hr'), - fn.SUM(StratPlay.bb).alias('sum_bb'), - fn.SUM(StratPlay.so).alias('sum_so'), - fn.SUM(StratPlay.wpa).alias('sum_wpa'), - fn.SUM(StratPlay.hbp).alias('sum_hbp'), - fn.SUM(StratPlay.sac).alias('sum_sac'), - fn.SUM(StratPlay.ibb).alias('sum_ibb'), - fn.SUM(StratPlay.gidp).alias('sum_gidp'), - fn.SUM(StratPlay.sb).alias('sum_sb'), - fn.SUM(StratPlay.cs).alias('sum_cs'), - fn.SUM(StratPlay.bphr).alias('sum_bphr'), - fn.SUM(StratPlay.bpfo).alias('sum_bpfo'), - fn.SUM(StratPlay.bp1b).alias('sum_bp1b'), - fn.SUM(StratPlay.bplo).alias('sum_bplo'), - fn.SUM(StratPlay.wild_pitch).alias('sum_wp'), - fn.SUM(StratPlay.balk).alias('sum_balk'), - fn.SUM(StratPlay.outs).alias('sum_outs'), - fn.SUM(StratPlay.e_run).alias('sum_erun'), - fn.SUM(StratPlay.re24).alias('sum_re24'), - fn.COUNT(StratPlay.on_first_final).filter( - StratPlay.on_first_final.is_null(False) & (StratPlay.on_first_final != 4)).alias('count_lo1'), - fn.COUNT(StratPlay.on_second_final).filter( - StratPlay.on_second_final.is_null(False) & (StratPlay.on_second_final != 4)).alias('count_lo2'), - fn.COUNT(StratPlay.on_third_final).filter( - StratPlay.on_third_final.is_null(False) & (StratPlay.on_third_final != 4)).alias('count_lo3'), - fn.COUNT(StratPlay.on_first).filter(StratPlay.on_first.is_null(False)).alias('count_runner1'), - fn.COUNT(StratPlay.on_second).filter(StratPlay.on_second.is_null(False)).alias('count_runner2'), - fn.COUNT(StratPlay.on_third).filter(StratPlay.on_third.is_null(False)).alias('count_runner3'), - fn.COUNT(StratPlay.on_first_final).filter( - StratPlay.on_first_final.is_null(False) & (StratPlay.on_first_final != 4) & - (StratPlay.starting_outs + StratPlay.outs == 3)).alias('count_lo1_3out'), - fn.COUNT(StratPlay.on_second_final).filter( - StratPlay.on_second_final.is_null(False) & (StratPlay.on_second_final != 4) & - (StratPlay.starting_outs + StratPlay.outs == 
3)).alias('count_lo2_3out'), - fn.COUNT(StratPlay.on_third_final).filter( - StratPlay.on_third_final.is_null(False) & (StratPlay.on_third_final != 4) & - (StratPlay.starting_outs + StratPlay.outs == 3)).alias('count_lo3_3out') + fn.SUM(StratPlay.pa).alias("sum_pa"), + fn.SUM(StratPlay.ab).alias("sum_ab"), + fn.SUM(StratPlay.run).alias("sum_run"), + fn.SUM(StratPlay.hit).alias("sum_hit"), + fn.SUM(StratPlay.rbi).alias("sum_rbi"), + fn.SUM(StratPlay.double).alias("sum_double"), + fn.SUM(StratPlay.triple).alias("sum_triple"), + fn.SUM(StratPlay.homerun).alias("sum_hr"), + fn.SUM(StratPlay.bb).alias("sum_bb"), + fn.SUM(StratPlay.so).alias("sum_so"), + fn.SUM(StratPlay.wpa).alias("sum_wpa"), + fn.SUM(StratPlay.hbp).alias("sum_hbp"), + fn.SUM(StratPlay.sac).alias("sum_sac"), + fn.SUM(StratPlay.ibb).alias("sum_ibb"), + fn.SUM(StratPlay.gidp).alias("sum_gidp"), + fn.SUM(StratPlay.sb).alias("sum_sb"), + fn.SUM(StratPlay.cs).alias("sum_cs"), + fn.SUM(StratPlay.bphr).alias("sum_bphr"), + fn.SUM(StratPlay.bpfo).alias("sum_bpfo"), + fn.SUM(StratPlay.bp1b).alias("sum_bp1b"), + fn.SUM(StratPlay.bplo).alias("sum_bplo"), + fn.SUM(StratPlay.wild_pitch).alias("sum_wp"), + fn.SUM(StratPlay.balk).alias("sum_balk"), + fn.SUM(StratPlay.outs).alias("sum_outs"), + fn.SUM(StratPlay.e_run).alias("sum_erun"), + fn.SUM(StratPlay.re24).alias("sum_re24"), + fn.COUNT(StratPlay.on_first_final) + .filter( + StratPlay.on_first_final.is_null(False) & (StratPlay.on_first_final != 4) + ) + .alias("count_lo1"), + fn.COUNT(StratPlay.on_second_final) + .filter( + StratPlay.on_second_final.is_null(False) & (StratPlay.on_second_final != 4) + ) + .alias("count_lo2"), + fn.COUNT(StratPlay.on_third_final) + .filter( + StratPlay.on_third_final.is_null(False) & (StratPlay.on_third_final != 4) + ) + .alias("count_lo3"), + fn.COUNT(StratPlay.on_first) + .filter(StratPlay.on_first.is_null(False)) + .alias("count_runner1"), + fn.COUNT(StratPlay.on_second) + .filter(StratPlay.on_second.is_null(False)) + .alias("count_runner2"), + fn.COUNT(StratPlay.on_third) + .filter(StratPlay.on_third.is_null(False)) + .alias("count_runner3"), + fn.COUNT(StratPlay.on_first_final) + .filter( + StratPlay.on_first_final.is_null(False) + & (StratPlay.on_first_final != 4) + & (StratPlay.starting_outs + StratPlay.outs == 3) + ) + .alias("count_lo1_3out"), + fn.COUNT(StratPlay.on_second_final) + .filter( + StratPlay.on_second_final.is_null(False) + & (StratPlay.on_second_final != 4) + & (StratPlay.starting_outs + StratPlay.outs == 3) + ) + .alias("count_lo2_3out"), + fn.COUNT(StratPlay.on_third_final) + .filter( + StratPlay.on_third_final.is_null(False) + & (StratPlay.on_third_final != 4) + & (StratPlay.starting_outs + StratPlay.outs == 3) + ) + .alias("count_lo3_3out"), ] # Add non-aggregated fields based on grouping type - if group_by in ['player', 'playerteam', 'playergame', 'playergtype', 'playerteamgtype']: + if group_by in [ + "player", + "playerteam", + "playergame", + "playergtype", + "playerteamgtype", + ]: pit_select_fields.insert(0, StratPlay.pitcher) - if group_by in ['team', 'playerteam', 'teamgame', 'playerteamgtype']: + if group_by in ["team", "playerteam", "teamgame", "playerteamgtype"]: pit_select_fields.append(StratPlay.pitcher_team) - if group_by in ['playergame', 'teamgame']: + if group_by in ["playergame", "teamgame"]: pit_select_fields.append(StratPlay.game) pit_plays = ( - StratPlay - .select(*pit_select_fields) - .where((StratPlay.game << season_games) & (StratPlay.pitcher.is_null(False))) - .having(fn.SUM(StratPlay.pa) >= min_pa) - ) - 
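# A minimal sketch of the pattern used above, with a hypothetical Play model
# rather than the real StratPlay schema: the SELECT list and the GROUP BY list
# are derived from the same group_by decision, so PostgreSQL's rule that every
# non-aggregated SELECT column must appear in GROUP BY always holds.
from peewee import CharField, IntegerField, Model, SqliteDatabase, fn

sketch_db = SqliteDatabase(":memory:")

class Play(Model):
    pitcher = CharField()
    team = CharField()
    pa = IntegerField()

    class Meta:
        database = sketch_db

def pitching_totals_sketch(group_by: str):
    select_fields = [fn.SUM(Play.pa).alias("sum_pa")]
    group_fields = []
    if group_by in ("player", "playerteam"):
        select_fields.insert(0, Play.pitcher)
        group_fields.append(Play.pitcher)
    if group_by in ("team", "playerteam"):
        select_fields.append(Play.team)
        group_fields.append(Play.team)
    # Both lists come from the same branches, so they cannot drift apart.
    return Play.select(*select_fields).group_by(*group_fields)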
all_dec = ( - Decision - .select(Decision.pitcher, fn.SUM(Decision.win).alias('sum_win'), fn.SUM(Decision.loss).alias('sum_loss'), - fn.SUM(Decision.hold).alias('sum_hold'), fn.SUM(Decision.is_save).alias('sum_save'), - fn.SUM(Decision.b_save).alias('sum_b_save'), fn.SUM(Decision.irunners).alias('sum_irunners'), - fn.SUM(Decision.irunners_scored).alias('sum_irun_scored'), - fn.SUM(Decision.is_start).alias('sum_gs'), fn.COUNT(Decision.game).alias('sum_game')) - .where(Decision.game << season_games) + StratPlay.select(*pit_select_fields) + .where((StratPlay.game << season_games) & (StratPlay.pitcher.is_null(False))) + .having(fn.SUM(StratPlay.pa) >= min_pa) ) + all_dec = Decision.select( + Decision.pitcher, + fn.SUM(Decision.win).alias("sum_win"), + fn.SUM(Decision.loss).alias("sum_loss"), + fn.SUM(Decision.hold).alias("sum_hold"), + fn.SUM(Decision.is_save).alias("sum_save"), + fn.SUM(Decision.b_save).alias("sum_b_save"), + fn.SUM(Decision.irunners).alias("sum_irunners"), + fn.SUM(Decision.irunners_scored).alias("sum_irun_scored"), + fn.SUM(Decision.is_start).alias("sum_gs"), + fn.COUNT(Decision.game).alias("sum_game"), + ).where(Decision.game << season_games) if player_id is not None: all_players = Player.select().where(Player.player_id << player_id) @@ -758,7 +966,9 @@ async def get_pitching_totals( if obc is not None: pit_plays = pit_plays.where(StratPlay.on_base_code << obc) if risp is not None: - pit_plays = pit_plays.where(StratPlay.on_base_code << ['100', '101', '110', '111', '010', '011']) + pit_plays = pit_plays.where( + StratPlay.on_base_code << ["100", "101", "110", "111", "010", "011"] + ) if inning is not None: pit_plays = pit_plays.where(StratPlay.inning_num << inning) if game_type is not None: @@ -767,52 +977,60 @@ async def get_pitching_totals( pit_plays = pit_plays.where(StratPlay.game << all_games) if group_by is not None: - if group_by == 'player': + if group_by == "player": pit_plays = pit_plays.group_by(StratPlay.pitcher) - elif group_by == 'team': + elif group_by == "team": pit_plays = pit_plays.group_by(StratPlay.pitcher_team) - elif group_by == 'playerteam': + elif group_by == "playerteam": pit_plays = pit_plays.group_by(StratPlay.pitcher, StratPlay.pitcher_team) - elif group_by == 'playergame': + elif group_by == "playergame": pit_plays = pit_plays.group_by(StratPlay.pitcher, StratPlay.game) - elif group_by == 'teamgame': + elif group_by == "teamgame": pit_plays = pit_plays.group_by(StratPlay.pitcher_team, StratPlay.game) - elif group_by == 'league': + elif group_by == "league": pit_plays = pit_plays.join(StratGame) pit_plays = pit_plays.group_by(StratPlay.game.season) - elif group_by == 'gtype': + elif group_by == "gtype": pit_plays = pit_plays.join(StratGame) pit_plays = pit_plays.group_by(StratPlay.game.game_type) - elif group_by == 'playergtype': + elif group_by == "playergtype": pit_plays = pit_plays.join(StratGame) pit_plays = pit_plays.group_by(StratPlay.pitcher, StratPlay.game.game_type) - elif group_by == 'playerteamgtype': + elif group_by == "playerteamgtype": pit_plays = pit_plays.join(StratGame) - pit_plays = pit_plays.group_by(StratPlay.pitcher, StratPlay.pitcher_team, StratPlay.game.game_type) + pit_plays = pit_plays.group_by( + StratPlay.pitcher, StratPlay.pitcher_team, StratPlay.game.game_type + ) if sort is not None: - if sort == 'player': + if sort == "player": pit_plays = pit_plays.order_by(StratPlay.pitcher) - elif sort == 'team': + elif sort == "team": pit_plays = pit_plays.order_by(StratPlay.pitcher_team) - elif sort == 'wpa-desc': - pit_plays 
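# Hedged aside on the Decision aggregation above: if win / loss / hold /
# is_save / is_start are BooleanFields (their types are not visible in this
# hunk), SUM() over them works on SQLite, where booleans are stored as 0/1,
# but PostgreSQL rejects it ("function sum(boolean) does not exist").
# A portable form casts first; the cast is harmless if they are already integers:
win_total = fn.SUM(Decision.win.cast("integer")).alias("sum_win")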
= pit_plays.order_by(SQL('sum_wpa').asc()) # functions seem reversed since pitcher plays negative - elif sort == 'wpa-asc': - pit_plays = pit_plays.order_by(SQL('sum_wpa').desc()) - elif sort == 're24-desc': - pit_plays = pit_plays.order_by(SQL('sum_re24').asc()) # functions seem reversed since pitcher plays negative - elif sort == 're24-asc': - pit_plays = pit_plays.order_by(SQL('sum_re24').desc()) - elif sort == 'ip-desc': - pit_plays = pit_plays.order_by(SQL('sum_outs').desc()) - elif sort == 'ip-asc': - pit_plays = pit_plays.order_by(SQL('sum_outs').asc()) - elif sort == 'game-desc': - pit_plays = pit_plays.order_by(SQL('sum_game').desc()) - elif sort == 'game-asc': - pit_plays = pit_plays.order_by(SQL('sum_game').asc()) - elif sort == 'newest': - pit_plays = pit_plays.order_by(StratPlay.game_id.desc(), StratPlay.play_num.desc()) - elif sort == 'oldest': + elif sort == "wpa-desc": + pit_plays = pit_plays.order_by( + SQL("sum_wpa").asc() + ) # functions seem reversed since pitcher plays negative + elif sort == "wpa-asc": + pit_plays = pit_plays.order_by(SQL("sum_wpa").desc()) + elif sort == "re24-desc": + pit_plays = pit_plays.order_by( + SQL("sum_re24").asc() + ) # functions seem reversed since pitcher plays negative + elif sort == "re24-asc": + pit_plays = pit_plays.order_by(SQL("sum_re24").desc()) + elif sort == "ip-desc": + pit_plays = pit_plays.order_by(SQL("sum_outs").desc()) + elif sort == "ip-asc": + pit_plays = pit_plays.order_by(SQL("sum_outs").asc()) + elif sort == "game-desc": + pit_plays = pit_plays.order_by(SQL("sum_game").desc()) + elif sort == "game-asc": + pit_plays = pit_plays.order_by(SQL("sum_game").asc()) + elif sort == "newest": + pit_plays = pit_plays.order_by( + StratPlay.game_id.desc(), StratPlay.play_num.desc() + ) + elif sort == "oldest": pit_plays = pit_plays.order_by(StratPlay.game_id, StratPlay.play_num) if limit < 1: @@ -821,126 +1039,156 @@ async def get_pitching_totals( limit = 500 pit_plays = pit_plays.paginate(page_num, limit) - return_stats = { - 'count': pit_plays.count(), - 'stats': [] - } + return_stats = {"count": pit_plays.count(), "stats": []} for x in pit_plays: this_dec = all_dec.where(Decision.pitcher == x.pitcher) if game_type is not None: all_types = [x.lower() for x in game_type] - all_games = StratGame.select().where(fn.Lower(StratGame.game_type) << all_types) + all_games = StratGame.select().where( + fn.Lower(StratGame.game_type) << all_types + ) this_dec = this_dec.where(Decision.game << all_games) tot_outs = x.sum_outs if x.sum_outs > 0 else 1 obp = (x.sum_hit + x.sum_bb + x.sum_hbp + x.sum_ibb) / x.sum_pa - slg = (x.sum_hr * 4 + x.sum_triple * 3 + x.sum_double * 2 + - (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr)) / max(x.sum_ab, 1) + slg = ( + x.sum_hr * 4 + + x.sum_triple * 3 + + x.sum_double * 2 + + (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + ) / max(x.sum_ab, 1) tot_bb = 0.1 if x.sum_bb == 0 else x.sum_bb - this_game = 'TOT' - if group_by in ['playergame', 'teamgame']: - this_game = x.game_id if short_output else model_to_dict(x.game, recurse=False) - this_dec = all_dec.where((Decision.pitcher == x.pitcher) & (Decision.game == x.game)) - elif 'gtype' in group_by: + this_game = "TOT" + if group_by in ["playergame", "teamgame"]: + this_game = ( + x.game_id if short_output else model_to_dict(x.game, recurse=False) + ) + this_dec = all_dec.where( + (Decision.pitcher == x.pitcher) & (Decision.game == x.game) + ) + elif "gtype" in group_by: this_game = x.game.game_type lob_all_rate, lob_2outs_rate, rbi_rate = 0, 0, 0 
if x.count_runner1 + x.count_runner2 + x.count_runner3 > 0: - lob_all_rate = (x.count_lo1 + x.count_lo2 + x.count_lo3) / \ - (x.count_runner1 + x.count_runner2 + x.count_runner3) - rbi_rate = (x.sum_rbi - x.sum_hr) / (x.count_runner1 + x.count_runner2 + x.count_runner3) + lob_all_rate = (x.count_lo1 + x.count_lo2 + x.count_lo3) / ( + x.count_runner1 + x.count_runner2 + x.count_runner3 + ) + rbi_rate = (x.sum_rbi - x.sum_hr) / ( + x.count_runner1 + x.count_runner2 + x.count_runner3 + ) - return_stats['stats'].append({ - 'player': x.pitcher_id if short_output else model_to_dict(x.pitcher), - 'team': x.pitcher_team_id if short_output else model_to_dict(x.pitcher_team), - 'tbf': x.sum_pa, - 'outs': x.sum_outs, - 'games': this_dec[0].sum_game, - 'gs': this_dec[0].sum_gs, - 'win': this_dec[0].sum_win, - 'loss': this_dec[0].sum_loss, - 'hold': this_dec[0].sum_hold, - 'save': this_dec[0].sum_save, - 'bsave': this_dec[0].sum_b_save, - 'ir': this_dec[0].sum_irunners, - 'ir_sc': this_dec[0].sum_irun_scored, - 'ab': x.sum_ab, - 'run': x.sum_run, - 'e_run': x.sum_erun, - 'hits': x.sum_hit, - 'double': x.sum_double, - 'triple': x.sum_triple, - 'hr': x.sum_hr, - 'bb': x.sum_bb, - 'so': x.sum_so, - 'hbp': x.sum_hbp, - 'sac': x.sum_sac, - 'ibb': x.sum_ibb, - 'gidp': x.sum_gidp, - 'sb': x.sum_sb, - 'cs': x.sum_cs, - 'bphr': x.sum_bphr, - 'bpfo': x.sum_bpfo, - 'bp1b': x.sum_bp1b, - 'bplo': x.sum_bplo, - 'wp': x.sum_wp, - 'balk': x.sum_balk, - 'wpa': x.sum_wpa * -1, - 're24': x.sum_re24 * -1, - 'era': (x.sum_erun * 27) / tot_outs, - 'whip': ((x.sum_bb + x.sum_hit + x.sum_ibb) * 3) / tot_outs, - 'avg': x.sum_hit / max(x.sum_ab, 1), - 'obp': obp, - 'slg': slg, - 'ops': obp + slg, - 'woba': (.69 * x.sum_bb + .72 * x.sum_hbp + .89 * (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + - 1.27 * x.sum_double + 1.62 * x.sum_triple + 2.1 * x.sum_hr) / max(x.sum_pa - x.sum_ibb, 1), - 'k/9': x.sum_so * 9 / (tot_outs / 3), - 'bb/9': x.sum_bb * 9 / (tot_outs / 3), - 'k/bb': x.sum_so / tot_bb, - 'game': this_game, - 'lob_2outs': x.count_lo1_3out + x.count_lo2_3out + x.count_lo3_3out, - 'rbi%': rbi_rate - }) + return_stats["stats"].append( + { + "player": x.pitcher_id if short_output else model_to_dict(x.pitcher), + "team": x.pitcher_team_id + if short_output + else model_to_dict(x.pitcher_team), + "tbf": x.sum_pa, + "outs": x.sum_outs, + "games": this_dec[0].sum_game, + "gs": this_dec[0].sum_gs, + "win": this_dec[0].sum_win, + "loss": this_dec[0].sum_loss, + "hold": this_dec[0].sum_hold, + "save": this_dec[0].sum_save, + "bsave": this_dec[0].sum_b_save, + "ir": this_dec[0].sum_irunners, + "ir_sc": this_dec[0].sum_irun_scored, + "ab": x.sum_ab, + "run": x.sum_run, + "e_run": x.sum_erun, + "hits": x.sum_hit, + "double": x.sum_double, + "triple": x.sum_triple, + "hr": x.sum_hr, + "bb": x.sum_bb, + "so": x.sum_so, + "hbp": x.sum_hbp, + "sac": x.sum_sac, + "ibb": x.sum_ibb, + "gidp": x.sum_gidp, + "sb": x.sum_sb, + "cs": x.sum_cs, + "bphr": x.sum_bphr, + "bpfo": x.sum_bpfo, + "bp1b": x.sum_bp1b, + "bplo": x.sum_bplo, + "wp": x.sum_wp, + "balk": x.sum_balk, + "wpa": x.sum_wpa * -1, + "re24": x.sum_re24 * -1, + "era": (x.sum_erun * 27) / tot_outs, + "whip": ((x.sum_bb + x.sum_hit + x.sum_ibb) * 3) / tot_outs, + "avg": x.sum_hit / max(x.sum_ab, 1), + "obp": obp, + "slg": slg, + "ops": obp + slg, + "woba": ( + 0.69 * x.sum_bb + + 0.72 * x.sum_hbp + + 0.89 * (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + + 1.27 * x.sum_double + + 1.62 * x.sum_triple + + 2.1 * x.sum_hr + ) + / max(x.sum_pa - x.sum_ibb, 1), + "k/9": 
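# Quick check of the rate-stat arithmetic above: the endpoint keeps outs rather
# than innings, and IP = outs / 3, so the constants fold the conversion in
# (ERA = 9 * ER / IP = 27 * ER / outs; WHIP = 3 * (BB + H + IBB) / outs).
# With hypothetical totals of 21 outs (7 IP), 2 earned runs and 8 walks+hits:
assert abs((2 * 27) / 21 - (9 * 2) / 7) < 1e-9   # ERA ~= 2.57
assert abs((8 * 3) / 21 - 8 / 7) < 1e-9          # WHIP ~= 1.14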
x.sum_so * 9 / (tot_outs / 3), + "bb/9": x.sum_bb * 9 / (tot_outs / 3), + "k/bb": x.sum_so / tot_bb, + "game": this_game, + "lob_2outs": x.count_lo1_3out + x.count_lo2_3out + x.count_lo3_3out, + "rbi%": rbi_rate, + } + ) db.close() if csv: - return_vals = return_stats['stats'] + return_vals = return_stats["stats"] if len(return_vals) == 0: - return Response(content=pd.DataFrame().to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame().to_csv(index=False), media_type="text/csv" + ) for x in return_vals: - x['player_id'] = x['player']['player_id'] - x['player_name'] = x['player']['p_name'] - x['player_cardset'] = x['player']['cardset']['name'] - x['team_id'] = x['team']['id'] - x['team_abbrev'] = x['team']['abbrev'] - if 'id' in x['game']: - x['game_id'] = x['game']['id'] - if 'game_type' in x['game']: - x['game_type'] = x['game']['game_type'] - del x['game'] - del x['player'], x['team'] + x["player_id"] = x["player"]["player_id"] + x["player_name"] = x["player"]["p_name"] + x["player_cardset"] = x["player"]["cardset"]["name"] + x["team_id"] = x["team"]["id"] + x["team_abbrev"] = x["team"]["abbrev"] + if "id" in x["game"]: + x["game_id"] = x["game"]["id"] + if "game_type" in x["game"]: + x["game_type"] = x["game"]["game_type"] + del x["game"] + del x["player"], x["team"] output = pd.DataFrame(return_vals) - first = ['player_id', 'player_name', 'player_cardset', 'team_id', 'team_abbrev'] - exclude = first + ['lob_2outs', 'rbi%'] + first = ["player_id", "player_name", "player_cardset", "team_id", "team_abbrev"] + exclude = first + ["lob_2outs", "rbi%"] output = output[first + [col for col in output.columns if col not in exclude]] db.close() - return Response(content=pd.DataFrame(output).to_csv(index=False), media_type='text/csv') + return Response( + content=pd.DataFrame(output).to_csv(index=False), media_type="text/csv" + ) return return_stats -@router.get('/game-summary/{game_id}') +@router.get("/game-summary/{game_id}") async def get_game_summary( - game_id: int, csv: Optional[bool] = False, short_output: Optional[bool] = False, tp_max: Optional[int] = 1, poop_max: Optional[int] = 1): + game_id: int, + csv: Optional[bool] = False, + short_output: Optional[bool] = False, + tp_max: Optional[int] = 1, + poop_max: Optional[int] = 1, +): this_game = StratGame.get_or_none(StratGame.id == game_id) if this_game is None: db.close() - raise HTTPException(status_code=404, detail=f'Game {game_id} not found') + raise HTTPException(status_code=404, detail=f"Game {game_id} not found") game_plays = StratPlay.select().where(StratPlay.game_id == game_id) all_hits = game_plays.where(StratPlay.hit == 1) @@ -967,136 +1215,174 @@ async def get_game_summary( all_cs = steal_att.where(StratPlay.cs == 1) all_batters = ( - StratPlay - .select(StratPlay.batter, fn.SUM(StratPlay.re24).alias('sum_re24'), fn.SUM(StratPlay.ab).alias('sum_ab'), - fn.SUM(StratPlay.run).alias('sum_run'), - fn.SUM(StratPlay.hit).alias('sum_hit'), fn.SUM(StratPlay.rbi).alias('sum_rbi'), - fn.SUM(StratPlay.double).alias('sum_double'), fn.SUM(StratPlay.triple).alias('sum_triple'), - fn.SUM(StratPlay.homerun).alias('sum_hr'), fn.SUM(StratPlay.bphr).alias('sum_bphr')) - .where(StratPlay.game_id == game_id) - .group_by(StratPlay.batter, StratPlay.batter_team) + StratPlay.select( + StratPlay.batter, + fn.SUM(StratPlay.re24).alias("sum_re24"), + fn.SUM(StratPlay.ab).alias("sum_ab"), + fn.SUM(StratPlay.run).alias("sum_run"), + fn.SUM(StratPlay.hit).alias("sum_hit"), + fn.SUM(StratPlay.rbi).alias("sum_rbi"), + 
fn.SUM(StratPlay.double).alias("sum_double"), + fn.SUM(StratPlay.triple).alias("sum_triple"), + fn.SUM(StratPlay.homerun).alias("sum_hr"), + fn.SUM(StratPlay.bphr).alias("sum_bphr"), + ) + .where(StratPlay.game_id == game_id) + .group_by(StratPlay.batter, StratPlay.batter_team) ) all_pitchers = ( - StratPlay - .select(StratPlay.pitcher, fn.SUM(StratPlay.re24).alias('sum_re24'), fn.SUM(StratPlay.pa).alias('sum_pa'), - fn.SUM(StratPlay.outs).alias('sum_outs'), fn.SUM(StratPlay.e_run).alias('sum_erun'), - fn.SUM(StratPlay.run).alias('sum_run'), fn.SUM(StratPlay.so).alias('sum_so'), - fn.SUM(StratPlay.hit).alias('sum_hit')) - .where(StratPlay.game_id == game_id) - .group_by(StratPlay.pitcher, StratPlay.pitcher_team) + StratPlay.select( + StratPlay.pitcher, + fn.SUM(StratPlay.re24).alias("sum_re24"), + fn.SUM(StratPlay.pa).alias("sum_pa"), + fn.SUM(StratPlay.outs).alias("sum_outs"), + fn.SUM(StratPlay.e_run).alias("sum_erun"), + fn.SUM(StratPlay.run).alias("sum_run"), + fn.SUM(StratPlay.so).alias("sum_so"), + fn.SUM(StratPlay.hit).alias("sum_hit"), + ) + .where(StratPlay.game_id == game_id) + .group_by(StratPlay.pitcher, StratPlay.pitcher_team) ) - top_pitchers = all_pitchers.order_by(SQL('sum_re24').asc()).limit(tp_max) - top_batters = all_batters.order_by(SQL('sum_re24').desc()).limit(tp_max) - bot_pitcher = all_pitchers.order_by(SQL('sum_re24').desc()).get() - bot_batter = all_batters.order_by(SQL('sum_re24').asc()).get() + top_pitchers = all_pitchers.order_by(SQL("sum_re24").asc()).limit(tp_max) + top_batters = all_batters.order_by(SQL("sum_re24").desc()).limit(tp_max) + bot_pitcher = all_pitchers.order_by(SQL("sum_re24").desc()).get() + bot_batter = all_batters.order_by(SQL("sum_re24").asc()).get() - top_b = [{ - 'player': model_to_dict(x.batter, recurse=not short_output), - 'ab': x.sum_ab, - 'run': x.sum_run, - 'hit': x.sum_hit, - 'rbi': x.sum_rbi, - 'double': x.sum_double, - 'triple': x.sum_triple, - 'hr': x.sum_hr, - 're24': x.sum_re24 - } for x in top_batters] - top_p = [{ - 'player': model_to_dict(x.pitcher, recurse=not short_output), - 'tbf': x.sum_pa, - 'ip': math.floor(x.sum_outs / 3) + ((x.sum_outs % 3) * .1), - 'run': x.sum_run, - 'e_run': x.sum_erun, - 'hit': x.sum_hit, - 'so': x.sum_so, - 're24': x.sum_re24 * -1 - } for x in top_pitchers] + top_b = [ + { + "player": model_to_dict(x.batter, recurse=not short_output), + "ab": x.sum_ab, + "run": x.sum_run, + "hit": x.sum_hit, + "rbi": x.sum_rbi, + "double": x.sum_double, + "triple": x.sum_triple, + "hr": x.sum_hr, + "re24": x.sum_re24, + } + for x in top_batters + ] + top_p = [ + { + "player": model_to_dict(x.pitcher, recurse=not short_output), + "tbf": x.sum_pa, + "ip": math.floor(x.sum_outs / 3) + ((x.sum_outs % 3) * 0.1), + "run": x.sum_run, + "e_run": x.sum_erun, + "hit": x.sum_hit, + "so": x.sum_so, + "re24": x.sum_re24 * -1, + } + for x in top_pitchers + ] top_players = [*top_b, *top_p] - logging.debug(f'top_players: {top_players}') + logging.debug(f"top_players: {top_players}") bot_players = [ { - 'player': model_to_dict(bot_pitcher.pitcher, recurse=not short_output), - 'tbf': bot_pitcher.sum_pa, - 'ip': math.floor(bot_pitcher.sum_outs / 3) + ((bot_pitcher.sum_outs % 3) * .1), - 'run': bot_pitcher.sum_run, - 'e_run': bot_pitcher.sum_erun, - 'hit': bot_pitcher.sum_hit, - 'so': bot_pitcher.sum_so, - 're24': bot_pitcher.sum_re24 * -1 + "player": model_to_dict(bot_pitcher.pitcher, recurse=not short_output), + "tbf": bot_pitcher.sum_pa, + "ip": math.floor(bot_pitcher.sum_outs / 3) + + ((bot_pitcher.sum_outs % 3) * 0.1), + "run": 
bot_pitcher.sum_run, + "e_run": bot_pitcher.sum_erun, + "hit": bot_pitcher.sum_hit, + "so": bot_pitcher.sum_so, + "re24": bot_pitcher.sum_re24 * -1, }, { - 'player': model_to_dict(bot_batter.batter, recurse=not short_output), - 'ab': bot_batter.sum_ab, - 'run': bot_batter.sum_run, - 'hit': bot_batter.sum_hit, - 'rbi': bot_batter.sum_rbi, - 'double': bot_batter.sum_double, - 'triple': bot_batter.sum_triple, - 'hr': bot_batter.sum_hr, - 're24': bot_batter.sum_re24 - } + "player": model_to_dict(bot_batter.batter, recurse=not short_output), + "ab": bot_batter.sum_ab, + "run": bot_batter.sum_run, + "hit": bot_batter.sum_hit, + "rbi": bot_batter.sum_rbi, + "double": bot_batter.sum_double, + "triple": bot_batter.sum_triple, + "hr": bot_batter.sum_hr, + "re24": bot_batter.sum_re24, + }, ] return { - 'game': model_to_dict(this_game, recurse=not short_output), - 'teams': { - 'away': model_to_dict(this_game.away_team, recurse=not short_output), - 'home': model_to_dict(this_game.home_team, recurse=not short_output), + "game": model_to_dict(this_game, recurse=not short_output), + "teams": { + "away": model_to_dict(this_game.away_team, recurse=not short_output), + "home": model_to_dict(this_game.home_team, recurse=not short_output), }, - 'runs': { - 'away': all_runs.where(StratPlay.batter_team == this_game.away_team).count(), - 'home': all_runs.where(StratPlay.batter_team == this_game.home_team).count() + "runs": { + "away": all_runs.where( + StratPlay.batter_team == this_game.away_team + ).count(), + "home": all_runs.where( + StratPlay.batter_team == this_game.home_team + ).count(), }, - 'hits': { - 'away': all_hits.where(StratPlay.batter_team == this_game.away_team).count(), - 'home': all_hits.where(StratPlay.batter_team == this_game.home_team).count() + "hits": { + "away": all_hits.where( + StratPlay.batter_team == this_game.away_team + ).count(), + "home": all_hits.where( + StratPlay.batter_team == this_game.home_team + ).count(), }, - 'errors': { - 'away': all_errors.where(StratPlay.defender_team == this_game.away_team).count(), - 'home': all_errors.where(StratPlay.defender_team == this_game.home_team).count() + "errors": { + "away": all_errors.where( + StratPlay.defender_team == this_game.away_team + ).count(), + "home": all_errors.where( + StratPlay.defender_team == this_game.home_team + ).count(), }, - 'top-players': sorted(top_players, key=lambda x: x['re24'], reverse=True)[:tp_max], - 'pooper': sorted(bot_players, key=lambda x: x['re24'])[:poop_max], - 'pitchers': { - 'win': model_to_dict(winner.get().pitcher, recurse=not short_output), - 'loss': model_to_dict(loser.get().pitcher, recurse=not short_output), - 'holds': [model_to_dict(x.pitcher, recurse=not short_output) for x in all_holds], - 'save': save_pitcher, - 'b_saves': [model_to_dict(x.pitcher, recurse=not short_output) for x in all_bsaves] + "top-players": sorted(top_players, key=lambda x: x["re24"], reverse=True)[ + :tp_max + ], + "pooper": sorted(bot_players, key=lambda x: x["re24"])[:poop_max], + "pitchers": { + "win": model_to_dict(winner.get().pitcher, recurse=not short_output), + "loss": model_to_dict(loser.get().pitcher, recurse=not short_output), + "holds": [ + model_to_dict(x.pitcher, recurse=not short_output) for x in all_holds + ], + "save": save_pitcher, + "b_saves": [ + model_to_dict(x.pitcher, recurse=not short_output) for x in all_bsaves + ], }, - 'xbh': { - '2b': [model_to_dict(x.batter, recurse=not short_output) for x in doubles], - '3b': [model_to_dict(x.batter, recurse=not short_output) for x in triples], - 'hr': 
[model_to_dict(x.batter, recurse=not short_output) for x in homeruns] + "xbh": { + "2b": [model_to_dict(x.batter, recurse=not short_output) for x in doubles], + "3b": [model_to_dict(x.batter, recurse=not short_output) for x in triples], + "hr": [model_to_dict(x.batter, recurse=not short_output) for x in homeruns], + }, + "running": { + "sb": [model_to_dict(x.runner, recurse=not short_output) for x in all_sb], + "csc": [model_to_dict(x.catcher, recurse=not short_output) for x in all_cs], }, - 'running': { - 'sb': [model_to_dict(x.runner, recurse=not short_output) for x in all_sb], - 'csc': [model_to_dict(x.catcher, recurse=not short_output) for x in all_cs] - } } -@router.get('/{play_id}') +@router.get("/{play_id}") async def get_one_play(play_id: int): if StratPlay.get_or_none(StratPlay.id == play_id) is None: db.close() - raise HTTPException(status_code=404, detail=f'Play ID {play_id} not found') + raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found") r_play = model_to_dict(StratPlay.get_by_id(play_id)) db.close() return r_play -@router.patch('/{play_id}') -async def patch_play(play_id: int, new_play: PlayModel, token: str = Depends(oauth2_scheme)): +@router.patch("/{play_id}") +async def patch_play( + play_id: int, new_play: PlayModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'patch_play - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"patch_play - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") if StratPlay.get_or_none(StratPlay.id == play_id) is None: db.close() - raise HTTPException(status_code=404, detail=f'Play ID {play_id} not found') + raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found") StratPlay.update(**new_play.dict()).where(StratPlay.id == play_id).execute() r_play = model_to_dict(StratPlay.get_by_id(play_id)) @@ -1104,81 +1390,96 @@ async def patch_play(play_id: int, new_play: PlayModel, token: str = Depends(oau return r_play -@router.post('') +@router.post("") async def post_plays(p_list: PlayList, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'post_plays - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"post_plays - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") new_plays = [] this_game = StratGame.get_or_none(StratGame.id == p_list.plays[0].game_id) if this_game is None: - raise HTTPException(status_code=404, detail=f'Game ID {p_list.plays[0].game_id} not found') + raise HTTPException( + status_code=404, detail=f"Game ID {p_list.plays[0].game_id} not found" + ) for play in p_list.plays: this_play = play this_play.inning_half = this_play.inning_half.lower() - top_half = this_play.inning_half == 'top' + top_half = this_play.inning_half == "top" if this_play.batter_team_id is None and this_play.batter_id is not None: - this_play.batter_team_id = this_game.away_team.id if top_half else this_game.home_team.id + this_play.batter_team_id = ( + this_game.away_team.id if top_half else this_game.home_team.id + ) if this_play.pitcher_team_id is None: - this_play.pitcher_team_id = this_game.home_team.id if top_half else this_game.away_team.id + this_play.pitcher_team_id = ( + this_game.home_team.id if top_half else this_game.away_team.id + ) if this_play.catcher_id is not None: - this_play.catcher_team_id = this_game.home_team.id if top_half else this_game.away_team.id + 
this_play.catcher_team_id = ( + this_game.home_team.id if top_half else this_game.away_team.id + ) if this_play.defender_id is not None: - this_play.defender_team_id = this_game.home_team.id if top_half else this_game.away_team.id + this_play.defender_team_id = ( + this_game.home_team.id if top_half else this_game.away_team.id + ) if this_play.runner_id is not None: - this_play.runner_team_id = this_game.away_team.id if top_half else this_game.home_team.id + this_play.runner_team_id = ( + this_game.away_team.id if top_half else this_game.home_team.id + ) if this_play.pa == 0: this_play.batter_final = None new_plays.append(this_play.dict()) with db.atomic(): - for batch in chunked(new_plays, 20): - StratPlay.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_strat_plays(new_plays, batch_size=20) db.close() - return f'Inserted {len(new_plays)} plays' + return f"Inserted {len(new_plays)} plays" -@router.delete('/{play_id}') +@router.delete("/{play_id}") async def delete_play(play_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'delete_play - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"delete_play - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") this_play = StratPlay.get_or_none(StratPlay.id == play_id) if not this_play: db.close() - raise HTTPException(status_code=404, detail=f'Play ID {play_id} not found') + raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found") count = this_play.delete_instance() db.close() if count == 1: - return f'Play {play_id} has been deleted' + return f"Play {play_id} has been deleted" else: - raise HTTPException(status_code=500, detail=f'Play {play_id} could not be deleted') + raise HTTPException( + status_code=500, detail=f"Play {play_id} could not be deleted" + ) -@router.delete('/game/{game_id}') +@router.delete("/game/{game_id}") async def delete_plays_game(game_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'delete_plays_game - Bad Token: {token}') - raise HTTPException(status_code=401, detail='Unauthorized') + logging.warning(f"delete_plays_game - Bad Token: {token}") + raise HTTPException(status_code=401, detail="Unauthorized") this_game = StratGame.get_or_none(StratGame.id == game_id) if not this_game: db.close() - raise HTTPException(status_code=404, detail=f'Game ID {game_id} not found') + raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found") count = StratPlay.delete().where(StratPlay.game == this_game).execute() db.close() if count > 0: - return f'Deleted {count} plays matching Game ID {game_id}' + return f"Deleted {count} plays matching Game ID {game_id}" else: - raise HTTPException(status_code=500, detail=f'No plays matching Game ID {game_id} were deleted') - + raise HTTPException( + status_code=500, detail=f"No plays matching Game ID {game_id} were deleted" + ) diff --git a/app/routers_v2/teams.py b/app/routers_v2/teams.py index b504d87..e0ec465 100644 --- a/app/routers_v2/teams.py +++ b/app/routers_v2/teams.py @@ -8,21 +8,45 @@ import logging import pydantic from pandas import DataFrame -from ..db_engine import db, Team, model_to_dict, fn, Pack, Card, Player, Paperdex, Notification, PackType, \ - Rarity, Current, query_to_csv, complex_data_to_csv, CARDSETS, CardPosition, BattingCardRatings, BattingCard, \ - PitchingCard, PitchingCardRatings, StratGame, LIVE_PROMO_CARDSET_ID 
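# One plausible shape for the upsert_strat_plays() helper called above; the
# real helper's implementation is not shown in this hunk, so db_is_postgres is
# a hypothetical flag and the conflict target assumes a unique index on
# (game, play_num). The idea: ON CONFLICT DO UPDATE on PostgreSQL, fall back
# to SQLite's REPLACE semantics otherwise.
from peewee import chunked

def upsert_strat_plays_sketch(rows, batch_size=20):
    preserve = [f for f in StratPlay._meta.sorted_fields if f.name != "id"]
    for batch in chunked(rows, batch_size):
        if db_is_postgres:  # hypothetical flag exposed by db_engine
            (StratPlay.insert_many(batch)
             .on_conflict(conflict_target=[StratPlay.game, StratPlay.play_num],
                          preserve=preserve)
             .execute())
        else:
            StratPlay.insert_many(batch).on_conflict_replace().execute()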
-from ..dependencies import oauth2_scheme, valid_token, LOG_DATA, int_timestamp, PRIVATE_IN_SCHEMA +from ..db_engine import ( + db, + Team, + model_to_dict, + fn, + Pack, + Card, + Player, + Paperdex, + Notification, + PackType, + Rarity, + Current, + query_to_csv, + complex_data_to_csv, + CARDSETS, + CardPosition, + BattingCardRatings, + BattingCard, + PitchingCard, + PitchingCardRatings, + StratGame, + LIVE_PROMO_CARDSET_ID, +) +from ..dependencies import ( + oauth2_scheme, + valid_token, + LOG_DATA, + int_timestamp, + PRIVATE_IN_SCHEMA, +) logging.basicConfig( - filename=LOG_DATA['filename'], - format=LOG_DATA['format'], - level=LOG_DATA['log_level'] + filename=LOG_DATA["filename"], + format=LOG_DATA["format"], + level=LOG_DATA["log_level"], ) -router = APIRouter( - prefix='/api/v2/teams', - tags=['teams'] -) +router = APIRouter(prefix="/api/v2/teams", tags=["teams"]) class TeamModel(pydantic.BaseModel): @@ -44,14 +68,27 @@ class TeamModel(pydantic.BaseModel): is_ai: Optional[bool] = False -@router.get('') +@router.get("") async def get_teams( - season: Optional[int] = None, gm_id: Optional[int] = None, abbrev: Optional[str] = None, - tv_min: Optional[int] = None, tv_max: Optional[int] = None, cv_min: Optional[int] = None, - cv_max: Optional[int] = None, ps_shiny_min: Optional[int] = None, ps_shiny_max: Optional[int] = None, - ranking_min: Optional[int] = None, ranking_max: Optional[int] = None, has_guide: Optional[bool] = None, - sname: Optional[str] = None, lname: Optional[str] = None, is_ai: Optional[bool] = None, - event_id: Optional[int] = None, limit: Optional[int] = None, csv: Optional[bool] = False): + season: Optional[int] = None, + gm_id: Optional[int] = None, + abbrev: Optional[str] = None, + tv_min: Optional[int] = None, + tv_max: Optional[int] = None, + cv_min: Optional[int] = None, + cv_max: Optional[int] = None, + ps_shiny_min: Optional[int] = None, + ps_shiny_max: Optional[int] = None, + ranking_min: Optional[int] = None, + ranking_max: Optional[int] = None, + has_guide: Optional[bool] = None, + sname: Optional[str] = None, + lname: Optional[str] = None, + is_ai: Optional[bool] = None, + event_id: Optional[int] = None, + limit: Optional[int] = None, + csv: Optional[bool] = False, +): """ Param: season: int Param: team_abbrev: string @@ -106,10 +143,11 @@ async def get_teams( all_teams = all_teams.where(Team.ranking <= ranking_max) if has_guide is not None: + # Use boolean comparison (PostgreSQL-compatible) if not has_guide: - all_teams = all_teams.where(Team.has_guide == 0) + all_teams = all_teams.where(Team.has_guide == False) else: - all_teams = all_teams.where(Team.has_guide == 1) + all_teams = all_teams.where(Team.has_guide == True) if is_ai is not None: all_teams = all_teams.where(Team.is_ai) @@ -123,193 +161,233 @@ async def get_teams( if csv: return_val = query_to_csv(all_teams, exclude=[Team.career]) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_teams = {'count': all_teams.count(), 'teams': []} + return_teams = {"count": all_teams.count(), "teams": []} for x in all_teams: - return_teams['teams'].append(model_to_dict(x)) + return_teams["teams"].append(model_to_dict(x)) db.close() return return_teams -@router.get('/{team_id}') +@router.get("/{team_id}") async def get_one_team(team_id, inc_packs: bool = True, csv: Optional[bool] = False): try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team 
found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") - p_query = Pack.select().where((Pack.team == this_team) & (Pack.open_time.is_null(True))) + p_query = Pack.select().where( + (Pack.team == this_team) & (Pack.open_time.is_null(True)) + ) if csv: data = model_to_dict(this_team) - data['sealed_packs'] = p_query.count() + data["sealed_packs"] = p_query.count() return_val = complex_data_to_csv([data]) else: return_val = model_to_dict(this_team) if inc_packs: - return_val['sealed_packs'] = [model_to_dict(x) for x in p_query] + return_val["sealed_packs"] = [model_to_dict(x) for x in p_query] db.close() return return_val def get_scouting_dfs(allowed_players, position: str): - logging.info(f'allowed_players: {allowed_players}\nposition: {position}') + logging.info(f"allowed_players: {allowed_players}\nposition: {position}") positions = CardPosition.select().where( (CardPosition.player << allowed_players) & (CardPosition.position == position) ) pos_players = [x.player.player_id for x in positions] - logging.info(f'pos_players: {pos_players}') + logging.info(f"pos_players: {pos_players}") if len(pos_players) == 0: return None all_cards = BattingCard.select().where(BattingCard.player << pos_players) - all_ratings = BattingCardRatings.select().where(BattingCardRatings.battingcard << all_cards) + all_ratings = BattingCardRatings.select().where( + BattingCardRatings.battingcard << all_cards + ) - vl_query = all_ratings.where(BattingCardRatings.vs_hand == 'L') - vr_query = all_ratings.where(BattingCardRatings.vs_hand == 'R') + vl_query = all_ratings.where(BattingCardRatings.vs_hand == "L") + vr_query = all_ratings.where(BattingCardRatings.vs_hand == "R") vl_vals = [model_to_dict(x) for x in vl_query] for x in vl_vals: - x.update(x['battingcard']) - x['player_id'] = x['battingcard']['player']['player_id'] - x['player_name'] = x['battingcard']['player']['p_name'] - x['rarity'] = x['battingcard']['player']['rarity']['name'] - x['cardset_id'] = x['battingcard']['player']['cardset']['id'] - x['cardset_name'] = x['battingcard']['player']['cardset']['name'] - del x['battingcard'], x['player'] + x.update(x["battingcard"]) + x["player_id"] = x["battingcard"]["player"]["player_id"] + x["player_name"] = x["battingcard"]["player"]["p_name"] + x["rarity"] = x["battingcard"]["player"]["rarity"]["name"] + x["cardset_id"] = x["battingcard"]["player"]["cardset"]["id"] + x["cardset_name"] = x["battingcard"]["player"]["cardset"]["name"] + del x["battingcard"], x["player"] vr_vals = [model_to_dict(x) for x in vr_query] for x in vr_vals: - x['player_id'] = x['battingcard']['player']['player_id'] - del x['battingcard'] + x["player_id"] = x["battingcard"]["player"]["player_id"] + del x["battingcard"] vl = pd.DataFrame(vl_vals) vr = pd.DataFrame(vr_vals) - bat_df = pd.merge(vl, vr, on='player_id', suffixes=('_vl', '_vr')).set_index('player_id', drop=False) - logging.info(f'cols:\n{list(bat_df.columns)}') + bat_df = pd.merge(vl, vr, on="player_id", suffixes=("_vl", "_vr")).set_index( + "player_id", drop=False + ) + logging.info(f"cols:\n{list(bat_df.columns)}") series_list = [] - series_list.append(pd.Series( - dict([(x.player.player_id, x.range) for x in positions]), - name=f'Range {position}' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.error) for x in positions]), - name=f'Error {position}' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.innings) for x in positions]), - name=f'Innings {position}' - )) - if position in ['LF', 
'CF', 'RF']: - series_list.append(pd.Series( - dict([(x.player.player_id, x.arm) for x in positions]), - name=f'Arm OF' - )) - elif position == 'C': - series_list.append(pd.Series( - dict([(x.player.player_id, x.arm) for x in positions]), - name=f'Arm C' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.pb) for x in positions]), - name=f'PB C' - )) - series_list.append(pd.Series( - dict([(x.player.player_id, x.overthrow) for x in positions]), - name=f'Throw C' - )) + series_list.append( + pd.Series( + dict([(x.player.player_id, x.range) for x in positions]), + name=f"Range {position}", + ) + ) + series_list.append( + pd.Series( + dict([(x.player.player_id, x.error) for x in positions]), + name=f"Error {position}", + ) + ) + series_list.append( + pd.Series( + dict([(x.player.player_id, x.innings) for x in positions]), + name=f"Innings {position}", + ) + ) + if position in ["LF", "CF", "RF"]: + series_list.append( + pd.Series( + dict([(x.player.player_id, x.arm) for x in positions]), name=f"Arm OF" + ) + ) + elif position == "C": + series_list.append( + pd.Series( + dict([(x.player.player_id, x.arm) for x in positions]), name=f"Arm C" + ) + ) + series_list.append( + pd.Series( + dict([(x.player.player_id, x.pb) for x in positions]), name=f"PB C" + ) + ) + series_list.append( + pd.Series( + dict([(x.player.player_id, x.overthrow) for x in positions]), + name=f"Throw C", + ) + ) db.close() def get_total_ops(df_data): - ops_vl = df_data['obp_vl'] + df_data['slg_vl'] - ops_vr = df_data['obp_vr'] + df_data['slg_vr'] + ops_vl = df_data["obp_vl"] + df_data["slg_vl"] + ops_vr = df_data["obp_vr"] + df_data["slg_vr"] return (ops_vr + ops_vl + min(ops_vl, ops_vr)) / 3 - logging.debug(f'series_list: {series_list}') + logging.debug(f"series_list: {series_list}") ratings = bat_df.join(series_list) - ratings['total_OPS'] = ratings.apply(get_total_ops, axis=1) + ratings["total_OPS"] = ratings.apply(get_total_ops, axis=1) return ratings -@router.get('/{team_id}/lineup/{difficulty_name}') +@router.get("/{team_id}/lineup/{difficulty_name}") async def get_team_lineup( - team_id: int, difficulty_name: str, pitcher_name: str, build_type: str, cardset_id: list = Query(default=None), - backup_cardset_id: list = Query(default=None)): + team_id: int, + difficulty_name: str, + pitcher_name: str, + build_type: str, + cardset_id: list = Query(default=None), + backup_cardset_id: list = Query(default=None), +): """ d_rank: int - 10: best overall, 9: prioritize range, 8: prioritize error """ this_team = Team.get_or_none(Team.id == team_id) if this_team is None: db.close() - raise HTTPException(status_code=404, detail=f'Team id {team_id} not found') + raise HTTPException(status_code=404, detail=f"Team id {team_id} not found") - if difficulty_name not in CARDSETS.keys() and difficulty_name != 'exhibition': + if difficulty_name not in CARDSETS.keys() and difficulty_name != "exhibition": db.close() - raise HTTPException(status_code=400, detail=f'Difficulty name {difficulty_name} not a valid check') + raise HTTPException( + status_code=400, + detail=f"Difficulty name {difficulty_name} not a valid check", + ) # all_players = Player.select().where( # (fn.Lower(Player.p_name) != pitcher_name.lower()) & (Player.mlbclub == this_team.lname) # ) all_players = Player.select().where(Player.franchise == this_team.sname) - if difficulty_name == 'exhibition': - logging.info(f'pulling an exhibition lineup') + if difficulty_name == "exhibition": + logging.info(f"pulling an exhibition lineup") if cardset_id is None: 
db.close() - raise HTTPException(status_code=400, detail=f'Must provide at least one cardset_id for exhibition lineups') + raise HTTPException( + status_code=400, + detail=f"Must provide at least one cardset_id for exhibition lineups", + ) legal_players = all_players.where(Player.cardset_id << cardset_id) if backup_cardset_id is not None: backup_players = all_players.where(Player.cardset_id << backup_cardset_id) else: - backup_players = all_players.where(Player.cardset_id << CARDSETS['minor-league']['primary']) + backup_players = all_players.where( + Player.cardset_id << CARDSETS["minor-league"]["primary"] + ) else: - legal_players = all_players.where(Player.cardset_id << CARDSETS[difficulty_name]['primary']) - if 'secondary' in CARDSETS[difficulty_name]: - backup_players = all_players.where(Player.cardset_id << CARDSETS[difficulty_name]['secondary']) + legal_players = all_players.where( + Player.cardset_id << CARDSETS[difficulty_name]["primary"] + ) + if "secondary" in CARDSETS[difficulty_name]: + backup_players = all_players.where( + Player.cardset_id << CARDSETS[difficulty_name]["secondary"] + ) else: backup_players = None - logging.info(f'legal_players: {legal_players.count()}') - logging.info(f'legal query: {legal_players}') + logging.info(f"legal_players: {legal_players.count()}") + logging.info(f"legal query: {legal_players}") if backup_players is not None: - logging.info(f'backup_players: {backup_players.count()}') + logging.info(f"backup_players: {backup_players.count()}") player_names = [] starting_nine = { - 'C': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - '1B': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - '2B': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - '3B': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - 'SS': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - 'LF': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - 'CF': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - 'RF': {'player': None, 'vl': None, 'vr': None, 'ops': 0}, - 'DH': {'player': None, 'vl': None, 'vr': None, 'ops': 0} + "C": {"player": None, "vl": None, "vr": None, "ops": 0}, + "1B": {"player": None, "vl": None, "vr": None, "ops": 0}, + "2B": {"player": None, "vl": None, "vr": None, "ops": 0}, + "3B": {"player": None, "vl": None, "vr": None, "ops": 0}, + "SS": {"player": None, "vl": None, "vr": None, "ops": 0}, + "LF": {"player": None, "vl": None, "vr": None, "ops": 0}, + "CF": {"player": None, "vl": None, "vr": None, "ops": 0}, + "RF": {"player": None, "vl": None, "vr": None, "ops": 0}, + "DH": {"player": None, "vl": None, "vr": None, "ops": 0}, } def get_bratings(player_id): this_bcard = BattingCard.get_or_none(BattingCard.player_id == player_id) vl_ratings = BattingCardRatings.get_or_none( - BattingCardRatings.battingcard == this_bcard, BattingCardRatings.vs_hand == 'L' + BattingCardRatings.battingcard == this_bcard, + BattingCardRatings.vs_hand == "L", ) vl_ops = vl_ratings.obp + vl_ratings.slg vr_ratings = BattingCardRatings.get_or_none( - BattingCardRatings.battingcard == this_bcard, BattingCardRatings.vs_hand == 'R' + BattingCardRatings.battingcard == this_bcard, + BattingCardRatings.vs_hand == "R", ) vr_ops = vr_ratings.obp + vr_ratings.slg - return model_to_dict(vl_ratings), model_to_dict(vr_ratings), (vl_ops + vr_ops + min(vl_ops, vr_ops)) / 3 + return ( + model_to_dict(vl_ratings), + model_to_dict(vr_ratings), + (vl_ops + vr_ops + min(vl_ops, vr_ops)) / 3, + ) # IDEA: Rank guys by their bat per-position and take the best one that meets a threshold of 
defensive ability for position in starting_nine.keys(): - if position == 'DH': + if position == "DH": # all_bcards = BattingCard.select().where(BattingCard.player << legal_players) # all_batters = BattingCardRatings.select().where( # BattingCardRatings.battingcard << all_bcards @@ -319,99 +397,114 @@ async def get_team_lineup( # if x.battingcard.player.p_name not in player_names: # starting_nine['DH'] = x.battingcard.player # break - logging.debug(f'Searching for a DH!') + logging.debug(f"Searching for a DH!") dh_query = legal_players.order_by(Player.cost.desc()) for x in dh_query: - logging.debug(f'checking {x.p_name} for {position}') - if x.p_name not in player_names and 'P' not in x.pos_1: - logging.debug(f'adding!') - starting_nine['DH']['player'] = model_to_dict(x) + logging.debug(f"checking {x.p_name} for {position}") + if x.p_name not in player_names and "P" not in x.pos_1: + logging.debug(f"adding!") + starting_nine["DH"]["player"] = model_to_dict(x) try: vl, vr, total_ops = get_bratings(x.player_id) except AttributeError as e: - logging.debug(f'Could not find batting lines') + logging.debug(f"Could not find batting lines") else: # starting_nine[position]['vl'] = vl # starting_nine[position]['vr'] = vr - starting_nine[position]['vl'] = vl['obp'] + vl['slg'] - starting_nine[position]['vr'] = vr['obp'] + vr['slg'] - starting_nine['DH']['ops'] = total_ops + starting_nine[position]["vl"] = vl["obp"] + vl["slg"] + starting_nine[position]["vr"] = vr["obp"] + vr["slg"] + starting_nine["DH"]["ops"] = total_ops player_names.append(x.p_name) break - if starting_nine['DH']['player'] is None: + if starting_nine["DH"]["player"] is None: dh_query = backup_players.order_by(Player.cost.desc()) for x in dh_query: - logging.debug(f'checking {x.p_name} for {position}') + logging.debug(f"checking {x.p_name} for {position}") if x.p_name not in player_names: - logging.debug(f'adding!') - starting_nine['DH']['player'] = model_to_dict(x) + logging.debug(f"adding!") + starting_nine["DH"]["player"] = model_to_dict(x) try: vl, vr, total_ops = get_bratings(x.player_id) except AttributeError as e: - logging.debug(f'Could not find batting lines') + logging.debug(f"Could not find batting lines") else: vl, vr, total_ops = get_bratings(x.player_id) - starting_nine[position]['vl'] = vl['obp'] + vl['slg'] - starting_nine[position]['vr'] = vr['obp'] + vr['slg'] - starting_nine['DH']['ops'] = total_ops + starting_nine[position]["vl"] = vl["obp"] + vl["slg"] + starting_nine[position]["vr"] = vr["obp"] + vr["slg"] + starting_nine["DH"]["ops"] = total_ops player_names.append(x.p_name) break else: pos_group = CardPosition.select().where( - (CardPosition.position == position) & (CardPosition.player << legal_players) + (CardPosition.position == position) + & (CardPosition.player << legal_players) ) - backup_group = CardPosition.select().where( - (CardPosition.position == position) & (CardPosition.player << backup_players) - ).order_by(CardPosition.innings.desc()) - if difficulty_name in ['minor-league', 'gauntlet-3', 'gauntlet-5']: + backup_group = ( + CardPosition.select() + .where( + (CardPosition.position == position) + & (CardPosition.player << backup_players) + ) + .order_by(CardPosition.innings.desc()) + ) + if difficulty_name in ["minor-league", "gauntlet-3", "gauntlet-5"]: pos_group = pos_group.order_by(CardPosition.innings.desc()) for x in pos_group: - logging.debug(f'checking {x.player.p_name} for {position}') - if x.player.p_name not in player_names and x.player.p_name.lower() != pitcher_name: - 
logging.debug(f'adding!') - starting_nine[position]['player'] = model_to_dict(x.player) + logging.debug(f"checking {x.player.p_name} for {position}") + if ( + x.player.p_name not in player_names + and x.player.p_name.lower() != pitcher_name + ): + logging.debug(f"adding!") + starting_nine[position]["player"] = model_to_dict(x.player) vl, vr, total_ops = get_bratings(x.player.player_id) - starting_nine[position]['vl'] = vl - starting_nine[position]['vr'] = vr + starting_nine[position]["vl"] = vl + starting_nine[position]["vr"] = vr # starting_nine[position]['vl'] = vl.obp_vl + vl.slg_vl # starting_nine[position]['vr'] = vr.obp_vr + vr.slg_vr - starting_nine[position]['ops'] = total_ops + starting_nine[position]["ops"] = total_ops player_names.append(x.player.p_name) break # elif difficulty_name in ['major-league', 'flashback', 'hall-of-fame']: else: - logging.debug(f'entering difficulty: {difficulty_name}') + logging.debug(f"entering difficulty: {difficulty_name}") eligible_cards = get_scouting_dfs(legal_players, position) - logging.debug(f'got dataframe:\n{eligible_cards}') + logging.debug(f"got dataframe:\n{eligible_cards}") # if position == '1B': # return Response(content=eligible_cards.to_csv(index=False), media_type='text/csv') def rank_cards(df_data): - if position in ['C', 'SS', '2B', 'CF']: + if position in ["C", "SS", "2B", "CF"]: r_mult = 0.05 e_mult = -0.01 else: r_mult = 0.025 e_mult = -0.005 - r_mod = (3 - df_data[f'Range {position}']) * r_mult - e_mod = df_data[f'Error {position}'] * e_mult - i_mult = df_data[f'Innings {position}'] / 1000 + r_mod = (3 - df_data[f"Range {position}"]) * r_mult + e_mod = df_data[f"Error {position}"] * e_mult + i_mult = df_data[f"Innings {position}"] / 1000 # final_ops = df_data['total_OPS'] + r_mod + e_mod # final_ops = (df_data['total_OPS'] * i_mult) + r_mod + e_mod - final_ops = (df_data['total_OPS'] + r_mod + e_mod) * i_mult - logging.debug(f'{df_data.player_name} total OPS: {df_data.total_OPS} / ' - f'final OPS: {final_ops}') + final_ops = (df_data["total_OPS"] + r_mod + e_mod) * i_mult + logging.debug( + f"{df_data.player_name} total OPS: {df_data.total_OPS} / " + f"final OPS: {final_ops}" + ) return final_ops if eligible_cards is not None and len(eligible_cards.index) >= 1: - eligible_cards['final_ops'] = eligible_cards.apply(rank_cards, axis=1) - logging.debug(f'final_ops:\n{eligible_cards["final_ops"]}') - eligible_cards.sort_values(by=['final_ops'], ascending=False, inplace=True) + eligible_cards["final_ops"] = eligible_cards.apply( + rank_cards, axis=1 + ) + logging.debug(f"final_ops:\n{eligible_cards['final_ops']}") + eligible_cards.sort_values( + by=["final_ops"], ascending=False, inplace=True + ) this_row = None for x in range(len(eligible_cards.index)): @@ -420,23 +513,34 @@ async def get_team_lineup( break if this_row is not None: - starting_nine[position]['player'] = model_to_dict(Player.get_by_id(this_row.player_id)) - starting_nine[position]['vl'] = this_row.obp_vl + this_row.slg_vl - starting_nine[position]['vr'] = this_row.obp_vr + this_row.slg_vr - starting_nine[position]['ops'] = this_row.total_OPS + starting_nine[position]["player"] = model_to_dict( + Player.get_by_id(this_row.player_id) + ) + starting_nine[position]["vl"] = ( + this_row.obp_vl + this_row.slg_vl + ) + starting_nine[position]["vr"] = ( + this_row.obp_vr + this_row.slg_vr + ) + starting_nine[position]["ops"] = this_row.total_OPS player_names.append(this_row.player_name) - logging.debug(f'pos_group: {pos_group}\n{starting_nine}\n{player_names}\n\n') - if 
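# Worked example of rank_cards() above, with hypothetical numbers: for SS
# (r_mult = 0.05, e_mult = -0.01), a fielder with Range 1, Error 12,
# 900 innings and total_OPS 0.800 scores
#   r_mod     = (3 - 1) * 0.05  =  0.10
#   e_mod     = 12 * -0.01      = -0.12
#   i_mult    = 900 / 1000      =  0.90
#   final_ops = (0.800 + 0.10 - 0.12) * 0.90 = 0.702
# so the innings multiplier scales the whole adjusted OPS, not just the bat.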
starting_nine[position]['player'] is None: + logging.debug( + f"pos_group: {pos_group}\n{starting_nine}\n{player_names}\n\n" + ) + if starting_nine[position]["player"] is None: for x in backup_group: - logging.info(f'checking {x.player.p_name} for {position}') - if x.player.p_name not in player_names and x.player.p_name.lower() != pitcher_name: - logging.debug(f'adding!') - starting_nine[position]['player'] = model_to_dict(x.player) + logging.info(f"checking {x.player.p_name} for {position}") + if ( + x.player.p_name not in player_names + and x.player.p_name.lower() != pitcher_name + ): + logging.debug(f"adding!") + starting_nine[position]["player"] = model_to_dict(x.player) vl, vr, total_ops = get_bratings(x.player.player_id) - starting_nine[position]['vl'] = vl['obp'] + vl['slg'] - starting_nine[position]['vr'] = vr['obp'] + vr['slg'] - starting_nine[position]['ops'] = total_ops + starting_nine[position]["vl"] = vl["obp"] + vl["slg"] + starting_nine[position]["vr"] = vr["obp"] + vr["slg"] + starting_nine[position]["ops"] = total_ops player_names.append(x.player.p_name) break @@ -474,99 +578,128 @@ async def get_team_lineup( # output['total_OPS'] = output.apply(get_total_ops, axis=1) # output = output.sort_values(by=['total_OPS'], ascending=False) - sorted_nine = sorted(starting_nine.items(), key=lambda item: item[1]['ops'], reverse=True) - return { - 'json': dict(sorted_nine), - 'array': sorted_nine - } + sorted_nine = sorted( + starting_nine.items(), key=lambda item: item[1]["ops"], reverse=True + ) + return {"json": dict(sorted_nine), "array": sorted_nine} def sort_pitchers(pitching_card_query) -> DataFrame | None: all_s = [model_to_dict(x, recurse=False) for x in pitching_card_query] if len(all_s) == 0: - logging.error(f'Empty pitching_card_query: {pitching_card_query}') + logging.error(f"Empty pitching_card_query: {pitching_card_query}") return None - pitcher_df = pd.DataFrame(all_s).set_index('player', drop=False) - logging.debug(f'pitcher_df: {pitcher_df}') + pitcher_df = pd.DataFrame(all_s).set_index("player", drop=False) + logging.debug(f"pitcher_df: {pitcher_df}") def get_total_ops(df_data): vlval = PitchingCardRatings.get_or_none( - PitchingCardRatings.pitchingcard_id == df_data['id'], PitchingCardRatings.vs_hand == 'L') + PitchingCardRatings.pitchingcard_id == df_data["id"], + PitchingCardRatings.vs_hand == "L", + ) vrval = PitchingCardRatings.get_or_none( - PitchingCardRatings.pitchingcard_id == df_data['id'], PitchingCardRatings.vs_hand == 'R') + PitchingCardRatings.pitchingcard_id == df_data["id"], + PitchingCardRatings.vs_hand == "R", + ) ops_vl = vlval.obp + vlval.slg ops_vr = vrval.obp + vrval.slg # TODO: should this be max?? 
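# The blend below, (ops_vr + ops_vl + min(ops_vl, ops_vr)) / 3, counts the
# pitcher's better platoon split (lower OPS allowed) twice. E.g. with 0.600
# allowed vs L and 0.900 vs R it yields (0.900 + 0.600 + 0.600) / 3 = 0.700,
# below the plain average of 0.750 -- which is why the TODO asks whether
# max(), weighting the weaker split instead, was intended.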
return (ops_vr + ops_vl + min(ops_vl, ops_vr)) / 3 - pitcher_df['total_ops'] = pitcher_df.apply(get_total_ops, axis=1) - return pitcher_df.sort_values(by='total_ops') + pitcher_df["total_ops"] = pitcher_df.apply(get_total_ops, axis=1) + return pitcher_df.sort_values(by="total_ops") -@router.get('/{team_id}/sp/{difficulty_name}') +@router.get("/{team_id}/sp/{difficulty_name}") async def get_team_sp( - team_id: int, difficulty_name: str, sp_rank: int, cardset_id: list = Query(default=None), - backup_cardset_id: list = Query(default=None)): - logging.info(f'get_team_sp - team_id: {team_id} / difficulty_name: {difficulty_name} / sp_rank: {sp_rank}') + team_id: int, + difficulty_name: str, + sp_rank: int, + cardset_id: list = Query(default=None), + backup_cardset_id: list = Query(default=None), +): + logging.info( + f"get_team_sp - team_id: {team_id} / difficulty_name: {difficulty_name} / sp_rank: {sp_rank}" + ) this_team = Team.get_or_none(Team.id == team_id) if this_team is None: db.close() - raise HTTPException(status_code=404, detail=f'Team id {team_id} not found') + raise HTTPException(status_code=404, detail=f"Team id {team_id} not found") - if difficulty_name not in CARDSETS.keys() and difficulty_name != 'exhibition': + if difficulty_name not in CARDSETS.keys() and difficulty_name != "exhibition": db.close() - raise HTTPException(status_code=400, detail=f'Difficulty name {difficulty_name} not a valid check') + raise HTTPException( + status_code=400, + detail=f"Difficulty name {difficulty_name} not a valid check", + ) all_players = Player.select().where(Player.franchise == this_team.sname) - if difficulty_name == 'exhibition': - logging.info(f'pulling an exhibition lineup') + if difficulty_name == "exhibition": + logging.info(f"pulling an exhibition lineup") if cardset_id is None: db.close() - raise HTTPException(status_code=400, detail=f'Must provide at least one cardset_id for exhibition lineups') + raise HTTPException( + status_code=400, + detail=f"Must provide at least one cardset_id for exhibition lineups", + ) legal_players = all_players.where(Player.cardset_id << cardset_id) if backup_cardset_id is not None: backup_players = all_players.where(Player.cardset_id << backup_cardset_id) else: - backup_players = all_players.where(Player.cardset_id << CARDSETS['minor-league']['primary']) + backup_players = all_players.where( + Player.cardset_id << CARDSETS["minor-league"]["primary"] + ) else: - legal_players = all_players.where(Player.cardset_id << CARDSETS[difficulty_name]['primary']) - if 'secondary' in CARDSETS[difficulty_name]: - backup_players = all_players.where(Player.cardset_id << CARDSETS[difficulty_name]['secondary']) + legal_players = all_players.where( + Player.cardset_id << CARDSETS[difficulty_name]["primary"] + ) + if "secondary" in CARDSETS[difficulty_name]: + backup_players = all_players.where( + Player.cardset_id << CARDSETS[difficulty_name]["secondary"] + ) else: backup_players = None def sort_starters(starter_query) -> DataFrame | None: all_s = [model_to_dict(x, recurse=False) for x in starter_query] if len(all_s) == 0: - logging.error(f'Empty starter_query: {starter_query}') + logging.error(f"Empty starter_query: {starter_query}") return None - starter_df = pd.DataFrame(all_s).set_index('player', drop=False) - logging.debug(f'starter_df: {starter_df}') + starter_df = pd.DataFrame(all_s).set_index("player", drop=False) + logging.debug(f"starter_df: {starter_df}") def get_total_ops(df_data): vlval = PitchingCardRatings.get_or_none( - PitchingCardRatings.pitchingcard_id 
== df_data['id'], PitchingCardRatings.vs_hand == 'L') + PitchingCardRatings.pitchingcard_id == df_data["id"], + PitchingCardRatings.vs_hand == "L", + ) vrval = PitchingCardRatings.get_or_none( - PitchingCardRatings.pitchingcard_id == df_data['id'], PitchingCardRatings.vs_hand == 'R') + PitchingCardRatings.pitchingcard_id == df_data["id"], + PitchingCardRatings.vs_hand == "R", + ) ops_vl = vlval.obp + vlval.slg ops_vr = vrval.obp + vrval.slg return (ops_vr + ops_vl + min(ops_vl, ops_vr)) / 3 - starter_df['total_ops'] = starter_df.apply(get_total_ops, axis=1) - return starter_df.sort_values(by='total_ops') + starter_df["total_ops"] = starter_df.apply(get_total_ops, axis=1) + return starter_df.sort_values(by="total_ops") # Find SP in primary cardsets - s_query = PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.starter_rating >= 4) + s_query = ( + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) & (PitchingCard.starter_rating >= 4) + ) ) all_starters = sort_starters(s_query) - logging.debug(f'sorted: {all_starters}') + logging.debug(f"sorted: {all_starters}") if all_starters is not None and len(all_starters.index) >= sp_rank: this_player_id = all_starters.iloc[sp_rank - 1].player @@ -585,7 +718,7 @@ async def get_team_sp( (PitchingCard.player << backup_players) & (PitchingCard.starter_rating >= 4) ) all_starters = sort_starters(s_query) - logging.debug(f'sorted: {all_starters}') + logging.debug(f"sorted: {all_starters}") if all_starters is not None and len(all_starters.index) >= sp_rank: this_player_id = all_starters.iloc[sp_rank - 1].player @@ -599,126 +732,210 @@ async def get_team_sp( db.close() return this_player - raise HTTPException(status_code=400, detail=f'No SP #{sp_rank} found for Team {team_id}') + raise HTTPException( + status_code=400, detail=f"No SP #{sp_rank} found for Team {team_id}" + ) -@router.get('/{team_id}/rp/{difficulty_name}') +@router.get("/{team_id}/rp/{difficulty_name}") async def get_team_rp( - team_id: int, difficulty_name: str, need: Literal['length', 'setup', 'closer', 'middle'], - used_pitcher_ids: list = Query(default=[]), cardset_id: list = Query(default=None), - backup_cardset_id: list = Query(default=None)): - logging.info(f'get_team_rp - team_id: {team_id} / difficulty_name: {difficulty_name} / need: {need} ' - f'/ used_pitcher_ids: {used_pitcher_ids}') + team_id: int, + difficulty_name: str, + need: Literal["length", "setup", "closer", "middle"], + used_pitcher_ids: list = Query(default=[]), + cardset_id: list = Query(default=None), + backup_cardset_id: list = Query(default=None), +): + logging.info( + f"get_team_rp - team_id: {team_id} / difficulty_name: {difficulty_name} / need: {need} " + f"/ used_pitcher_ids: {used_pitcher_ids}" + ) this_team = Team.get_or_none(Team.id == team_id) if this_team is None: db.close() - raise HTTPException(status_code=404, detail=f'Team id {team_id} not found') + raise HTTPException(status_code=404, detail=f"Team id {team_id} not found") - if difficulty_name not in CARDSETS.keys() and difficulty_name != 'exhibition': + if difficulty_name not in CARDSETS.keys() and difficulty_name != "exhibition": db.close() - raise HTTPException(status_code=400, detail=f'Difficulty name {difficulty_name} not a valid check') + raise HTTPException( + status_code=400, + detail=f"Difficulty name {difficulty_name} not a valid check", + ) all_players = Player.select().where( - (Player.franchise == this_team.sname) & 
(Player.player_id.not_in(used_pitcher_ids)) + (Player.franchise == this_team.sname) + & (Player.player_id.not_in(used_pitcher_ids)) ) - if difficulty_name == 'exhibition': - logging.info(f'pulling an exhibition RP') + if difficulty_name == "exhibition": + logging.info(f"pulling an exhibition RP") if cardset_id is None: db.close() - raise HTTPException(status_code=400, detail=f'Must provide at least one cardset_id for exhibition lineups') + raise HTTPException( + status_code=400, + detail=f"Must provide at least one cardset_id for exhibition lineups", + ) legal_players = all_players.where(Player.cardset_id << cardset_id) if backup_cardset_id is not None: backup_players = all_players.where(Player.cardset_id << backup_cardset_id) else: - backup_players = all_players.where(Player.cardset_id << CARDSETS['minor-league']['primary']) + backup_players = all_players.where( + Player.cardset_id << CARDSETS["minor-league"]["primary"] + ) else: - legal_players = all_players.where(Player.cardset_id << CARDSETS[difficulty_name]['primary']) - if 'secondary' in CARDSETS[difficulty_name]: - backup_players = all_players.where(Player.cardset_id << CARDSETS[difficulty_name]['secondary']) + legal_players = all_players.where( + Player.cardset_id << CARDSETS[difficulty_name]["primary"] + ) + if "secondary" in CARDSETS[difficulty_name]: + backup_players = all_players.where( + Player.cardset_id << CARDSETS[difficulty_name]["secondary"] + ) else: backup_players = None - logging.info(f'legal_players: {legal_players.count()}') - logging.info(f'legal query: {legal_players}') + logging.info(f"legal_players: {legal_players.count()}") + logging.info(f"legal query: {legal_players}") - if need == 'closer': - for query in [PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.closer_rating >= 3) & - (PitchingCard.starter_rating == 1)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.closer_rating >= 1) & - (PitchingCard.starter_rating == 1)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) & (PitchingCard.closer_rating >= 3) & - (PitchingCard.starter_rating == 1)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) & (PitchingCard.closer_rating >= 1) & - (PitchingCard.starter_rating == 1)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) & (PitchingCard.starter_rating < 4)) + if need == "closer": + for query in [ + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) + & (PitchingCard.closer_rating >= 3) + & (PitchingCard.starter_rating == 1) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) + & (PitchingCard.closer_rating >= 1) + & (PitchingCard.starter_rating == 1) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + & (PitchingCard.closer_rating >= 3) + & (PitchingCard.starter_rating == 1) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + & (PitchingCard.closer_rating >= 1) + & (PitchingCard.starter_rating == 1) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + & (PitchingCard.starter_rating < 4) + ), ]: all_relievers = sort_pitchers(query) if all_relievers is not None: - logging.info(f'RP query: {query}') + logging.info(f"RP query: {query}") this_player_id = all_relievers.iloc[0].player - 
this_player = model_to_dict(Player.get_by_id(this_player_id), recurse=False) + this_player = model_to_dict( + Player.get_by_id(this_player_id), recurse=False + ) db.close() return this_player - elif need == 'setup': - for query in [PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.starter_rating == 1)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) & (PitchingCard.starter_rating < 4)) + elif need == "setup": + for query in [ + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) + & (PitchingCard.starter_rating == 1) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + & (PitchingCard.starter_rating < 4) + ), ]: all_relievers = sort_pitchers(query) if all_relievers is not None and len(all_relievers.index) >= 2: this_player_id = all_relievers.iloc[1].player - this_player = model_to_dict(Player.get_by_id(this_player_id), recurse=False) + this_player = model_to_dict( + Player.get_by_id(this_player_id), recurse=False + ) db.close() return this_player - elif need == 'length' or len(used_pitcher_ids) > 4: - for query in [PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.relief_rating >= 3) & - (PitchingCard.starter_rating < 4)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.relief_rating >= 2) & - (PitchingCard.starter_rating < 4)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) & (PitchingCard.relief_rating >= 2) & - (PitchingCard.starter_rating < 4)) + elif need == "length" or len(used_pitcher_ids) > 4: + for query in [ + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) + & (PitchingCard.relief_rating >= 3) + & (PitchingCard.starter_rating < 4) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) + & (PitchingCard.relief_rating >= 2) + & (PitchingCard.starter_rating < 4) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + & (PitchingCard.relief_rating >= 2) + & (PitchingCard.starter_rating < 4) + ), ]: all_relievers = sort_pitchers(query) if all_relievers is not None: this_player_id = all_relievers.iloc[0].player - this_player = model_to_dict(Player.get_by_id(this_player_id), recurse=False) + this_player = model_to_dict( + Player.get_by_id(this_player_id), recurse=False + ) db.close() return this_player - elif need == 'middle': - for query in [PitchingCard.select().join(Player).where( - (PitchingCard.player << legal_players) & (PitchingCard.starter_rating == 1)), - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) & (PitchingCard.starter_rating < 4)) + elif need == "middle": + for query in [ + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << legal_players) + & (PitchingCard.starter_rating == 1) + ), + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + & (PitchingCard.starter_rating < 4) + ), ]: all_relievers = sort_pitchers(query) if all_relievers is not None and len(all_relievers.index) >= 3: this_player_id = all_relievers.iloc[2].player - this_player = model_to_dict(Player.get_by_id(this_player_id), recurse=False) + this_player = model_to_dict( + Player.get_by_id(this_player_id), recurse=False + ) db.close() return this_player - logging.info(f'Falling to 
last chance pitcher') + logging.info(f"Falling to last chance pitcher") all_relievers = sort_pitchers( - PitchingCard.select().join(Player).where( - (PitchingCard.player << backup_players) | (PitchingCard.player << legal_players) + PitchingCard.select() + .join(Player) + .where( + (PitchingCard.player << backup_players) + | (PitchingCard.player << legal_players) ) ) @@ -728,33 +945,60 @@ async def get_team_rp( db.close() return this_player - raise HTTPException(status_code=400, detail=f'No RP found for Team {team_id}') + raise HTTPException(status_code=400, detail=f"No RP found for Team {team_id}") -@router.get('/{team_id}/season-record/{season}') +@router.get("/{team_id}/season-record/{season}") async def get_team_record(team_id: int, season: int): all_games = StratGame.select().where( - ((StratGame.away_team_id == team_id) | (StratGame.home_team_id == team_id)) & (StratGame.season == season) & - (StratGame.short_game == False) + ((StratGame.away_team_id == team_id) | (StratGame.home_team_id == team_id)) + & (StratGame.season == season) + & (StratGame.short_game == False) ) template = { - 'ARI': [0, 0, 0], 'ATL': [0, 0, 0], 'BAL': [0, 0, 0], 'BOS': [0, 0, 0], 'CHC': [0, 0, 0], 'CHW': [0, 0, 0], - 'CIN': [0, 0, 0], 'CLE': [0, 0, 0], 'COL': [0, 0, 0], 'DET': [0, 0, 0], - 'NYY': [0, 0, 0], 'TBR': [0, 0, 0], 'TOR': [0, 0, 0], 'PHI': [0, 0, 0], 'MIA': [0, 0, 0], 'NYM': [0, 0, 0], - 'WSN': [0, 0, 0], 'MIN': [0, 0, 0], 'KCR': [0, 0, 0], 'HOU': [0, 0, 0], - 'TEX': [0, 0, 0], 'SEA': [0, 0, 0], 'LAA': [0, 0, 0], 'OAK': [0, 0, 0], 'MIL': [0, 0, 0], 'PIT': [0, 0, 0], - 'STL': [0, 0, 0], 'LAD': [0, 0, 0], 'SDP': [0, 0, 0], 'SFG': [0, 0, 0], 'ALAS': [0, 0, 0], 'NLAS': [0, 0, 0] + "ARI": [0, 0, 0], + "ATL": [0, 0, 0], + "BAL": [0, 0, 0], + "BOS": [0, 0, 0], + "CHC": [0, 0, 0], + "CHW": [0, 0, 0], + "CIN": [0, 0, 0], + "CLE": [0, 0, 0], + "COL": [0, 0, 0], + "DET": [0, 0, 0], + "NYY": [0, 0, 0], + "TBR": [0, 0, 0], + "TOR": [0, 0, 0], + "PHI": [0, 0, 0], + "MIA": [0, 0, 0], + "NYM": [0, 0, 0], + "WSN": [0, 0, 0], + "MIN": [0, 0, 0], + "KCR": [0, 0, 0], + "HOU": [0, 0, 0], + "TEX": [0, 0, 0], + "SEA": [0, 0, 0], + "LAA": [0, 0, 0], + "OAK": [0, 0, 0], + "MIL": [0, 0, 0], + "PIT": [0, 0, 0], + "STL": [0, 0, 0], + "LAD": [0, 0, 0], + "SDP": [0, 0, 0], + "SFG": [0, 0, 0], + "ALAS": [0, 0, 0], + "NLAS": [0, 0, 0], } standings = { - 'minor-league': copy.deepcopy(template), - 'major-league': copy.deepcopy(template), - 'hall-of-fame': copy.deepcopy(template), - 'flashback': copy.deepcopy(template), - 'unlimited': copy.deepcopy(template), - 'ranked': copy.deepcopy(template), - 'exhibition': copy.deepcopy(template) + "minor-league": copy.deepcopy(template), + "major-league": copy.deepcopy(template), + "hall-of-fame": copy.deepcopy(template), + "flashback": copy.deepcopy(template), + "unlimited": copy.deepcopy(template), + "ranked": copy.deepcopy(template), + "exhibition": copy.deepcopy(template), } for game in all_games: @@ -788,110 +1032,124 @@ async def get_team_record(team_id: int, season: int): return standings -@router.get('/{team_id}/buy/players', include_in_schema=PRIVATE_IN_SCHEMA) +@router.get("/{team_id}/buy/players", include_in_schema=PRIVATE_IN_SCHEMA) async def team_buy_players(team_id: int, ids: str, ts: str): try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if ts != this_team.team_hash(): - logging.warning(f'Bad Team 
Secret: {ts} ({this_team.team_hash()})') + logging.warning(f"Bad Team Secret: {ts} ({this_team.team_hash()})") db.close() raise HTTPException( status_code=401, - detail=f'You are not authorized to buy {this_team.abbrev} cards. This event has been logged.' + detail=f"You are not authorized to buy {this_team.abbrev} cards. This event has been logged.", ) last_card = Card.select(Card.id).order_by(-Card.id).limit(1) lc_id = last_card[0].id - all_ids = ids.split(',') - conf_message = '' + all_ids = ids.split(",") + conf_message = "" total_cost = 0 for player_id in all_ids: - if player_id != '': + if player_id != "": try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id} /// ' - f'{conf_message} purchased') + raise HTTPException( + status_code=404, + detail=f"No player found with id {player_id} /// " + f"{conf_message} purchased", + ) # check wallet balance if this_team.wallet < this_player.cost: - logging.error(f'{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but ' - f'{this_player} costs {this_player.cost}₼.') + logging.error( + f"{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but " + f"{this_player} costs {this_player.cost}₼." + ) db.close() raise HTTPException( 200, - detail=f'{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but ' - f'{this_player} costs {this_player.cost}₼. /// {conf_message} purchased' + detail=f"{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but " + f"{this_player} costs {this_player.cost}₼. /// {conf_message} purchased", ) # Create player card and update cost buy_price = this_player.cost total_cost += buy_price this_card = Card( - player_id=this_player.player_id, - team_id=this_team.id, - value=buy_price + player_id=this_player.player_id, team_id=this_team.id, value=buy_price ) Paperdex.get_or_create(team_id=team_id, player_id=this_player.player_id) this_card.save() this_player.change_on_buy() # Deduct card cost from team - logging.info(f'{this_team.abbrev} starting wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} starting wallet: {this_team.wallet}") this_team.wallet -= buy_price this_team.save() - logging.info(f'{this_team.abbrev} ending wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} ending wallet: {this_team.wallet}") # Post a notification if this_player.rarity.value >= 2: new_notif = Notification( created=int_timestamp(datetime.now()), - title=f'Price Change', - desc='Modified by buying and selling', - field_name=f'{this_player.description} ' - f'{this_player.p_name if this_player.p_name not in this_player.description else ""}', - message=f'From {buy_price}₼ 📈 to **{this_player.cost}**₼', - about=f'Player-{this_player.player_id}' + title=f"Price Change", + desc="Modified by buying and selling", + field_name=f"{this_player.description} " + f"{this_player.p_name if this_player.p_name not in this_player.description else ''}", + message=f"From {buy_price}₼ 📈 to **{this_player.cost}**₼", + about=f"Player-{this_player.player_id}", ) new_notif.save() - conf_message += f'{buy_price}₼ for {this_player.rarity.name} {this_player.p_name} ' \ - f'({this_player.cardset.name}), ' + conf_message += ( + f"{buy_price}₼ for {this_player.rarity.name} {this_player.p_name} " + f"({this_player.cardset.name}), " + ) # sheets.post_new_cards(SHEETS_AUTH, lc_id) - raise HTTPException(status_code=200, detail=f'{conf_message} 
purchased. /// Total Cost: {total_cost}₼ /// ' - f'Final Wallet: {this_team.wallet}') + raise HTTPException( + status_code=200, + detail=f"{conf_message} purchased. /// Total Cost: {total_cost}₼ /// " + f"Final Wallet: {this_team.wallet}", + ) -@router.get('/{team_id}/buy/pack/{packtype_id}', include_in_schema=PRIVATE_IN_SCHEMA) -async def team_buy_packs(team_id: int, packtype_id: int, ts: str, quantity: Optional[int] = 1): +@router.get("/{team_id}/buy/pack/{packtype_id}", include_in_schema=PRIVATE_IN_SCHEMA) +async def team_buy_packs( + team_id: int, packtype_id: int, ts: str, quantity: Optional[int] = 1 +): try: this_packtype = PackType.get_by_id(packtype_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No pack type found with id {packtype_id}') + raise HTTPException( + status_code=404, detail=f"No pack type found with id {packtype_id}" + ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if ts != this_team.team_hash(): - logging.warning(f'Bad Team Secret: {ts} ({this_team.team_hash()})') + logging.warning(f"Bad Team Secret: {ts} ({this_team.team_hash()})") db.close() - logging.warning(f'team: {this_team} / pack_type: {this_packtype} / secret: {ts} / ' - f'actual: {this_team.team_hash()}') + logging.warning( + f"team: {this_team} / pack_type: {this_packtype} / secret: {ts} / " + f"actual: {this_team.team_hash()}" + ) raise HTTPException( status_code=401, - detail=f'You are not authorized to buy {this_team.abbrev} packs. This event has been logged.' + detail=f"You are not authorized to buy {this_team.abbrev} packs. This event has been logged.", ) # check wallet balance @@ -900,8 +1158,8 @@ async def team_buy_packs(team_id: int, packtype_id: int, ts: str, quantity: Opti db.close() raise HTTPException( 200, - detail=f'{this_packtype} was not purchased. {this_team.lname} only has {this_team.wallet} bucks, but ' - f'{this_packtype} costs {this_packtype.cost}.' + detail=f"{this_packtype} was not purchased. {this_team.lname} only has {this_team.wallet} bucks, but " + f"{this_packtype} costs {this_packtype.cost}.", ) all_packs = [] @@ -909,13 +1167,19 @@ async def team_buy_packs(team_id: int, packtype_id: int, ts: str, quantity: Opti if packtype_id == 9: cardset_id = LIVE_PROMO_CARDSET_ID for i in range(quantity): - all_packs.append(Pack(team_id=this_team.id, pack_type_id=this_packtype.id, pack_cardset_id=cardset_id)) + all_packs.append( + Pack( + team_id=this_team.id, + pack_type_id=this_packtype.id, + pack_cardset_id=cardset_id, + ) + ) # Deduct card cost from team - logging.info(f'{this_team.abbrev} starting wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} starting wallet: {this_team.wallet}") this_team.wallet -= total_cost this_team.save() - logging.info(f'{this_team.abbrev} ending wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} ending wallet: {this_team.wallet}") with db.atomic(): Pack.bulk_create(all_packs, batch_size=15) @@ -923,49 +1187,53 @@ async def team_buy_packs(team_id: int, packtype_id: int, ts: str, quantity: Opti raise HTTPException( status_code=200, - detail=f'Quantity {quantity} {this_packtype.name} pack{"s" if quantity > 1 else ""} have been purchased by ' - f'{this_team.lname} for {total_cost} bucks. You may close this window.' 
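The pack-purchase hunk above builds Pack rows in memory and writes them with bulk_create inside db.atomic(). A minimal, self-contained sketch of that pattern follows; the in-memory SQLite database and trimmed-down Pack model are assumptions for illustration only, not the models defined in app/db_engine.py.

from peewee import SqliteDatabase, Model, IntegerField

demo_db = SqliteDatabase(":memory:")  # illustration only; the real db comes from db_engine

class Pack(Model):
    team_id = IntegerField()
    pack_type_id = IntegerField()

    class Meta:
        database = demo_db

demo_db.connect()
demo_db.create_tables([Pack])

quantity = 3
all_packs = [Pack(team_id=1, pack_type_id=9) for _ in range(quantity)]

with demo_db.atomic():                      # one transaction, as in the endpoint
    Pack.bulk_create(all_packs, batch_size=15)

print(Pack.select().count())                # -> 3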
+ detail=f"Quantity {quantity} {this_packtype.name} pack{'s' if quantity > 1 else ''} have been purchased by " + f"{this_team.lname} for {total_cost} bucks. You may close this window.", ) -@router.get('/{team_id}/sell/cards', include_in_schema=PRIVATE_IN_SCHEMA) +@router.get("/{team_id}/sell/cards", include_in_schema=PRIVATE_IN_SCHEMA) async def team_sell_cards(team_id: int, ids: str, ts: str): try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if ts != this_team.team_hash(): - logging.warning(f'Bad Team Secret: {ts} ({this_team.team_hash()})') + logging.warning(f"Bad Team Secret: {ts} ({this_team.team_hash()})") db.close() raise HTTPException( status_code=401, - detail=f'You are not authorized to sell {this_team.abbrev} cards. This event has been logged.' + detail=f"You are not authorized to sell {this_team.abbrev} cards. This event has been logged.", ) - all_ids = ids.split(',') + all_ids = ids.split(",") del_ids = [] - conf_message = '' + conf_message = "" total_cost = 0 for card_id in all_ids: - if card_id != '': + if card_id != "": try: this_card = Card.get_by_id(card_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No card found with id {card_id}') + raise HTTPException( + status_code=404, detail=f"No card found with id {card_id}" + ) del_ids.append(card_id) this_player = this_card.player if this_card.team != this_team: - raise HTTPException(status_code=401, - detail=f'Card id {card_id} ({this_player.p_name}) belongs to ' - f'{this_card.team.abbrev} and cannot be sold. /// {conf_message} sold') + raise HTTPException( + status_code=401, + detail=f"Card id {card_id} ({this_player.p_name}) belongs to " + f"{this_card.team.abbrev} and cannot be sold. /// {conf_message} sold", + ) orig_price = this_player.cost - sell_price = round(this_player.cost * .5) + sell_price = round(this_player.cost * 0.5) total_cost += sell_price # credit selling team's wallet @@ -983,24 +1251,29 @@ async def team_sell_cards(team_id: int, ids: str, ts: str): if this_player.rarity.value >= 2: new_notif = Notification( created=int_timestamp(datetime.now()), - title=f'Price Change', - desc='Modified by buying and selling', - field_name=f'{this_player.description} ' - f'{this_player.p_name if this_player.p_name not in this_player.description else ""}', - message=f'From {orig_price}₼ 📉 to **{this_player.cost}**₼', - about=f'Player-{this_player.id}' + title=f"Price Change", + desc="Modified by buying and selling", + field_name=f"{this_player.description} " + f"{this_player.p_name if this_player.p_name not in this_player.description else ''}", + message=f"From {orig_price}₼ 📉 to **{this_player.cost}**₼", + about=f"Player-{this_player.id}", ) new_notif.save() - conf_message += f'{sell_price}₼ for {this_player.rarity.name} {this_player.p_name} ' \ - f'({this_player.cardset.name}), ' + conf_message += ( + f"{sell_price}₼ for {this_player.rarity.name} {this_player.p_name} " + f"({this_player.cardset.name}), " + ) # sheets.post_deletion(SHEETS_AUTH, del_ids) - raise HTTPException(status_code=200, detail=f'{conf_message} sold. /// Total Earned: {total_cost}₼ /// ' - f'Final Wallet: {this_team.wallet}') + raise HTTPException( + status_code=200, + detail=f"{conf_message} sold. 
/// Total Earned: {total_cost}₼ /// " + f"Final Wallet: {this_team.wallet}", + ) -@router.get('/{team_id}/cards') +@router.get("/{team_id}/cards") async def get_team_cards(team_id, csv: Optional[bool] = True): """ CSV output specifically targeting team roster sheet @@ -1014,59 +1287,85 @@ async def get_team_cards(team_id, csv: Optional[bool] = True): this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if not csv: db.close() raise HTTPException( status_code=400, - detail='The /teams/{team_id}/cards endpoint only supports csv output.' + detail="The /teams/{team_id}/cards endpoint only supports csv output.", ) - all_cards = (Card - .select() - .join(Player) - .join(Rarity) - .where(Card.team == this_team) - .order_by(-Card.player.rarity.value, Card.player.p_name) - ) + all_cards = ( + Card.select() + .join(Player) + .join(Rarity) + .where(Card.team == this_team) + .order_by(-Card.player.rarity.value, Card.player.p_name) + ) if all_cards.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'No cards found') + raise HTTPException(status_code=404, detail=f"No cards found") card_vals = [model_to_dict(x) for x in all_cards] db.close() for x in card_vals: - x.update(x['player']) - x['player_id'] = x['player']['player_id'] - x['player_name'] = x['player']['p_name'] - x['cardset_id'] = x['player']['cardset']['id'] - x['cardset_name'] = x['player']['cardset']['name'] - x['rarity'] = x['player']['rarity']['name'] - x['card_id'] = x['id'] + x.update(x["player"]) + x["player_id"] = x["player"]["player_id"] + x["player_name"] = x["player"]["p_name"] + x["cardset_id"] = x["player"]["cardset"]["id"] + x["cardset_name"] = x["player"]["cardset"]["name"] + x["rarity"] = x["player"]["rarity"]["name"] + x["card_id"] = x["id"] card_df = pd.DataFrame(card_vals) - output = card_df[[ - 'cardset_name', 'player_name', 'rarity', 'image', 'image2', 'pos_1', 'pos_2', 'pos_3', 'pos_4', 'pos_5', - 'pos_6', 'pos_7', 'pos_8', 'cost', 'mlbclub', 'franchise', 'fangr_id', 'bbref_id', 'player_id', 'card_id']] - return Response(content=pd.DataFrame(output).to_csv(index=False), media_type='text/csv') + output = card_df[ + [ + "cardset_name", + "player_name", + "rarity", + "image", + "image2", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "cost", + "mlbclub", + "franchise", + "fangr_id", + "bbref_id", + "player_id", + "card_id", + ] + ] + return Response( + content=pd.DataFrame(output).to_csv(index=False), media_type="text/csv" + ) -@router.post('', include_in_schema=PRIVATE_IN_SCHEMA) +@router.post("", include_in_schema=PRIVATE_IN_SCHEMA) async def post_team(team: TeamModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post teams. This event has been logged.' + detail="You are not authorized to post teams. 
This event has been logged.", ) dupe_team = Team.get_or_none(Team.season == team.season, Team.abbrev == team.abbrev) if dupe_team: db.close() - raise HTTPException(status_code=400, detail=f'There is already a season {team.season} team using {team.abbrev}') + raise HTTPException( + status_code=400, + detail=f"There is already a season {team.season} team using {team.abbrev}", + ) this_team = Team( abbrev=team.abbrev, @@ -1084,7 +1383,7 @@ async def post_team(team: TeamModel, token: str = Depends(oauth2_scheme)): season=team.season, career=team.ps_shiny, has_guide=team.has_guide, - is_ai=team.is_ai + is_ai=team.is_ai, ) saved = this_team.save() @@ -1093,43 +1392,52 @@ async def post_team(team: TeamModel, token: str = Depends(oauth2_scheme)): db.close() return return_team else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@router.post('/new-season/{new_season}', include_in_schema=PRIVATE_IN_SCHEMA) +@router.post("/new-season/{new_season}", include_in_schema=PRIVATE_IN_SCHEMA) async def team_season_update(new_season: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post teams. This event has been logged.' + detail="You are not authorized to post teams. This event has been logged.", ) - r_query = Team.update(ranking=1000, season=new_season, wallet=Team.wallet + 250, has_guide=False).execute() + r_query = Team.update( + ranking=1000, season=new_season, wallet=Team.wallet + 250, has_guide=False + ).execute() current = Current.latest() current.season = new_season current.save() db.close() - return {'detail': f'Team rankings, season, guides, and wallets updated for season {new_season}'} + return { + "detail": f"Team rankings, season, guides, and wallets updated for season {new_season}" + } -@router.post('/{team_id}/money/{delta}', include_in_schema=PRIVATE_IN_SCHEMA) -async def team_update_money(team_id: int, delta: int, token: str = Depends(oauth2_scheme)): +@router.post("/{team_id}/money/{delta}", include_in_schema=PRIVATE_IN_SCHEMA) +async def team_update_money( + team_id: int, delta: int, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to adjust wallets. This event has been logged.' + detail="You are not authorized to adjust wallets. 
This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") this_team.wallet += delta @@ -1138,29 +1446,45 @@ async def team_update_money(team_id: int, delta: int, token: str = Depends(oauth db.close() return return_team else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@router.patch('/{team_id}', include_in_schema=PRIVATE_IN_SCHEMA) +@router.patch("/{team_id}", include_in_schema=PRIVATE_IN_SCHEMA) async def patch_team( - team_id, sname: Optional[str] = None, lname: Optional[str] = None, gmid: Optional[int] = None, - gmname: Optional[str] = None, gsheet: Optional[str] = None, team_value: Optional[int] = None, - collection_value: Optional[int] = None, logo: Optional[str] = None, color: Optional[str] = None, - season: Optional[int] = None, ps_shiny: Optional[int] = None, wallet_delta: Optional[int] = None, - has_guide: Optional[bool] = None, is_ai: Optional[bool] = None, ranking: Optional[int] = None, - token: str = Depends(oauth2_scheme), abbrev: Optional[str] = None): + team_id, + sname: Optional[str] = None, + lname: Optional[str] = None, + gmid: Optional[int] = None, + gmname: Optional[str] = None, + gsheet: Optional[str] = None, + team_value: Optional[int] = None, + collection_value: Optional[int] = None, + logo: Optional[str] = None, + color: Optional[str] = None, + season: Optional[int] = None, + ps_shiny: Optional[int] = None, + wallet_delta: Optional[int] = None, + has_guide: Optional[bool] = None, + is_ai: Optional[bool] = None, + ranking: Optional[int] = None, + token: str = Depends(oauth2_scheme), + abbrev: Optional[str] = None, +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete teams. This event has been logged.' + detail="You are not authorized to delete teams. This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if abbrev is not None: this_team.abbrev = abbrev @@ -1206,28 +1530,31 @@ async def patch_team( db.close() return return_team else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@router.delete('/{team_id}', include_in_schema=PRIVATE_IN_SCHEMA) +@router.delete("/{team_id}", include_in_schema=PRIVATE_IN_SCHEMA) async def delete_team(team_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete teams. This event has been logged.' + detail="You are not authorized to delete teams. 
This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") count = this_team.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Team {team_id} has been deleted') + raise HTTPException(status_code=200, detail=f"Team {team_id} has been deleted") else: - raise HTTPException(status_code=500, detail=f'Team {team_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Team {team_id} was not deleted") diff --git a/main.py b/main.py index 83a20ec..2c0d4bd 100644 --- a/main.py +++ b/main.py @@ -3,6 +3,7 @@ import logging import os from db_engine import * +from app.db_helpers import upsert_players, upsert_gauntlet_rewards from typing import Optional, List, Union, Literal from fastapi import FastAPI, HTTPException, Depends, Response, Query from fastapi.security import OAuth2PasswordBearer @@ -13,19 +14,19 @@ import sheets from playhouse.shortcuts import model_to_dict from pandas import DataFrame -raw_log_level = os.getenv('LOG_LEVEL') -if raw_log_level == 'INFO': +raw_log_level = os.getenv("LOG_LEVEL") +if raw_log_level == "INFO": log_level = logging.INFO -elif raw_log_level == 'WARN': +elif raw_log_level == "WARN": log_level = logging.WARN else: log_level = logging.ERROR -date = f'{datetime.now().year}-{datetime.now().month}-{datetime.now().day}' +date = f"{datetime.now().year}-{datetime.now().month}-{datetime.now().day}" logging.basicConfig( - filename=f'logs/database/{date}.log', - format='%(asctime)s - %(levelname)s - %(message)s', - level=log_level + filename=f"logs/database/{date}.log", + format="%(asctime)s - %(levelname)s - %(message)s", + level=log_level, ) app = FastAPI() @@ -36,37 +37,37 @@ DEFAULT_SEASON = 5 # Franchise normalization: Convert city+team names to city-agnostic team names # This enables cross-era player matching (e.g., 'Oakland Athletics' -> 'Athletics') FRANCHISE_NORMALIZE = { - 'Arizona Diamondbacks': 'Diamondbacks', - 'Atlanta Braves': 'Braves', - 'Baltimore Orioles': 'Orioles', - 'Boston Red Sox': 'Red Sox', - 'Chicago Cubs': 'Cubs', - 'Chicago White Sox': 'White Sox', - 'Cincinnati Reds': 'Reds', - 'Cleveland Guardians': 'Guardians', - 'Colorado Rockies': 'Rockies', - 'Detroit Tigers': 'Tigers', - 'Houston Astros': 'Astros', - 'Kansas City Royals': 'Royals', - 'Los Angeles Angels': 'Angels', - 'Los Angeles Dodgers': 'Dodgers', - 'Miami Marlins': 'Marlins', - 'Milwaukee Brewers': 'Brewers', - 'Minnesota Twins': 'Twins', - 'New York Mets': 'Mets', - 'New York Yankees': 'Yankees', - 'Oakland Athletics': 'Athletics', - 'Philadelphia Phillies': 'Phillies', - 'Pittsburgh Pirates': 'Pirates', - 'San Diego Padres': 'Padres', - 'San Francisco Giants': 'Giants', - 'Seattle Mariners': 'Mariners', - 'St Louis Cardinals': 'Cardinals', - 'St. 
Louis Cardinals': 'Cardinals', - 'Tampa Bay Rays': 'Rays', - 'Texas Rangers': 'Rangers', - 'Toronto Blue Jays': 'Blue Jays', - 'Washington Nationals': 'Nationals', + "Arizona Diamondbacks": "Diamondbacks", + "Atlanta Braves": "Braves", + "Baltimore Orioles": "Orioles", + "Boston Red Sox": "Red Sox", + "Chicago Cubs": "Cubs", + "Chicago White Sox": "White Sox", + "Cincinnati Reds": "Reds", + "Cleveland Guardians": "Guardians", + "Colorado Rockies": "Rockies", + "Detroit Tigers": "Tigers", + "Houston Astros": "Astros", + "Kansas City Royals": "Royals", + "Los Angeles Angels": "Angels", + "Los Angeles Dodgers": "Dodgers", + "Miami Marlins": "Marlins", + "Milwaukee Brewers": "Brewers", + "Minnesota Twins": "Twins", + "New York Mets": "Mets", + "New York Yankees": "Yankees", + "Oakland Athletics": "Athletics", + "Philadelphia Phillies": "Phillies", + "Pittsburgh Pirates": "Pirates", + "San Diego Padres": "Padres", + "San Francisco Giants": "Giants", + "Seattle Mariners": "Mariners", + "St Louis Cardinals": "Cardinals", + "St. Louis Cardinals": "Cardinals", + "Tampa Bay Rays": "Rays", + "Texas Rangers": "Rangers", + "Toronto Blue Jays": "Blue Jays", + "Washington Nationals": "Nationals", } @@ -76,11 +77,13 @@ def normalize_franchise(franchise: str) -> str: return FRANCHISE_NORMALIZE.get(titled, titled) -SHEETS_AUTH = pygsheets.authorize(service_file='storage/paper-dynasty-service-creds.json', retries=1) +SHEETS_AUTH = pygsheets.authorize( + service_file="storage/paper-dynasty-service-creds.json", retries=1 +) def valid_token(token): - if token == os.environ.get('API_TOKEN'): + if token == os.environ.get("API_TOKEN"): return True else: return False @@ -102,7 +105,7 @@ class CurrentModel(pydantic.BaseModel): gsheet_version: str -@app.get('/api/v1/current') +@app.get("/api/v1/current") async def v1_current_get(season: Optional[int] = None, csv: Optional[bool] = False): if season: current = Current.get_or_none(season=season) @@ -111,62 +114,67 @@ async def v1_current_get(season: Optional[int] = None, csv: Optional[bool] = Fal if csv: current_list = [ - ['id', 'season', 'week'], - [current.id, current.season, current.week] + ["id", "season", "week"], + [current.id, current.season, current.week], ] return_val = DataFrame(current_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(current) db.close() return return_val -@app.get('/api/v1/current/{current_id}') +@app.get("/api/v1/current/{current_id}") async def v1_current_get_one(current_id, csv: Optional[bool] = False): try: current = Current.get_by_id(current_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No current found with id {current_id}') + raise HTTPException( + status_code=404, detail=f"No current found with id {current_id}" + ) if csv: current_list = [ - ['id', 'season', 'week'], - [current.id, current.season, current.week] + ["id", "season", "week"], + [current.id, current.season, current.week], ] return_val = DataFrame(current_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(current) db.close() return return_val -@app.post('/api/v1/current') +@app.post("/api/v1/current") async def v1_current_post(current: CurrentModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - 
logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post current. This event has been logged.' + detail="You are not authorized to post current. This event has been logged.", ) dupe_curr = Current.get_or_none(Current.season == current.season) if dupe_curr: db.close() - raise HTTPException(status_code=400, detail=f'There is already a current for season {current.season}') + raise HTTPException( + status_code=400, + detail=f"There is already a current for season {current.season}", + ) this_curr = Current( season=current.season, week=current.week, gsheet_template=current.gsheet_template, - gsheet_version=current.gsheet_version + gsheet_version=current.gsheet_version, ) saved = this_curr.save() @@ -175,26 +183,36 @@ async def v1_current_post(current: CurrentModel, token: str = Depends(oauth2_sch db.close() return return_val else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@app.patch('/api/v1/current/{current_id}') +@app.patch("/api/v1/current/{current_id}") async def v1_current_patch( - current_id: int, season: Optional[int] = None, week: Optional[int] = None, - gsheet_template: Optional[str] = None, gsheet_version: Optional[str] = None, - live_scoreboard: Optional[int] = None, token: str = Depends(oauth2_scheme)): + current_id: int, + season: Optional[int] = None, + week: Optional[int] = None, + gsheet_template: Optional[str] = None, + gsheet_version: Optional[str] = None, + live_scoreboard: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch current. This event has been logged.' + detail="You are not authorized to patch current. This event has been logged.", ) try: current = Current.get_by_id(current_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No current found with id {current_id}') + raise HTTPException( + status_code=404, detail=f"No current found with id {current_id}" + ) if season is not None: current.season = season @@ -214,32 +232,38 @@ async def v1_current_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that current' + detail="Well slap my ass and call me a teapot; I could not save that current", ) -@app.delete('/api/v1/current/{current_id}') +@app.delete("/api/v1/current/{current_id}") async def v1_current_delete(current_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete current. This event has been logged.' + detail="You are not authorized to delete current. 
This event has been logged.", ) try: this_curr = Current.get_by_id(current_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No current found with id {current_id}') + raise HTTPException( + status_code=404, detail=f"No current found with id {current_id}" + ) count = this_curr.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Current {current_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Current {current_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Current {current_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Current {current_id} was not deleted" + ) """ @@ -266,14 +290,27 @@ class TeamModel(pydantic.BaseModel): is_ai: Optional[bool] = False -@app.get('/api/v1/teams') +@app.get("/api/v1/teams") async def v1_teams_get( - season: Optional[int] = None, gm_id: Optional[int] = None, abbrev: Optional[str] = None, - tv_min: Optional[int] = None, tv_max: Optional[int] = None, cv_min: Optional[int] = None, - cv_max: Optional[int] = None, ps_shiny_min: Optional[int] = None, ps_shiny_max: Optional[int] = None, - ranking_min: Optional[int] = None, ranking_max: Optional[int] = None, has_guide: Optional[bool] = None, - sname: Optional[str] = None, lname: Optional[str] = None, is_ai: Optional[bool] = None, - event_id: Optional[int] = None, limit: Optional[int] = None, csv: Optional[bool] = False): + season: Optional[int] = None, + gm_id: Optional[int] = None, + abbrev: Optional[str] = None, + tv_min: Optional[int] = None, + tv_max: Optional[int] = None, + cv_min: Optional[int] = None, + cv_max: Optional[int] = None, + ps_shiny_min: Optional[int] = None, + ps_shiny_max: Optional[int] = None, + ranking_min: Optional[int] = None, + ranking_max: Optional[int] = None, + has_guide: Optional[bool] = None, + sname: Optional[str] = None, + lname: Optional[str] = None, + is_ai: Optional[bool] = None, + event_id: Optional[int] = None, + limit: Optional[int] = None, + csv: Optional[bool] = False, +): """ Param: season: int Param: team_abbrev: string @@ -347,161 +384,230 @@ async def v1_teams_get( # raise HTTPException(status_code=404, detail=f'No teams found') if csv: - data_list = [[ - 'id', 'abbrev', 'sname', 'lname', 'gmid', 'gmname', 'wallet', 'gsheet', 'team_value', - 'collection_value', 'logo', 'color', 'season', 'ranking' - ]] + data_list = [ + [ + "id", + "abbrev", + "sname", + "lname", + "gmid", + "gmname", + "wallet", + "gsheet", + "team_value", + "collection_value", + "logo", + "color", + "season", + "ranking", + ] + ] for line in all_teams: data_list.append( [ - line.id, line.abbrev, line.sname, line.lname, line.gmid, line.gmname, line.wallet, line.gsheet, - line.team_value, line.collection_value, line.logo, f'\'{line.color}', line.season, line.ranking + line.id, + line.abbrev, + line.sname, + line.lname, + line.gmid, + line.gmname, + line.wallet, + line.gsheet, + line.team_value, + line.collection_value, + line.logo, + f"'{line.color}", + line.season, + line.ranking, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_teams = {'count': all_teams.count(), 'teams': []} + return_teams = {"count": all_teams.count(), "teams": []} for x in all_teams: - return_teams['teams'].append(model_to_dict(x)) + return_teams["teams"].append(model_to_dict(x)) db.close() return return_teams 
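Both the /api/v1/teams handler above and the v2 teams router share the same csv=true response pattern: build a list-of-lists, render it with pandas, and return it with a text/csv media type. The sketch below illustrates that pattern; the demo app, route path, and sample rows are hypothetical stand-ins for the real query results.

from fastapi import FastAPI, Response
from pandas import DataFrame

demo_app = FastAPI()  # illustration only; the real app is defined in main.py

@demo_app.get("/demo/teams.csv")
async def demo_teams_csv():
    # Header row first, then one row per team, mirroring the data_list built above.
    data_list = [
        ["id", "abbrev", "sname"],
        [1, "NYY", "Yankees"],
        [2, "BOS", "Red Sox"],
    ]
    csv_text = DataFrame(data_list).to_csv(header=False, index=False)
    return Response(content=csv_text, media_type="text/csv")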
-@app.get('/api/v1/teams/{team_id}') +@app.get("/api/v1/teams/{team_id}") async def v1_teams_get_one(team_id, csv: Optional[bool] = False): try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if csv: - team_packs = Pack.select().where((Pack.team == this_team) & (Pack.open_time.is_null(True))) + team_packs = Pack.select().where( + (Pack.team == this_team) & (Pack.open_time.is_null(True)) + ) data_list = [ - ['id', 'abbrev', 'sname', 'lname', 'gmid', 'gmname', 'wallet', 'ranking', 'gsheet', 'sealed_packs', - 'collection_value', 'logo', 'color', 'season'], - [this_team.id, this_team.abbrev, this_team.sname, this_team.lname, this_team.gmid, this_team.gmname, - this_team.wallet, this_team.ranking, this_team.gsheet, team_packs.count(), this_team.collection_value, - this_team.logo, this_team.color, this_team.season] + [ + "id", + "abbrev", + "sname", + "lname", + "gmid", + "gmname", + "wallet", + "ranking", + "gsheet", + "sealed_packs", + "collection_value", + "logo", + "color", + "season", + ], + [ + this_team.id, + this_team.abbrev, + this_team.sname, + this_team.lname, + this_team.gmid, + this_team.gmname, + this_team.wallet, + this_team.ranking, + this_team.gsheet, + team_packs.count(), + this_team.collection_value, + this_team.logo, + this_team.color, + this_team.season, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_team) db.close() return return_val -@app.get('/api/v1/teams/{team_id}/buy/players') +@app.get("/api/v1/teams/{team_id}/buy/players") async def v1_team_cards_buy(team_id: int, ids: str, ts: str): try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if ts != this_team.team_hash(): - logging.warning(f'Bad Team Secret: {ts} ({this_team.team_hash()})') + logging.warning(f"Bad Team Secret: {ts} ({this_team.team_hash()})") db.close() raise HTTPException( status_code=401, - detail=f'You are not authorized to buy {this_team.abbrev} cards. This event has been logged.' + detail=f"You are not authorized to buy {this_team.abbrev} cards. This event has been logged.", ) last_card = Card.select(Card.id).order_by(-Card.id).limit(1) lc_id = last_card[0].id - all_ids = ids.split(',') - conf_message = '' + all_ids = ids.split(",") + conf_message = "" total_cost = 0 for player_id in all_ids: - if player_id != '': + if player_id != "": try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id} /// ' - f'{conf_message} purchased') + raise HTTPException( + status_code=404, + detail=f"No player found with id {player_id} /// " + f"{conf_message} purchased", + ) # check wallet balance if this_team.wallet < this_player.cost: - logging.info(f'{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but ' - f'{this_player} costs {this_player.cost}₼.') + logging.info( + f"{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but " + f"{this_player} costs {this_player.cost}₼." 
+ ) db.close() raise HTTPException( 200, - detail=f'{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but ' - f'{this_player} costs {this_player.cost}₼. /// {conf_message} purchased' + detail=f"{this_player} was not purchased. {this_team.lname} only has {this_team.wallet}₼, but " + f"{this_player} costs {this_player.cost}₼. /// {conf_message} purchased", ) # Create player card and update cost buy_price = this_player.cost total_cost += buy_price this_card = Card( - player_id=this_player.player_id, - team_id=this_team.id, - value=buy_price + player_id=this_player.player_id, team_id=this_team.id, value=buy_price ) Paperdex.get_or_create(team_id=team_id, player_id=this_player.player_id) this_card.save() this_player.change_on_buy() # Deduct card cost from team - logging.info(f'{this_team.abbrev} starting wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} starting wallet: {this_team.wallet}") this_team.wallet -= buy_price this_team.save() - logging.info(f'{this_team.abbrev} ending wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} ending wallet: {this_team.wallet}") # Post a notification if this_player.rarity.value >= 2: new_notif = Notification( created=int_timestamp(datetime.now()), - title=f'Price Change', - desc='Modified by buying and selling', - field_name=f'{this_player.description}', - message=f'From {buy_price}₼ 📈 to **{this_player.cost}**₼', - about=f'Player-{this_player.player_id}' + title=f"Price Change", + desc="Modified by buying and selling", + field_name=f"{this_player.description}", + message=f"From {buy_price}₼ 📈 to **{this_player.cost}**₼", + about=f"Player-{this_player.player_id}", ) new_notif.save() - conf_message += f'{buy_price}₼ for {this_player.rarity.name} {this_player.p_name} ' \ - f'({this_player.cardset.name}), ' + conf_message += ( + f"{buy_price}₼ for {this_player.rarity.name} {this_player.p_name} " + f"({this_player.cardset.name}), " + ) # sheets.post_new_cards(SHEETS_AUTH, lc_id) - raise HTTPException(status_code=200, detail=f'{conf_message} purchased. /// Total Cost: {total_cost}₼ /// ' - f'Final Wallet: {this_team.wallet}') + raise HTTPException( + status_code=200, + detail=f"{conf_message} purchased. 
/// Total Cost: {total_cost}₼ /// " + f"Final Wallet: {this_team.wallet}", + ) -@app.get('/api/v1/teams/{team_id}/buy/pack/{packtype_id}') -async def v1_team_pack_buy(team_id: int, packtype_id: int, ts: str, quantity: Optional[int] = 1): +@app.get("/api/v1/teams/{team_id}/buy/pack/{packtype_id}") +async def v1_team_pack_buy( + team_id: int, packtype_id: int, ts: str, quantity: Optional[int] = 1 +): try: this_packtype = PackType.get_by_id(packtype_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No pack type found with id {packtype_id}') + raise HTTPException( + status_code=404, detail=f"No pack type found with id {packtype_id}" + ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if ts != this_team.team_hash(): - logging.warning(f'Bad Team Secret: {ts} ({this_team.team_hash()})') + logging.warning(f"Bad Team Secret: {ts} ({this_team.team_hash()})") db.close() - logging.warning(f'team: {this_team} / pack_type: {this_packtype} / secret: {ts} / ' - f'actual: {this_team.team_hash()}') + logging.warning( + f"team: {this_team} / pack_type: {this_packtype} / secret: {ts} / " + f"actual: {this_team.team_hash()}" + ) raise HTTPException( status_code=401, - detail=f'You are not authorized to buy {this_team.abbrev} packs. This event has been logged.' + detail=f"You are not authorized to buy {this_team.abbrev} packs. This event has been logged.", ) # check wallet balance @@ -510,8 +616,8 @@ async def v1_team_pack_buy(team_id: int, packtype_id: int, ts: str, quantity: Op db.close() raise HTTPException( 200, - detail=f'{this_packtype} was not purchased. {this_team.lname} only has {this_team.wallet} bucks, but ' - f'{this_packtype} costs {this_packtype.cost}.' + detail=f"{this_packtype} was not purchased. {this_team.lname} only has {this_team.wallet} bucks, but " + f"{this_packtype} costs {this_packtype.cost}.", ) all_packs = [] @@ -519,10 +625,10 @@ async def v1_team_pack_buy(team_id: int, packtype_id: int, ts: str, quantity: Op all_packs.append(Pack(team_id=this_team.id, pack_type_id=this_packtype.id)) # Deduct card cost from team - logging.info(f'{this_team.abbrev} starting wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} starting wallet: {this_team.wallet}") this_team.wallet -= total_cost this_team.save() - logging.info(f'{this_team.abbrev} ending wallet: {this_team.wallet}') + logging.info(f"{this_team.abbrev} ending wallet: {this_team.wallet}") with db.atomic(): Pack.bulk_create(all_packs, batch_size=15) @@ -530,49 +636,53 @@ async def v1_team_pack_buy(team_id: int, packtype_id: int, ts: str, quantity: Op raise HTTPException( status_code=200, - detail=f'Quantity {quantity} {this_packtype.name} pack{"s" if quantity > 1 else ""} have been purchased by ' - f'{this_team.lname} for {total_cost} bucks. You may close this window.' + detail=f"Quantity {quantity} {this_packtype.name} pack{'s' if quantity > 1 else ''} have been purchased by " + f"{this_team.lname} for {total_cost} bucks. 
You may close this window.", ) -@app.get('/api/v1/teams/{team_id}/sell/cards') +@app.get("/api/v1/teams/{team_id}/sell/cards") async def v1_team_cards_sell(team_id: int, ids: str, ts: str): try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if ts != this_team.team_hash(): - logging.warning(f'Bad Team Secret: {ts} ({this_team.team_hash()})') + logging.warning(f"Bad Team Secret: {ts} ({this_team.team_hash()})") db.close() raise HTTPException( status_code=401, - detail=f'You are not authorized to sell {this_team.abbrev} cards. This event has been logged.' + detail=f"You are not authorized to sell {this_team.abbrev} cards. This event has been logged.", ) - all_ids = ids.split(',') + all_ids = ids.split(",") del_ids = [] - conf_message = '' + conf_message = "" total_cost = 0 for card_id in all_ids: - if card_id != '': + if card_id != "": try: this_card = Card.get_by_id(card_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No card found with id {card_id}') + raise HTTPException( + status_code=404, detail=f"No card found with id {card_id}" + ) del_ids.append(card_id) this_player = this_card.player if this_card.team != this_team: - raise HTTPException(status_code=401, - detail=f'Card id {card_id} ({this_player.p_name}) belongs to ' - f'{this_card.team.abbrev} and cannot be sold. /// {conf_message} sold') + raise HTTPException( + status_code=401, + detail=f"Card id {card_id} ({this_player.p_name}) belongs to " + f"{this_card.team.abbrev} and cannot be sold. /// {conf_message} sold", + ) orig_price = this_player.cost - sell_price = round(this_player.cost * .5) + sell_price = round(this_player.cost * 0.5) total_cost += sell_price # credit selling team's wallet @@ -590,23 +700,28 @@ async def v1_team_cards_sell(team_id: int, ids: str, ts: str): if this_player.rarity.value >= 2: new_notif = Notification( created=int_timestamp(datetime.now()), - title=f'Price Change', - desc='Modified by buying and selling', - field_name=f'{this_player.description}', - message=f'From {orig_price}₼ 📉 to **{this_player.cost}**₼', - about=f'Player-{this_player.id}' + title=f"Price Change", + desc="Modified by buying and selling", + field_name=f"{this_player.description}", + message=f"From {orig_price}₼ 📉 to **{this_player.cost}**₼", + about=f"Player-{this_player.id}", ) new_notif.save() - conf_message += f'{sell_price}₼ for {this_player.rarity.name} {this_player.p_name} ' \ - f'({this_player.cardset.name}), ' + conf_message += ( + f"{sell_price}₼ for {this_player.rarity.name} {this_player.p_name} " + f"({this_player.cardset.name}), " + ) # sheets.post_deletion(SHEETS_AUTH, del_ids) - raise HTTPException(status_code=200, detail=f'{conf_message} sold. /// Total Earned: {total_cost}₼ /// ' - f'Final Wallet: {this_team.wallet}') + raise HTTPException( + status_code=200, + detail=f"{conf_message} sold. 
/// Total Earned: {total_cost}₼ /// " + f"Final Wallet: {this_team.wallet}", + ) -@app.get('/api/v1/teams/{team_id}/cards') +@app.get("/api/v1/teams/{team_id}/cards") async def v1_teams_cards_get(team_id, csv: Optional[bool] = True): """ CSV output specifically targeting team roster sheet @@ -620,59 +735,98 @@ async def v1_teams_cards_get(team_id, csv: Optional[bool] = True): this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if not csv: db.close() raise HTTPException( status_code=400, - detail='The /teams/{team_id}/cards endpoint only supports csv output.' + detail="The /teams/{team_id}/cards endpoint only supports csv output.", ) - all_cards = (Card - .select() - .join(Player) - .join(Rarity) - .where(Card.team == this_team) - .order_by(-Card.player.rarity.value, Card.player.p_name) - ) + all_cards = ( + Card.select() + .join(Player) + .join(Rarity) + .where(Card.team == this_team) + .order_by(-Card.player.rarity.value, Card.player.p_name) + ) if all_cards.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'No cards found') + raise HTTPException(status_code=404, detail=f"No cards found") - data_list = [[ - 'cardset', 'player', 'rarity', 'image', 'image2', 'pos_1', 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', - 'pos_7', 'pos_8', 'cost', 'mlbclub', 'franchise', 'set_num', 'bbref_id', 'player_id', 'card_id' - ]] + data_list = [ + [ + "cardset", + "player", + "rarity", + "image", + "image2", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "cost", + "mlbclub", + "franchise", + "set_num", + "bbref_id", + "player_id", + "card_id", + ] + ] for line in all_cards: data_list.append( [ - line.player.cardset, line.player.p_name, line.player.rarity, line.player.image, line.player.image2, - line.player.pos_1, line.player.pos_2, line.player.pos_3, line.player.pos_4, line.player.pos_5, - line.player.pos_6, line.player.pos_7, line.player.pos_8, line.player.cost, line.player.mlbclub, - line.player.franchise, line.player.set_num, line.player.bbref_id, line.player.player_id, line.id + line.player.cardset, + line.player.p_name, + line.player.rarity, + line.player.image, + line.player.image2, + line.player.pos_1, + line.player.pos_2, + line.player.pos_3, + line.player.pos_4, + line.player.pos_5, + line.player.pos_6, + line.player.pos_7, + line.player.pos_8, + line.player.cost, + line.player.mlbclub, + line.player.franchise, + line.player.set_num, + line.player.bbref_id, + line.player.player_id, + line.id, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") -@app.post('/api/v1/teams') +@app.post("/api/v1/teams") async def v1_teams_post(team: TeamModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post teams. This event has been logged.' + detail="You are not authorized to post teams. 
This event has been logged.", ) dupe_team = Team.get_or_none(Team.season == team.season, Team.abbrev == team.abbrev) if dupe_team: db.close() - raise HTTPException(status_code=400, detail=f'There is already a season {team.season} team using {team.abbrev}') + raise HTTPException( + status_code=400, + detail=f"There is already a season {team.season} team using {team.abbrev}", + ) this_team = Team( abbrev=team.abbrev, @@ -690,7 +844,7 @@ async def v1_teams_post(team: TeamModel, token: str = Depends(oauth2_scheme)): season=team.season, career=team.ps_shiny, has_guide=team.has_guide, - is_ai=team.is_ai + is_ai=team.is_ai, ) saved = this_team.save() @@ -699,43 +853,52 @@ async def v1_teams_post(team: TeamModel, token: str = Depends(oauth2_scheme)): db.close() return return_team else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@app.post('/api/v1/teams/new-season/{new_season}') +@app.post("/api/v1/teams/new-season/{new_season}") async def v1_teams_new_season(new_season: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post teams. This event has been logged.' + detail="You are not authorized to post teams. This event has been logged.", ) - r_query = Team.update(ranking=1000, season=new_season, wallet=Team.wallet + 250).execute() + r_query = Team.update( + ranking=1000, season=new_season, wallet=Team.wallet + 250 + ).execute() current = Current.latest() current.season = new_season current.save() db.close() - return {'detail': f'Team rankings, season, and wallet updated for season {new_season}'} + return { + "detail": f"Team rankings, season, and wallet updated for season {new_season}" + } -@app.post('/api/v1/teams/{team_id}/money/{delta}') -async def v1_teams_money_delta(team_id: int, delta: int, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/teams/{team_id}/money/{delta}") +async def v1_teams_money_delta( + team_id: int, delta: int, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to adjust wallets. This event has been logged.' + detail="You are not authorized to adjust wallets. 
This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") this_team.wallet += delta @@ -744,29 +907,44 @@ async def v1_teams_money_delta(team_id: int, delta: int, token: str = Depends(oa db.close() return return_team else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@app.patch('/api/v1/teams/{team_id}') +@app.patch("/api/v1/teams/{team_id}") async def v1_teams_patch( - team_id, sname: Optional[str] = None, lname: Optional[str] = None, gmid: Optional[int] = None, - gmname: Optional[str] = None, gsheet: Optional[str] = None, team_value: Optional[int] = None, - collection_value: Optional[int] = None, logo: Optional[str] = None, color: Optional[str] = None, - season: Optional[int] = None, ps_shiny: Optional[int] = None, wallet_delta: Optional[int] = None, - has_guide: Optional[bool] = None, is_ai: Optional[bool] = None, ranking: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + team_id, + sname: Optional[str] = None, + lname: Optional[str] = None, + gmid: Optional[int] = None, + gmname: Optional[str] = None, + gsheet: Optional[str] = None, + team_value: Optional[int] = None, + collection_value: Optional[int] = None, + logo: Optional[str] = None, + color: Optional[str] = None, + season: Optional[int] = None, + ps_shiny: Optional[int] = None, + wallet_delta: Optional[int] = None, + has_guide: Optional[bool] = None, + is_ai: Optional[bool] = None, + ranking: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete teams. This event has been logged.' + detail="You are not authorized to delete teams. This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") if sname is not None: this_team.sname = sname @@ -810,31 +988,34 @@ async def v1_teams_patch( db.close() return return_team else: - raise HTTPException(status_code=418, detail='Well slap my ass and call me a teapot; I could not save that team') + raise HTTPException( + status_code=418, + detail="Well slap my ass and call me a teapot; I could not save that team", + ) -@app.delete('/api/v1/teams/{team_id}') +@app.delete("/api/v1/teams/{team_id}") async def v1_teams_delete(team_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete teams. This event has been logged.' + detail="You are not authorized to delete teams. 
This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException(status_code=404, detail=f"No team found with id {team_id}") count = this_team.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Team {team_id} has been deleted') + raise HTTPException(status_code=200, detail=f"Team {team_id} has been deleted") else: - raise HTTPException(status_code=500, detail=f'Team {team_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Team {team_id} was not deleted") """ @@ -848,14 +1029,19 @@ class RarityModel(pydantic.BaseModel): color: str -@app.get('/api/v1/rarities') -async def v1_rarities_get(value: Optional[int] = None, name: Optional[str] = None, min_value: Optional[int] = None, - max_value: Optional[int] = None, csv: Optional[bool] = None): +@app.get("/api/v1/rarities") +async def v1_rarities_get( + value: Optional[int] = None, + name: Optional[str] = None, + min_value: Optional[int] = None, + max_value: Optional[int] = None, + csv: Optional[bool] = None, +): all_rarities = Rarity.select() if all_rarities.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no rarities to filter') + raise HTTPException(status_code=404, detail=f"There are no rarities to filter") if value is not None: all_rarities = all_rarities.where(Rarity.value == value) @@ -868,76 +1054,68 @@ async def v1_rarities_get(value: Optional[int] = None, name: Optional[str] = Non if all_rarities.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'No rarities found') + raise HTTPException(status_code=404, detail=f"No rarities found") if csv: - data_list = [['id', 'value', 'name']] + data_list = [["id", "value", "name"]] for line in all_rarities: - data_list.append( - [ - line.id, line.value, line.name - ] - ) + data_list.append([line.id, line.value, line.name]) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_rarities.count(), 'rarities': []} + return_val = {"count": all_rarities.count(), "rarities": []} for x in all_rarities: - return_val['rarities'].append(model_to_dict(x)) + return_val["rarities"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/rarities/{rarity_id}') +@app.get("/api/v1/rarities/{rarity_id}") async def v1_rarities_get_one(rarity_id, csv: Optional[bool] = False): try: this_rarity = Rarity.get_by_id(rarity_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No rarity found with id {rarity_id}') + raise HTTPException( + status_code=404, detail=f"No rarity found with id {rarity_id}" + ) if csv: - data_list = [['id', 'value', 'name']] + data_list = [["id", "value", "name"]] for line in this_rarity: - data_list.append( - [ - line.id, line.value, line.name - ] - ) + data_list.append([line.id, line.value, line.name]) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_rarity) db.close() return return_val -@app.post('/api/v1/rarities') +@app.post("/api/v1/rarities") async def v1_rarities_post(rarity: RarityModel, token: str = Depends(oauth2_scheme)): if not 
valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post rarities. This event has been logged.' + detail="You are not authorized to post rarities. This event has been logged.", ) dupe_team = Rarity.get_or_none(Rarity.name) if dupe_team: db.close() - raise HTTPException(status_code=400, detail=f'There is already a rarity using {rarity.name}') + raise HTTPException( + status_code=400, detail=f"There is already a rarity using {rarity.name}" + ) - this_rarity = Rarity( - value=rarity.value, - name=rarity.name, - color=rarity.color - ) + this_rarity = Rarity(value=rarity.value, name=rarity.name, color=rarity.color) saved = this_rarity.save() if saved == 1: @@ -947,26 +1125,32 @@ async def v1_rarities_post(rarity: RarityModel, token: str = Depends(oauth2_sche else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.patch('/api/v1/rarities/{rarity_id}') +@app.patch("/api/v1/rarities/{rarity_id}") async def v1_rarities_patch( - rarity_id, value: Optional[int] = None, name: Optional[str] = None, color: Optional[str] = None, - token: str = Depends(oauth2_scheme)): + rarity_id, + value: Optional[int] = None, + name: Optional[str] = None, + color: Optional[str] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch rarities. This event has been logged.' + detail="You are not authorized to patch rarities. This event has been logged.", ) try: this_rarity = Rarity.get_by_id(rarity_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No rarity found with id {rarity_id}') + raise HTTPException( + status_code=404, detail=f"No rarity found with id {rarity_id}" + ) if value is not None: this_rarity.value = value @@ -982,32 +1166,38 @@ async def v1_rarities_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/rarities/{rarity_id}') +@app.delete("/api/v1/rarities/{rarity_id}") async def v1_rarities_delete(rarity_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete rarities. This event has been logged.' + detail="You are not authorized to delete rarities. 
This event has been logged.", ) try: this_rarity = Rarity.get_by_id(rarity_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No rarity found with id {rarity_id}') + raise HTTPException( + status_code=404, detail=f"No rarity found with id {rarity_id}" + ) count = this_rarity.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Rarity {rarity_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Rarity {rarity_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Rarity {rarity_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Rarity {rarity_id} was not deleted" + ) """ @@ -1025,27 +1215,36 @@ class CardsetModel(pydantic.BaseModel): ranked_legal: Optional[bool] = True -@app.get('/api/v1/cardsets') +@app.get("/api/v1/cardsets") async def v1_cardsets_get( - name: Optional[str] = None, in_desc: Optional[str] = None, event_id: Optional[int] = None, - in_packs: Optional[bool] = None, ranked_legal: Optional[bool] = None, csv: Optional[bool] = None): + name: Optional[str] = None, + in_desc: Optional[str] = None, + event_id: Optional[int] = None, + in_packs: Optional[bool] = None, + ranked_legal: Optional[bool] = None, + csv: Optional[bool] = None, +): all_cardsets = Cardset.select() if all_cardsets.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no cardsets to filter') + raise HTTPException(status_code=404, detail=f"There are no cardsets to filter") if name is not None: all_cardsets = all_cardsets.where(fn.Lower(Cardset.name) == name.lower()) if in_desc is not None: - all_cardsets = all_cardsets.where(fn.Lower(Cardset.description).contains(in_desc.lower())) + all_cardsets = all_cardsets.where( + fn.Lower(Cardset.description).contains(in_desc.lower()) + ) if event_id is not None: try: this_event = Event.get_by_id(event_id) all_cardsets = all_cardsets.where(Cardset.event == this_event) except Exception as e: - logging.error(f'Failed to find event {event_id}: {e}') - raise HTTPException(status_code=404, detail=f'Event id {event_id} not found') + logging.error(f"Failed to find event {event_id}: {e}") + raise HTTPException( + status_code=404, detail=f"Event id {event_id} not found" + ) if in_packs is not None: all_cardsets = all_cardsets.where(Cardset.in_packs == in_packs) if ranked_legal is not None: @@ -1053,70 +1252,89 @@ async def v1_cardsets_get( if all_cardsets.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'No cardsets found') + raise HTTPException(status_code=404, detail=f"No cardsets found") if csv: - data_list = [[ - 'id', 'name', 'description', 'event_id', 'in_packs', 'for_purchase', 'total_cards', 'ranked_legal' - ]] + data_list = [ + [ + "id", + "name", + "description", + "event_id", + "in_packs", + "for_purchase", + "total_cards", + "ranked_legal", + ] + ] for line in all_cardsets: data_list.append( [ - line.id, line.name, line.description, line.event.id if line.event else '', line.in_packs, - line.for_purchase, line.total_cards, line.ranked_legal + line.id, + line.name, + line.description, + line.event.id if line.event else "", + line.in_packs, + line.for_purchase, + line.total_cards, + line.ranked_legal, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_cardsets.count(), 'cardsets': []} + return_val = 
{"count": all_cardsets.count(), "cardsets": []} for x in all_cardsets: - return_val['cardsets'].append(model_to_dict(x)) + return_val["cardsets"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/cardsets/{cardset_id}') +@app.get("/api/v1/cardsets/{cardset_id}") async def v1_cardsets_get_one(cardset_id, csv: Optional[bool] = False): try: this_cardset = Cardset.get_by_id(cardset_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cardset found with id {cardset_id}') + raise HTTPException( + status_code=404, detail=f"No cardset found with id {cardset_id}" + ) if csv: data_list = [ - ['id', 'name', 'description'], - [this_cardset.id, this_cardset.name, this_cardset.description] + ["id", "name", "description"], + [this_cardset.id, this_cardset.name, this_cardset.description], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_cardset) db.close() return return_val -@app.post('/api/v1/cardsets') +@app.post("/api/v1/cardsets") async def v1_cardsets_post(cardset: CardsetModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post cardsets. This event has been logged.' + detail="You are not authorized to post cardsets. This event has been logged.", ) dupe_set = Cardset.get_or_none(Cardset.name == cardset.name) if dupe_set: db.close() - raise HTTPException(status_code=400, detail=f'There is already a cardset using {cardset.name}') + raise HTTPException( + status_code=400, detail=f"There is already a cardset using {cardset.name}" + ) this_cardset = Cardset(**cardset.__dict__) @@ -1128,27 +1346,35 @@ async def v1_cardsets_post(cardset: CardsetModel, token: str = Depends(oauth2_sc else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that cardset' + detail="Well slap my ass and call me a teapot; I could not save that cardset", ) -@app.patch('/api/v1/cardsets/{cardset_id}') +@app.patch("/api/v1/cardsets/{cardset_id}") async def v1_cardsets_patch( - cardset_id, name: Optional[str] = None, description: Optional[str] = None, in_packs: Optional[bool] = None, - for_purchase: Optional[bool] = None, total_cards: Optional[int] = None, ranked_legal: Optional[bool] = None, - token: str = Depends(oauth2_scheme)): + cardset_id, + name: Optional[str] = None, + description: Optional[str] = None, + in_packs: Optional[bool] = None, + for_purchase: Optional[bool] = None, + total_cards: Optional[int] = None, + ranked_legal: Optional[bool] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch cardsets. This event has been logged.' + detail="You are not authorized to patch cardsets. 
This event has been logged.", ) try: this_cardset = Cardset.get_by_id(cardset_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cardset found with id {cardset_id}') + raise HTTPException( + status_code=404, detail=f"No cardset found with id {cardset_id}" + ) if name is not None: this_cardset.name = name @@ -1170,32 +1396,38 @@ async def v1_cardsets_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/cardsets/{cardset_id}') +@app.delete("/api/v1/cardsets/{cardset_id}") async def v1_cardsets_delete(cardset_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete cardsets. This event has been logged.' + detail="You are not authorized to delete cardsets. This event has been logged.", ) try: this_cardset = Cardset.get_by_id(cardset_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cardset found with id {cardset_id}') + raise HTTPException( + status_code=404, detail=f"No cardset found with id {cardset_id}" + ) count = this_cardset.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Cardset {cardset_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Cardset {cardset_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Cardset {cardset_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Cardset {cardset_id} was not deleted" + ) """ @@ -1236,20 +1468,36 @@ class PlayerModel(pydantic.BaseModel): # NOT A TEMPLATE - BROKE MOLD FOR pos_exclude -@app.get('/api/v1/players') +@app.get("/api/v1/players") async def v1_players_get( - name: Optional[str] = None, value: Optional[int] = None, min_cost: Optional[int] = None, - max_cost: Optional[int] = None, has_image2: Optional[bool] = None, mlbclub: Optional[str] = None, - franchise: Optional[str] = None, cardset_id: list = Query(default=None), rarity_id: list = Query(default=None), - pos_include: list = Query(default=None), pos_exclude: list = Query(default=None), has_headshot: Optional[bool] = None, - has_vanity_card: Optional[bool] = None, strat_code: Optional[str] = None, bbref_id: Optional[str] = None, - fangr_id: Optional[str] = None, inc_dex: Optional[bool] = True, in_desc: Optional[str] = None, - flat: Optional[bool] = False, sort_by: Optional[str] = False, cardset_id_exclude: list = Query(default=None), - limit: Optional[int] = None, csv: Optional[bool] = None): + name: Optional[str] = None, + value: Optional[int] = None, + min_cost: Optional[int] = None, + max_cost: Optional[int] = None, + has_image2: Optional[bool] = None, + mlbclub: Optional[str] = None, + franchise: Optional[str] = None, + cardset_id: list = Query(default=None), + rarity_id: list = Query(default=None), + pos_include: list = Query(default=None), + pos_exclude: list = Query(default=None), + has_headshot: Optional[bool] = None, + has_vanity_card: Optional[bool] = None, + strat_code: Optional[str] = None, + bbref_id: Optional[str] = None, + fangr_id: Optional[str] = None, + inc_dex: Optional[bool] = True, + in_desc: Optional[str] = None, + flat: Optional[bool] = False, + sort_by: Optional[str] = False, + cardset_id_exclude: list = 
Query(default=None), + limit: Optional[int] = None, + csv: Optional[bool] = None, +): all_players = Player.select() if all_players.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no players to filter') + raise HTTPException(status_code=404, detail=f"There are no players to filter") if name is not None: all_players = all_players.where(fn.Lower(Player.p_name) == name.lower()) @@ -1274,8 +1522,14 @@ async def v1_players_get( if pos_include is not None: p_list = [x.upper() for x in pos_include] all_players = all_players.where( - (Player.pos_1 << p_list) | (Player.pos_2 << p_list) | (Player.pos_3 << p_list) | (Player.pos_4 << p_list) | - (Player.pos_5 << p_list) | (Player.pos_6 << p_list) | (Player.pos_7 << p_list) | (Player.pos_8 << p_list) + (Player.pos_1 << p_list) + | (Player.pos_2 << p_list) + | (Player.pos_3 << p_list) + | (Player.pos_4 << p_list) + | (Player.pos_5 << p_list) + | (Player.pos_6 << p_list) + | (Player.pos_7 << p_list) + | (Player.pos_8 << p_list) ) if has_headshot is not None: all_players = all_players.where(Player.headshot.is_null(not has_headshot)) @@ -1288,26 +1542,30 @@ async def v1_players_get( if fangr_id is not None: all_players = all_players.where(Player.fangr_id == fangr_id) if in_desc is not None: - all_players = all_players.where(fn.Lower(Player.description).contains(in_desc.lower())) + all_players = all_players.where( + fn.Lower(Player.description).contains(in_desc.lower()) + ) if sort_by is not None: - if sort_by == 'cost-desc': + if sort_by == "cost-desc": all_players = all_players.order_by(-Player.cost) - elif sort_by == 'cost-asc': + elif sort_by == "cost-asc": all_players = all_players.order_by(Player.cost) - elif sort_by == 'name-asc': + elif sort_by == "name-asc": all_players = all_players.order_by(Player.p_name) - elif sort_by == 'name-desc': + elif sort_by == "name-desc": all_players = all_players.order_by(-Player.p_name) - elif sort_by == 'rarity-desc': + elif sort_by == "rarity-desc": all_players = all_players.order_by(Player.rarity) - elif sort_by == 'rarity-asc': + elif sort_by == "rarity-asc": all_players = all_players.order_by(-Player.rarity) final_players = [] # logging.info(f'pos_exclude: {type(pos_exclude)} - {pos_exclude} - is None: {pos_exclude is None}') for x in all_players: - if pos_exclude is not None and set([x.upper() for x in pos_exclude]).intersection(x.get_all_pos()): + if pos_exclude is not None and set( + [x.upper() for x in pos_exclude] + ).intersection(x.get_all_pos()): pass else: final_players.append(x) @@ -1321,37 +1579,83 @@ async def v1_players_get( if csv: all_players.order_by(-Player.rarity.value, Player.p_name) - data_list = [['id', 'name', 'value', 'image', 'image2', 'mlbclub', 'franchise', 'cardset', 'rarity', 'pos_1', - 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', 'pos_7', 'pos_8', 'headshot', 'vanity_card', - 'strat_code', 'bbref_id', 'description', 'for_purchase', 'ranked_legal']] + data_list = [ + [ + "id", + "name", + "value", + "image", + "image2", + "mlbclub", + "franchise", + "cardset", + "rarity", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "headshot", + "vanity_card", + "strat_code", + "bbref_id", + "description", + "for_purchase", + "ranked_legal", + ] + ] for line in final_players: data_list.append( [ - line.player_id, line.p_name, line.cost, line.image, line.image2, line.mlbclub, line.franchise, - line.cardset, line.rarity, line.pos_1, line.pos_2, line.pos_3, line.pos_4, line.pos_5, line.pos_6, - line.pos_7, line.pos_8, 
line.headshot, line.vanity_card, line.strat_code, line.bbref_id, - line.description, line.cardset.for_purchase, line.cardset.ranked_legal + line.player_id, + line.p_name, + line.cost, + line.image, + line.image2, + line.mlbclub, + line.franchise, + line.cardset, + line.rarity, + line.pos_1, + line.pos_2, + line.pos_3, + line.pos_4, + line.pos_5, + line.pos_6, + line.pos_7, + line.pos_8, + line.headshot, + line.vanity_card, + line.strat_code, + line.bbref_id, + line.description, + line.cardset.for_purchase, + line.cardset.ranked_legal, # line.description, line.cardset.in_packs, line.quantity ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': len(final_players), 'players': []} + return_val = {"count": len(final_players), "players": []} for x in final_players: - this_record = model_to_dict(x, recurse=not flat) if inc_dex: this_dex = Paperdex.select().where(Paperdex.player == x) - this_record['paperdex'] = {'count': this_dex.count(), 'paperdex': []} + this_record["paperdex"] = {"count": this_dex.count(), "paperdex": []} for y in this_dex: - this_record['paperdex']['paperdex'].append(model_to_dict(y, recurse=False)) + this_record["paperdex"]["paperdex"].append( + model_to_dict(y, recurse=False) + ) - return_val['players'].append(this_record) + return_val["players"].append(this_record) # return_val['players'].append(model_to_dict(x, recurse=not flat)) @@ -1359,19 +1663,26 @@ async def v1_players_get( return return_val -@app.get('/api/v1/players/random') +@app.get("/api/v1/players/random") async def v1_players_get_random( - min_cost: Optional[int] = None, max_cost: Optional[int] = None, in_packs: Optional[bool] = None, - min_rarity: Optional[int] = None, max_rarity: Optional[int] = None, limit: Optional[int] = None, - pos_include: Optional[str] = None, pos_exclude: Optional[str] = None, franchise: Optional[str] = None, - mlbclub: Optional[str] = None, cardset_id: list = Query(default=None), pos_inc: list = Query(default=None), - pos_exc: list = Query(default=None), csv: Optional[bool] = None): - all_players = (Player - .select() - .join(Cardset) - .switch(Player) - .join(Rarity) - .order_by(fn.Random())) + min_cost: Optional[int] = None, + max_cost: Optional[int] = None, + in_packs: Optional[bool] = None, + min_rarity: Optional[int] = None, + max_rarity: Optional[int] = None, + limit: Optional[int] = None, + pos_include: Optional[str] = None, + pos_exclude: Optional[str] = None, + franchise: Optional[str] = None, + mlbclub: Optional[str] = None, + cardset_id: list = Query(default=None), + pos_inc: list = Query(default=None), + pos_exc: list = Query(default=None), + csv: Optional[bool] = None, +): + all_players = ( + Player.select().join(Cardset).switch(Player).join(Rarity).order_by(fn.Random()) + ) if min_cost is not None: all_players = all_players.where(Player.cost >= min_cost) @@ -1386,10 +1697,14 @@ async def v1_players_get_random( all_players = all_players.where(Player.rarity.value <= max_rarity) if pos_include is not None: all_players = all_players.where( - (fn.lower(Player.pos_1) == pos_include.lower()) | (fn.lower(Player.pos_2) == pos_include.lower()) | - (fn.lower(Player.pos_3) == pos_include.lower()) | (fn.lower(Player.pos_4) == pos_include.lower()) | - (fn.lower(Player.pos_5) == pos_include.lower()) | (fn.lower(Player.pos_6) == pos_include.lower()) | - (fn.lower(Player.pos_7) == pos_include.lower()) | 
(fn.lower(Player.pos_8) == pos_include.lower()) + (fn.lower(Player.pos_1) == pos_include.lower()) + | (fn.lower(Player.pos_2) == pos_include.lower()) + | (fn.lower(Player.pos_3) == pos_include.lower()) + | (fn.lower(Player.pos_4) == pos_include.lower()) + | (fn.lower(Player.pos_5) == pos_include.lower()) + | (fn.lower(Player.pos_6) == pos_include.lower()) + | (fn.lower(Player.pos_7) == pos_include.lower()) + | (fn.lower(Player.pos_8) == pos_include.lower()) ) if franchise is not None: all_players = all_players.where(fn.Lower(Player.franchise) == franchise.lower()) @@ -1400,8 +1715,14 @@ async def v1_players_get_random( if pos_inc is not None: p_list = [x.upper() for x in pos_inc] all_players = all_players.where( - (Player.pos_1 << p_list) | (Player.pos_2 << p_list) | (Player.pos_3 << p_list) | (Player.pos_4 << p_list) | - (Player.pos_5 << p_list) | (Player.pos_6 << p_list) | (Player.pos_7 << p_list) | (Player.pos_8 << p_list) + (Player.pos_1 << p_list) + | (Player.pos_2 << p_list) + | (Player.pos_3 << p_list) + | (Player.pos_4 << p_list) + | (Player.pos_5 << p_list) + | (Player.pos_6 << p_list) + | (Player.pos_7 << p_list) + | (Player.pos_8 << p_list) ) # if pos_exc is not None: # p_list = [x.upper() for x in pos_exc] @@ -1434,99 +1755,202 @@ async def v1_players_get_random( # raise HTTPException(status_code=404, detail=f'No players found') if csv: - data_list = [['id', 'name', 'cost', 'image', 'image2', 'mlbclub', 'franchise', 'cardset', 'rarity', 'pos_1', - 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', 'pos_7', 'pos_8', 'headshot', 'vanity_card', - 'strat_code', 'bbref_id', 'description']] + data_list = [ + [ + "id", + "name", + "cost", + "image", + "image2", + "mlbclub", + "franchise", + "cardset", + "rarity", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "headshot", + "vanity_card", + "strat_code", + "bbref_id", + "description", + ] + ] for line in final_players: data_list.append( [ - line.id, line.p_name, line.cost, line.image, line.image2, - line.mlbclub, line.franchise, line.cardset.name, line.rarity.name, - line.pos_1, line.pos_2, line.pos_3, line.pos_4, line.pos_5, - line.pos_6, line.pos_7, line.pos_8, line.headshot, line.vanity_card, - line.strat_code, line.bbref_id, line.description + line.id, + line.p_name, + line.cost, + line.image, + line.image2, + line.mlbclub, + line.franchise, + line.cardset.name, + line.rarity.name, + line.pos_1, + line.pos_2, + line.pos_3, + line.pos_4, + line.pos_5, + line.pos_6, + line.pos_7, + line.pos_8, + line.headshot, + line.vanity_card, + line.strat_code, + line.bbref_id, + line.description, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': len(final_players), 'players': []} + return_val = {"count": len(final_players), "players": []} for x in final_players: this_record = model_to_dict(x) this_dex = Paperdex.select().where(Paperdex.player == x) - this_record['paperdex'] = {'count': this_dex.count(), 'paperdex': []} + this_record["paperdex"] = {"count": this_dex.count(), "paperdex": []} for y in this_dex: - this_record['paperdex']['paperdex'].append(model_to_dict(y, recurse=False)) + this_record["paperdex"]["paperdex"].append( + model_to_dict(y, recurse=False) + ) - return_val['players'].append(this_record) + return_val["players"].append(this_record) # return_val['players'].append(model_to_dict(x)) db.close() return 
return_val -@app.get('/api/v1/players/{player_id}') +@app.get("/api/v1/players/{player_id}") async def v1_players_get_one(player_id, csv: Optional[bool] = False): try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) if csv: - data_list = [['id', 'name', 'cost', 'image', 'image2', 'mlbclub', 'franchise', 'cardset', 'rarity', 'pos_1', - 'pos_2', 'pos_3', 'pos_4', 'pos_5', 'pos_6', 'pos_7', 'pos_8', 'headshot', 'vanity_card', - 'strat_code', 'bbref_id', 'description']] + data_list = [ + [ + "id", + "name", + "cost", + "image", + "image2", + "mlbclub", + "franchise", + "cardset", + "rarity", + "pos_1", + "pos_2", + "pos_3", + "pos_4", + "pos_5", + "pos_6", + "pos_7", + "pos_8", + "headshot", + "vanity_card", + "strat_code", + "bbref_id", + "description", + ] + ] return_val = DataFrame(data_list).to_csv(header=False, index=False) data_list.append( [ - this_player.id, this_player.p_name, this_player.cost, this_player.image, this_player.image2, - this_player.mlbclub, this_player.franchise, this_player.cardset.name, this_player.rarity.name, - this_player.pos_1, this_player.pos_2, this_player.pos_3, this_player.pos_4, this_player.pos_5, - this_player.pos_6, this_player.pos_7, this_player.pos_8, this_player.headshot, this_player.vanity_card, - this_player.strat_code, this_player.bbref_id, this_player.description + this_player.id, + this_player.p_name, + this_player.cost, + this_player.image, + this_player.image2, + this_player.mlbclub, + this_player.franchise, + this_player.cardset.name, + this_player.rarity.name, + this_player.pos_1, + this_player.pos_2, + this_player.pos_3, + this_player.pos_4, + this_player.pos_5, + this_player.pos_6, + this_player.pos_7, + this_player.pos_8, + this_player.headshot, + this_player.vanity_card, + this_player.strat_code, + this_player.bbref_id, + this_player.description, ] ) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_player) this_dex = Paperdex.select().where(Paperdex.player == this_player) - return_val['paperdex'] = {'count': this_dex.count(), 'paperdex': []} + return_val["paperdex"] = {"count": this_dex.count(), "paperdex": []} for x in this_dex: - return_val['paperdex']['paperdex'].append(model_to_dict(x, recurse=False)) + return_val["paperdex"]["paperdex"].append(model_to_dict(x, recurse=False)) db.close() return return_val -@app.patch('/api/v1/players/{player_id}') +@app.patch("/api/v1/players/{player_id}") async def v1_players_patch( - player_id, name: Optional[str] = None, image: Optional[str] = None, image2: Optional[str] = None, - mlbclub: Optional[str] = None, franchise: Optional[str] = None, cardset_id: Optional[int] = None, - rarity_id: Optional[int] = None, pos_1: Optional[str] = None, pos_2: Optional[str] = None, - pos_3: Optional[str] = None, pos_4: Optional[str] = None, pos_5: Optional[str] = None, - pos_6: Optional[str] = None, pos_7: Optional[str] = None, pos_8: Optional[str] = None, - headshot: Optional[str] = None, vanity_card: Optional[str] = None, strat_code: Optional[str] = None, - bbref_id: Optional[str] = None, description: Optional[str] = None, cost: Optional[int] = None, - fangr_id: Optional[str] = None, token: str = Depends(oauth2_scheme)): + player_id, + name: Optional[str] = None, + image: Optional[str] = None, + 
image2: Optional[str] = None, + mlbclub: Optional[str] = None, + franchise: Optional[str] = None, + cardset_id: Optional[int] = None, + rarity_id: Optional[int] = None, + pos_1: Optional[str] = None, + pos_2: Optional[str] = None, + pos_3: Optional[str] = None, + pos_4: Optional[str] = None, + pos_5: Optional[str] = None, + pos_6: Optional[str] = None, + pos_7: Optional[str] = None, + pos_8: Optional[str] = None, + headshot: Optional[str] = None, + vanity_card: Optional[str] = None, + strat_code: Optional[str] = None, + bbref_id: Optional[str] = None, + description: Optional[str] = None, + cost: Optional[int] = None, + fangr_id: Optional[str] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch players. This event has been logged.' + detail="You are not authorized to patch players. This event has been logged.", ) try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) if cost is not None: this_player.cost = cost @@ -1535,7 +1959,7 @@ async def v1_players_patch( if image is not None: this_player.image = image if image2 is not None: - if image2.lower() == 'false': + if image2.lower() == "false": this_player.image2 = None else: this_player.image2 = image2 @@ -1548,52 +1972,56 @@ async def v1_players_patch( this_cardset = Cardset.get_by_id(cardset_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cardset found with id {cardset_id}') + raise HTTPException( + status_code=404, detail=f"No cardset found with id {cardset_id}" + ) this_player.cardset = this_cardset if rarity_id is not None: try: this_rarity = Rarity.get_by_id(rarity_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No rarity found with id {rarity_id}') + raise HTTPException( + status_code=404, detail=f"No rarity found with id {rarity_id}" + ) this_player.rarity = this_rarity if pos_1 is not None: - if pos_1 == 'False': + if pos_1 == "False": this_player.pos_1 = None else: this_player.pos_1 = pos_1 if pos_2 is not None: - if pos_2 == 'False': + if pos_2 == "False": this_player.pos_2 = None else: this_player.pos_2 = pos_2 if pos_3 is not None: - if pos_3 == 'False': + if pos_3 == "False": this_player.pos_3 = None else: this_player.pos_3 = pos_3 if pos_4 is not None: - if pos_4 == 'False': + if pos_4 == "False": this_player.pos_4 = None else: this_player.pos_4 = pos_4 if pos_5 is not None: - if pos_5 == 'False': + if pos_5 == "False": this_player.pos_5 = None else: this_player.pos_5 = pos_5 if pos_6 is not None: - if pos_6 == 'False': + if pos_6 == "False": this_player.pos_6 = None else: this_player.pos_6 = pos_6 if pos_7 is not None: - if pos_7 == 'False': + if pos_7 == "False": this_player.pos_7 = None else: this_player.pos_7 = pos_7 if pos_8 is not None: - if pos_8 == 'False': + if pos_8 == "False": this_player.pos_8 = None else: this_player.pos_8 = pos_8 @@ -1617,18 +2045,18 @@ async def v1_players_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.put('/api/v1/players') +@app.put("/api/v1/players") async def 
v1_players_put(players: PlayerModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post players. This event has been logged.' + detail="You are not authorized to post players. This event has been logged.", ) new_players = [] @@ -1660,44 +2088,46 @@ async def v1_players_put(players: PlayerModel, token: str = Depends(oauth2_schem # description=x.description # ) # new_players.append(this_player) - new_players.append({ - 'player_id': x.player_id, - 'p_name': x.p_name, - 'cost': x.cost, - 'image': x.image, - 'image2': x.image2, - 'mlbclub': x.mlbclub.title(), - 'franchise': normalize_franchise(x.franchise), - 'cardset_id': x.cardset_id, - 'rarity_id': x.rarity_id, - 'set_num': x.set_num, - 'pos_1': x.pos_1, - 'pos_2': x.pos_2, - 'pos_3': x.pos_3, - 'pos_4': x.pos_4, - 'pos_5': x.pos_5, - 'pos_6': x.pos_6, - 'pos_7': x.pos_7, - 'pos_8': x.pos_8, - 'headshot': x.headshot, - 'vanity_card': x.vanity_card, - 'strat_code': x.strat_code, - 'fangr_id': x.fangr_id, - 'bbref_id': x.bbref_id, - 'description': x.description - }) + new_players.append( + { + "player_id": x.player_id, + "p_name": x.p_name, + "cost": x.cost, + "image": x.image, + "image2": x.image2, + "mlbclub": x.mlbclub.title(), + "franchise": normalize_franchise(x.franchise), + "cardset_id": x.cardset_id, + "rarity_id": x.rarity_id, + "set_num": x.set_num, + "pos_1": x.pos_1, + "pos_2": x.pos_2, + "pos_3": x.pos_3, + "pos_4": x.pos_4, + "pos_5": x.pos_5, + "pos_6": x.pos_6, + "pos_7": x.pos_7, + "pos_8": x.pos_8, + "headshot": x.headshot, + "vanity_card": x.vanity_card, + "strat_code": x.strat_code, + "fangr_id": x.fangr_id, + "bbref_id": x.bbref_id, + "description": x.description, + } + ) - logging.info(f'new_players: {new_players}') + logging.info(f"new_players: {new_players}") with db.atomic(): - # Player.bulk_create(new_players, batch_size=15) - for batch in chunked(new_players, 15): - logging.info(f'batch: {batch}') - Player.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_players(new_players, batch_size=15) db.close() # sheets.update_all_players(SHEETS_AUTH) - raise HTTPException(status_code=200, detail=f'{len(new_players)} players have been added') + raise HTTPException( + status_code=200, detail=f"{len(new_players)} players have been added" + ) # @app.put('/api/v1/players') @@ -1759,29 +2189,35 @@ async def v1_players_put(players: PlayerModel, token: str = Depends(oauth2_schem # for x in players.players: -@app.delete('/api/v1/players/{player_id}') +@app.delete("/api/v1/players/{player_id}") async def v1_players_delete(player_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete players. This event has been logged.' + detail="You are not authorized to delete players. 
This event has been logged.", ) try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) count = this_player.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Player {player_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Player {player_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Player {player_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Player {player_id} was not deleted" + ) """ @@ -1797,22 +2233,28 @@ class PacktypeModel(pydantic.BaseModel): available: Optional[bool] = True -@app.get('/api/v1/packtypes') +@app.get("/api/v1/packtypes") async def v1_packtypes_get( - name: Optional[str] = None, card_count: Optional[int] = None, in_desc: Optional[str] = None, - available: Optional[bool] = None, csv: Optional[bool] = None): + name: Optional[str] = None, + card_count: Optional[int] = None, + in_desc: Optional[str] = None, + available: Optional[bool] = None, + csv: Optional[bool] = None, +): all_packtypes = PackType.select() if all_packtypes.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no packtypes to filter') + raise HTTPException(status_code=404, detail=f"There are no packtypes to filter") if name is not None: all_packtypes = all_packtypes.where(fn.Lower(PackType.name) == name.lower()) if card_count is not None: all_packtypes = all_packtypes.where(PackType.card_count == card_count) if in_desc is not None: - all_packtypes = all_packtypes.where(fn.Lower(PackType.description).contains(in_desc.lower())) + all_packtypes = all_packtypes.where( + fn.Lower(PackType.description).contains(in_desc.lower()) + ) if available is not None: all_packtypes = all_packtypes.where(PackType.available == available) @@ -1821,44 +2263,47 @@ async def v1_packtypes_get( # raise HTTPException(status_code=404, detail=f'No packtypes found') if csv: - data_list = [['id', 'name', 'card_count', 'description']] + data_list = [["id", "name", "card_count", "description"]] for line in all_packtypes: - data_list.append( - [ - line.id, line.name, line.card_count, line.description - ] - ) + data_list.append([line.id, line.name, line.card_count, line.description]) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_packtypes.count(), 'packtypes': []} + return_val = {"count": all_packtypes.count(), "packtypes": []} for x in all_packtypes: - return_val['packtypes'].append(model_to_dict(x)) + return_val["packtypes"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/packtypes/{packtype_id}') +@app.get("/api/v1/packtypes/{packtype_id}") async def v1_packtypes_get_one(packtype_id, csv: Optional[bool] = False): try: this_packtype = PackType.get_by_id(packtype_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No packtype found with id {packtype_id}') + raise HTTPException( + status_code=404, detail=f"No packtype found with id {packtype_id}" + ) if csv: data_list = [ - ['id', 'name', 'card_count', 'description'], - [this_packtype.id, this_packtype.name, this_packtype.card_count, this_packtype.description] + ["id", "name", "card_count", 
"description"], + [ + this_packtype.id, + this_packtype.name, + this_packtype.card_count, + this_packtype.description, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_packtype) @@ -1866,27 +2311,31 @@ async def v1_packtypes_get_one(packtype_id, csv: Optional[bool] = False): return return_val -@app.post('/api/v1/packtypes') -async def v1_packtypes_post(packtype: PacktypeModel, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/packtypes") +async def v1_packtypes_post( + packtype: PacktypeModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post packtypes. This event has been logged.' + detail="You are not authorized to post packtypes. This event has been logged.", ) dupe_packtype = PackType.get_or_none(PackType.name == packtype.name) if dupe_packtype: db.close() - raise HTTPException(status_code=400, detail=f'There is already a packtype using {packtype.name}') + raise HTTPException( + status_code=400, detail=f"There is already a packtype using {packtype.name}" + ) this_packtype = PackType( name=packtype.name, card_count=packtype.card_count, description=packtype.description, cost=packtype.cost, - available=packtype.available + available=packtype.available, ) saved = this_packtype.save() @@ -1897,26 +2346,34 @@ async def v1_packtypes_post(packtype: PacktypeModel, token: str = Depends(oauth2 else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that cardset' + detail="Well slap my ass and call me a teapot; I could not save that cardset", ) -@app.patch('/api/v1/packtypes/{packtype_id}') +@app.patch("/api/v1/packtypes/{packtype_id}") async def v1_packtypes_patch( - packtype_id, name: Optional[str] = None, card_count: Optional[int] = None, description: Optional[str] = None, - cost: Optional[int] = None, available: Optional[bool] = None, token: str = Depends(oauth2_scheme)): + packtype_id, + name: Optional[str] = None, + card_count: Optional[int] = None, + description: Optional[str] = None, + cost: Optional[int] = None, + available: Optional[bool] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch packtypes. This event has been logged.' + detail="You are not authorized to patch packtypes. 
This event has been logged.", ) try: this_packtype = PackType.get_by_id(packtype_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No packtype found with id {packtype_id}') + raise HTTPException( + status_code=404, detail=f"No packtype found with id {packtype_id}" + ) if name is not None: this_packtype.name = name @@ -1936,32 +2393,38 @@ async def v1_packtypes_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/packtypes/{packtype_id}') +@app.delete("/api/v1/packtypes/{packtype_id}") async def v1_packtypes_delete(packtype_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete packtypes. This event has been logged.' + detail="You are not authorized to delete packtypes. This event has been logged.", ) try: this_packtype = PackType.get_by_id(packtype_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No packtype found with id {packtype_id}') + raise HTTPException( + status_code=404, detail=f"No packtype found with id {packtype_id}" + ) count = this_packtype.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Packtype {packtype_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Packtype {packtype_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Packtype {packtype_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Packtype {packtype_id} was not deleted" + ) """ @@ -1981,30 +2444,41 @@ class PackModel(pydantic.BaseModel): packs: List[PackPydantic] -@app.get('/api/v1/packs') +@app.get("/api/v1/packs") async def v1_packs_get( - team_id: Optional[int] = None, pack_type_id: Optional[int] = None, opened: Optional[bool] = None, - limit: Optional[int] = None, new_to_old: Optional[bool] = None, pack_team_id: Optional[int] = None, - pack_cardset_id: Optional[int] = None, exact_match: Optional[bool] = False, csv: Optional[bool] = None): + team_id: Optional[int] = None, + pack_type_id: Optional[int] = None, + opened: Optional[bool] = None, + limit: Optional[int] = None, + new_to_old: Optional[bool] = None, + pack_team_id: Optional[int] = None, + pack_cardset_id: Optional[int] = None, + exact_match: Optional[bool] = False, + csv: Optional[bool] = None, +): all_packs = Pack.select() if all_packs.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no packs to filter') + raise HTTPException(status_code=404, detail=f"There are no packs to filter") if team_id is not None: try: this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {team_id}" + ) all_packs = all_packs.where(Pack.team == this_team) if pack_type_id is not None: try: this_pack_type = PackType.get_by_id(pack_type_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No pack type found with id {pack_type_id}') + raise HTTPException( + status_code=404, detail=f"No pack type found with id {pack_type_id}" + ) all_packs = all_packs.where(Pack.pack_type == this_pack_type) if pack_team_id is not None: 
@@ -2012,7 +2486,9 @@ async def v1_packs_get( this_pack_team = Team.get_by_id(pack_team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {pack_team_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {pack_team_id}" + ) all_packs = all_packs.where(Pack.pack_team == this_pack_team) elif exact_match: all_packs = all_packs.where(Pack.pack_team == None) @@ -2022,7 +2498,9 @@ async def v1_packs_get( this_pack_cardset = Cardset.get_by_id(pack_cardset_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cardset found with id {pack_cardset_id}') + raise HTTPException( + status_code=404, detail=f"No cardset found with id {pack_cardset_id}" + ) all_packs = all_packs.where(Pack.pack_cardset == this_pack_cardset) elif exact_match: all_packs = all_packs.where(Pack.pack_cardset == None) @@ -2039,46 +2517,54 @@ async def v1_packs_get( # raise HTTPException(status_code=404, detail=f'No packs found') if csv: - data_list = [['id', 'team', 'pack_type', 'open_time']] + data_list = [["id", "team", "pack_type", "open_time"]] for line in all_packs: data_list.append( [ - line.id, line.team.abbrev, line.pack_type.name, - datetime.fromtimestamp(line.open_time) if line.open_time else None + line.id, + line.team.abbrev, + line.pack_type.name, + datetime.fromtimestamp(line.open_time) if line.open_time else None, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_packs.count(), 'packs': []} + return_val = {"count": all_packs.count(), "packs": []} for x in all_packs: - return_val['packs'].append(model_to_dict(x)) + return_val["packs"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/packs/{pack_id}') +@app.get("/api/v1/packs/{pack_id}") async def v1_packs_get_one(pack_id, csv: Optional[bool] = False): try: this_pack = Pack.get_by_id(pack_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}') + raise HTTPException(status_code=404, detail=f"No pack found with id {pack_id}") if csv: data_list = [ - ['id', 'team', 'pack_type', 'open_time'], - [this_pack.id, this_pack.team.abbrev, this_pack.pack_type.name, - datetime.fromtimestamp(this_pack.open_time) if this_pack.open_time else None] + ["id", "team", "pack_type", "open_time"], + [ + this_pack.id, + this_pack.team.abbrev, + this_pack.pack_type.name, + datetime.fromtimestamp(this_pack.open_time) + if this_pack.open_time + else None, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_pack) @@ -2086,14 +2572,14 @@ async def v1_packs_get_one(pack_id, csv: Optional[bool] = False): return return_val -@app.post('/api/v1/packs') +@app.post("/api/v1/packs") async def v1_packs_post(packs: PackModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post packs. This event has been logged.' + detail="You are not authorized to post packs. 
This event has been logged.", ) new_packs = [] @@ -2103,7 +2589,7 @@ async def v1_packs_post(packs: PackModel, token: str = Depends(oauth2_scheme)): pack_type_id=x.pack_type_id, pack_team_id=x.pack_team_id, pack_cardset_id=x.pack_cardset_id, - open_time=x.open_time if x.open_time != "" else None + open_time=x.open_time if x.open_time != "" else None, ) new_packs.append(this_player) @@ -2111,17 +2597,19 @@ async def v1_packs_post(packs: PackModel, token: str = Depends(oauth2_scheme)): Pack.bulk_create(new_packs, batch_size=15) db.close() - raise HTTPException(status_code=200, detail=f'{len(new_packs)} packs have been added') + raise HTTPException( + status_code=200, detail=f"{len(new_packs)} packs have been added" + ) -@app.post('/api/v1/packs/one') +@app.post("/api/v1/packs/one") async def v1_packs_post_one(pack: PackPydantic, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post packs. This event has been logged.' + detail="You are not authorized to post packs. This event has been logged.", ) this_pack = Pack( @@ -2129,7 +2617,7 @@ async def v1_packs_post_one(pack: PackPydantic, token: str = Depends(oauth2_sche pack_type_id=pack.pack_type_id, pack_team_id=pack.pack_team_id, pack_cardset_id=pack.pack_cardset_id, - open_time=pack.open_time + open_time=pack.open_time, ) saved = this_pack.save() @@ -2140,26 +2628,32 @@ async def v1_packs_post_one(pack: PackPydantic, token: str = Depends(oauth2_sche else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that cardset' + detail="Well slap my ass and call me a teapot; I could not save that cardset", ) -@app.patch('/api/v1/packs/{pack_id}') +@app.patch("/api/v1/packs/{pack_id}") async def v1_packs_patch( - pack_id, team_id: Optional[int] = None, pack_type_id: Optional[int] = None, open_time: Optional[int] = None, - pack_team_id: Optional[int] = None, pack_cardset_id: Optional[int] = None, token: str = Depends(oauth2_scheme)): + pack_id, + team_id: Optional[int] = None, + pack_type_id: Optional[int] = None, + open_time: Optional[int] = None, + pack_team_id: Optional[int] = None, + pack_cardset_id: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch packs. This event has been logged.' + detail="You are not authorized to patch packs. 
This event has been logged.", ) try: this_pack = Pack.get_by_id(pack_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}') + raise HTTPException(status_code=404, detail=f"No pack found with id {pack_id}") if team_id is not None: this_pack.team_id = team_id @@ -2179,32 +2673,32 @@ async def v1_packs_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/packs/{pack_id}') +@app.delete("/api/v1/packs/{pack_id}") async def v1_packs_delete(pack_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete packs. This event has been logged.' + detail="You are not authorized to delete packs. This event has been logged.", ) try: this_pack = Pack.get_by_id(pack_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No packs found with id {pack_id}') + raise HTTPException(status_code=404, detail=f"No packs found with id {pack_id}") count = this_pack.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Pack {pack_id} has been deleted') + raise HTTPException(status_code=200, detail=f"Pack {pack_id} has been deleted") else: - raise HTTPException(status_code=500, detail=f'Pack {pack_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Pack {pack_id} was not deleted") """ @@ -2223,12 +2717,19 @@ class CardModel(pydantic.BaseModel): cards: List[CardPydantic] -@app.get('/api/v1/cards') +@app.get("/api/v1/cards") async def v1_cards_get( - player_id: Optional[int] = None, team_id: Optional[int] = None, pack_id: Optional[int] = None, - value: Optional[int] = None, min_value: Optional[int] = None, max_value: Optional[int] = None, - order_by: Optional[str] = None, limit: Optional[int] = None, dupes: Optional[bool] = None, - csv: Optional[bool] = None): + player_id: Optional[int] = None, + team_id: Optional[int] = None, + pack_id: Optional[int] = None, + value: Optional[int] = None, + min_value: Optional[int] = None, + max_value: Optional[int] = None, + order_by: Optional[str] = None, + limit: Optional[int] = None, + dupes: Optional[bool] = None, + csv: Optional[bool] = None, +): all_cards = Card.select() # if all_cards.count() == 0: @@ -2240,21 +2741,27 @@ async def v1_cards_get( this_team = Team.get_by_id(team_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {team_id}" + ) all_cards = all_cards.where(Card.team == this_team) if player_id is not None: try: this_player = Player.get_by_id(player_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No player found with id {player_id}') + raise HTTPException( + status_code=404, detail=f"No player found with id {player_id}" + ) all_cards = all_cards.where(Card.player == this_player) if pack_id is not None: try: this_pack = Pack.get_by_id(pack_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}') + raise HTTPException( + status_code=404, detail=f"No pack found with id {pack_id}" + ) all_cards = all_cards.where(Card.pack == this_pack) if value is not 
None:
         all_cards = all_cards.where(Card.value == value)
@@ -2263,14 +2770,16 @@ async def v1_cards_get(
     if max_value is not None:
         all_cards = all_cards.where(Card.value <= max_value)
     if order_by is not None:
-        if order_by.lower() == 'new':
+        if order_by.lower() == "new":
             all_cards = all_cards.order_by(-Card.id)
     if limit is not None:
         all_cards = all_cards.limit(limit)
     if dupes:
         if team_id is None:
-            raise HTTPException(status_code=400, detail='Dupe checking must include a team_id')
-        logging.info(f'dupe check')
+            raise HTTPException(
+                status_code=400, detail="Dupe checking must include a team_id"
+            )
+        logging.info(f"dupe check")
         p_query = Card.select(Card.player).where(Card.team_id == team_id)
         seen = set()
         dupes = []
@@ -2286,32 +2795,41 @@ async def v1_cards_get(
     # raise HTTPException(status_code=404, detail=f'No cards found')

     if csv:
-        data_list = [['id', 'player', 'cardset', 'rarity', 'team', 'pack', 'value']]
+        data_list = [["id", "player", "cardset", "rarity", "team", "pack", "value"]]
         for line in all_cards:
             data_list.append(
                 [
-                    line.id, line.player.p_name, line.player.cardset, line.player.rarity, line.team.abbrev, line.pack,
-                    line.value
+                    line.id,
+                    line.player.p_name,
+                    line.player.cardset,
+                    line.player.rarity,
+                    line.team.abbrev,
+                    line.pack,
+                    line.value,
                 ]
             )

         return_val = DataFrame(data_list).to_csv(header=False, index=False)
         db.close()
-        return Response(content=return_val, media_type='text/csv')
+        return Response(content=return_val, media_type="text/csv")

     else:
-        return_val = {'count': all_cards.count(), 'cards': []}
+        return_val = {"count": all_cards.count(), "cards": []}
         for x in all_cards:
-            this_record = model_to_dict(x)
-            logging.debug(f'this_record: {this_record}')
+            this_record = model_to_dict(x)
+            logging.debug(f"this_record: {this_record}")
             this_dex = Paperdex.select().where(Paperdex.player == x)
-            this_record['player']['paperdex'] = {'count': this_dex.count(), 'paperdex': []}
+            this_record["player"]["paperdex"] = {
+                "count": this_dex.count(),
+                "paperdex": [],
+            }
             for y in this_dex:
-                this_record['player']['paperdex']['paperdex'].append(model_to_dict(y, recurse=False))
+                this_record["player"]["paperdex"]["paperdex"].append(
+                    model_to_dict(y, recurse=False)
+                )

-            return_val['cards'].append(this_record)
+            return_val["cards"].append(this_record)

         # return_val['cards'].append(model_to_dict(x))
@@ -2319,24 +2837,32 @@ async def v1_cards_get(
         return return_val

-@app.get('/api/v1/cards/{card_id}')
+@app.get("/api/v1/cards/{card_id}")
 async def v1_cards_get_one(card_id, csv: Optional[bool] = False):
     try:
         this_card = Card.get_by_id(card_id)
     except Exception:
         db.close()
-        raise HTTPException(status_code=404, detail=f'No card found with id {card_id}')
+        raise HTTPException(status_code=404, detail=f"No card found with id {card_id}")

     if csv:
         data_list = [
-            ['id', 'player', 'team', 'pack', 'value', 'roster1', 'roster2', 'roster3'],
-            [this_card.id, this_card.player, this_card.team.abbrev, this_card.pack, this_card.value,
-             this_card.roster1.name, this_card.roster2.name, this_card.roster3.name]
+            ["id", "player", "team", "pack", "value", "roster1", "roster2", "roster3"],
+            [
+                this_card.id,
+                this_card.player,
+                this_card.team.abbrev,
+                this_card.pack,
+                this_card.value,
+                this_card.roster1.name,
+                this_card.roster2.name,
+                this_card.roster3.name,
+            ],
         ]

         return_val = DataFrame(data_list).to_csv(header=False, index=False)
         db.close()
-        return Response(content=return_val, media_type='text/csv')
+        return Response(content=return_val, media_type="text/csv")

     else:
         return_val = model_to_dict(this_card)

@@ -2344,14 
+2870,14 @@ async def v1_cards_get_one(card_id, csv: Optional[bool] = False): return return_val -@app.post('/api/v1/cards') +@app.post("/api/v1/cards") async def v1_cards_post(cards: CardModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post cards. This event has been logged.' + detail="You are not authorized to post cards. This event has been logged.", ) last_card = Card.select(Card.id).order_by(-Card.id).limit(1) lc_id = last_card[0].id @@ -2360,17 +2886,14 @@ async def v1_cards_post(cards: CardModel, token: str = Depends(oauth2_scheme)): player_ids = [] inc_dex = True this_team = Team.get_by_id(cards.cards[0].team_id) - if this_team.is_ai or 'Gauntlet' in this_team.abbrev: + if this_team.is_ai or "Gauntlet" in this_team.abbrev: inc_dex = False # new_dex = [] # now = int(datetime.timestamp(datetime.now()) * 1000) for x in cards.cards: this_card = Card( - player_id=x.player_id, - team_id=x.team_id, - pack_id=x.pack_id, - value=x.value + player_id=x.player_id, team_id=x.team_id, pack_id=x.pack_id, value=x.value ) if inc_dex: Paperdex.get_or_create(team_id=x.team_id, player_id=x.player_id) @@ -2379,40 +2902,44 @@ async def v1_cards_post(cards: CardModel, token: str = Depends(oauth2_scheme)): with db.atomic(): Card.bulk_create(new_cards, batch_size=15) - cost_query = Player.update(cost=Player.cost + 1).where(Player.player_id << player_ids) + cost_query = Player.update(cost=Player.cost + 1).where( + Player.player_id << player_ids + ) cost_query.execute() # sheets.post_new_cards(SHEETS_AUTH, lc_id) db.close() - raise HTTPException(status_code=200, detail=f'{len(new_cards)} cards have been added') + raise HTTPException( + status_code=200, detail=f"{len(new_cards)} cards have been added" + ) -@app.post('/api/v1/cards/ai-update') +@app.post("/api/v1/cards/ai-update") async def v1_cards_ai_update(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to update AI cards. This event has been logged.' + detail="You are not authorized to update AI cards. 
This event has been logged.", ) sheets.send_ai_cards(SHEETS_AUTH) - raise HTTPException(status_code=200, detail=f'Just sent AI cards to sheets') + raise HTTPException(status_code=200, detail=f"Just sent AI cards to sheets") -@app.post('/api/v1/cards/legal-check/{rarity_name}') +@app.post("/api/v1/cards/legal-check/{rarity_name}") async def v1_cards_legal_check( - rarity_name: str, card_id: list = Query(default=None), token: str = Depends(oauth2_scheme)): + rarity_name: str, + card_id: list = Query(default=None), + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() - raise HTTPException( - status_code=401, - detail='Unauthorized' - ) - if rarity_name not in ['ranked']: - return f'Rarity name {rarity_name} not a valid check' + raise HTTPException(status_code=401, detail="Unauthorized") + if rarity_name not in ["ranked"]: + return f"Rarity name {rarity_name} not a valid check" bad_cards = [] all_cards = Card.select().where(Card.id << card_id) @@ -2421,57 +2948,60 @@ async def v1_cards_legal_check( if x.player.cardset_id not in [3, 4, 9, 10]: bad_cards.append(x.player.description) - return {'count': len(bad_cards), 'bad_cards': bad_cards} + return {"count": len(bad_cards), "bad_cards": bad_cards} -@app.post('/api/v1/cards/post-update/{starting_id}') +@app.post("/api/v1/cards/post-update/{starting_id}") async def v1_cards_post_update(starting_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to update card lists. This event has been logged.' + detail="You are not authorized to update card lists. This event has been logged.", ) # sheets.post_new_cards(SHEETS_AUTH, starting_id) db.close() - raise HTTPException(status_code=200, detail=f'Just sent cards to sheets starting at ID {starting_id}') + raise HTTPException( + status_code=200, + detail=f"Just sent cards to sheets starting at ID {starting_id}", + ) -@app.post('/api/v1/cards/post-delete') +@app.post("/api/v1/cards/post-delete") async def v1_cards_post_delete(del_ids: str, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete card lists. This event has been logged.' + detail="You are not authorized to delete card lists. This event has been logged.", ) - logging.info(f'del_ids: {del_ids} / type: {type(del_ids)}') + logging.info(f"del_ids: {del_ids} / type: {type(del_ids)}") # sheets.post_deletion(SHEETS_AUTH, del_ids.split(',')) -@app.post('/api/v1/cards/wipe-team/{team_id}') +@app.post("/api/v1/cards/wipe-team/{team_id}") async def v1_cards_wipe_team(team_id: int, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to wipe teams. This event has been logged.' + detail="You are not authorized to wipe teams. 
This event has been logged.", ) try: this_team = Team.get_by_id(team_id) except Exception as e: - logging.error(f'/cards/wipe-team/{team_id} - could not find team') - raise HTTPException(status_code=404, detail=f'Team {team_id} not found') + logging.error(f"/cards/wipe-team/{team_id} - could not find team") + raise HTTPException(status_code=404, detail=f"Team {team_id} not found") t_query = Card.update(team=None).where(Card.team == this_team).execute() db.close() - return f'Wiped {t_query} cards' + return f"Wiped {t_query} cards" # @app.get('/api/v1/cards/{card_id}/sell') @@ -2520,23 +3050,30 @@ async def v1_cards_wipe_team(team_id: int, token: str = Depends(oauth2_scheme)): # raise HTTPException(status_code=200, detail=f'Card {card_id} has been sold for {sell_price} bucks') -@app.patch('/api/v1/cards/{card_id}') +@app.patch("/api/v1/cards/{card_id}") async def v1_cards_patch( - card_id, player_id: Optional[int] = None, team_id: Optional[int] = None, pack_id: Optional[int] = None, - value: Optional[int] = None, roster1_id: Optional[int] = None, roster2_id: Optional[int] = None, - roster3_id: Optional[int] = None, token: str = Depends(oauth2_scheme)): + card_id, + player_id: Optional[int] = None, + team_id: Optional[int] = None, + pack_id: Optional[int] = None, + value: Optional[int] = None, + roster1_id: Optional[int] = None, + roster2_id: Optional[int] = None, + roster3_id: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch cards. This event has been logged.' + detail="You are not authorized to patch cards. This event has been logged.", ) try: this_card = Card.get_by_id(card_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No card found with id {card_id}') + raise HTTPException(status_code=404, detail=f"No card found with id {card_id}") if player_id is not None: this_card.player_id = player_id @@ -2564,32 +3101,32 @@ async def v1_cards_patch( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/cards/{card_id}') +@app.delete("/api/v1/cards/{card_id}") async def v1_cards_delete(card_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete packs. This event has been logged.' + detail="You are not authorized to delete packs. 
This event has been logged.", ) try: this_card = Card.get_by_id(card_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No cards found with id {card_id}') + raise HTTPException(status_code=404, detail=f"No cards found with id {card_id}") count = this_card.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Card {card_id} has been deleted') + raise HTTPException(status_code=200, detail=f"Card {card_id} has been deleted") else: - raise HTTPException(status_code=500, detail=f'Card {card_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Card {card_id} was not deleted") """ @@ -2606,62 +3143,82 @@ class EventModel(pydantic.BaseModel): active: Optional[bool] = False -@app.get('/api/v1/events') +@app.get("/api/v1/events") async def v1_events_get( - name: Optional[str] = None, in_desc: Optional[str] = None, active: Optional[bool] = None, - csv: Optional[bool] = None): + name: Optional[str] = None, + in_desc: Optional[str] = None, + active: Optional[bool] = None, + csv: Optional[bool] = None, +): all_events = Event.select() if name is not None: all_events = all_events.where(fn.Lower(Event.name) == name.lower()) if in_desc is not None: all_events = all_events.where( - (fn.Lower(Event.short_desc).contains(in_desc.lower())) | - (fn.Lower(Event.long_desc).contains(in_desc.lower())) + (fn.Lower(Event.short_desc).contains(in_desc.lower())) + | (fn.Lower(Event.long_desc).contains(in_desc.lower())) ) if active is not None: all_events = all_events.where(Event.active == active) if csv: - data_list = [['id', 'name', 'short_desc', 'long_desc', 'url', 'thumbnail', 'active']] + data_list = [ + ["id", "name", "short_desc", "long_desc", "url", "thumbnail", "active"] + ] for line in all_events: data_list.append( [ - line.id, line.name, line.short_desc, line.long_desc, line.url, line.thumbnail, line.active + line.id, + line.name, + line.short_desc, + line.long_desc, + line.url, + line.thumbnail, + line.active, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_events.count(), 'events': []} + return_val = {"count": all_events.count(), "events": []} for x in all_events: - return_val['events'].append(model_to_dict(x)) + return_val["events"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/events/{event_id}') +@app.get("/api/v1/events/{event_id}") async def v1_events_get_one(event_id, csv: Optional[bool] = False): try: this_event = Event.get_by_id(event_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No event found with id {event_id}') + raise HTTPException( + status_code=404, detail=f"No event found with id {event_id}" + ) if csv: data_list = [ - ['id', 'name', 'short_desc', 'long_desc', 'url', 'thumbnail', 'active'], - [this_event.id, this_event.name, this_event.short_desc, this_event.long_desc, this_event.url, - this_event.thumbnail, this_event.active] + ["id", "name", "short_desc", "long_desc", "url", "thumbnail", "active"], + [ + this_event.id, + this_event.name, + this_event.short_desc, + this_event.long_desc, + this_event.url, + this_event.thumbnail, + this_event.active, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: 
return_val = model_to_dict(this_event) @@ -2669,20 +3226,22 @@ async def v1_events_get_one(event_id, csv: Optional[bool] = False): return return_val -@app.post('/api/v1/events') +@app.post("/api/v1/events") async def v1_events_post(event: EventModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post events. This event has been logged.' + detail="You are not authorized to post events. This event has been logged.", ) dupe_event = Event.get_or_none(Event.name == event.name) if dupe_event: db.close() - raise HTTPException(status_code=400, detail=f'There is already an event using {event.name}') + raise HTTPException( + status_code=400, detail=f"There is already an event using {event.name}" + ) this_event = Event( name=event.name, @@ -2690,7 +3249,7 @@ async def v1_events_post(event: EventModel, token: str = Depends(oauth2_scheme)) long_desc=event.long_desc, url=event.url, thumbnail=event.thumbnail, - active=event.active + active=event.active, ) saved = this_event.save() @@ -2702,27 +3261,35 @@ async def v1_events_post(event: EventModel, token: str = Depends(oauth2_scheme)) db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that cardset' + detail="Well slap my ass and call me a teapot; I could not save that cardset", ) -@app.patch('/api/v1/events/{event_id}') +@app.patch("/api/v1/events/{event_id}") async def v1_events_patch( - event_id, name: Optional[str] = None, short_desc: Optional[str] = None, long_desc: Optional[str] = None, - url: Optional[str] = None, thumbnail: Optional[str] = None, active: Optional[bool] = None, - token: str = Depends(oauth2_scheme)): + event_id, + name: Optional[str] = None, + short_desc: Optional[str] = None, + long_desc: Optional[str] = None, + url: Optional[str] = None, + thumbnail: Optional[str] = None, + active: Optional[bool] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch events. This event has been logged.' + detail="You are not authorized to patch events. This event has been logged.", ) try: this_event = Event.get_by_id(event_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No event found with id {event_id}') + raise HTTPException( + status_code=404, detail=f"No event found with id {event_id}" + ) if name is not None: this_event.name = name @@ -2745,32 +3312,36 @@ async def v1_events_patch( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that event' + detail="Well slap my ass and call me a teapot; I could not save that event", ) -@app.delete('/api/v1/events/{event_id}') +@app.delete("/api/v1/events/{event_id}") async def v1_events_delete(event_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete events. This event has been logged.' + detail="You are not authorized to delete events. 
This event has been logged.", ) try: this_event = Event.get_by_id(event_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No event found with id {event_id}') + raise HTTPException( + status_code=404, detail=f"No event found with id {event_id}" + ) count = this_event.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Event {event_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Event {event_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Event {event_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Event {event_id} was not deleted") """ @@ -2780,12 +3351,12 @@ ROSTERS ENDPOINTS class RosterModel(pydantic.BaseModel): team_id: int - name: Optional[str] = 'My Roster' + name: Optional[str] = "My Roster" roster_num: int card_ids: list -@app.get('/api/v1/rosters') +@app.get("/api/v1/rosters") async def v1_rosters_get(team_id: Optional[int] = None, csv: Optional[bool] = None): all_rosters = Roster.select() @@ -2799,45 +3370,52 @@ async def v1_rosters_get(team_id: Optional[int] = None, csv: Optional[bool] = No all_rosters = all_rosters.where(Roster.team == this_team) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {team_id}" + ) if csv: - data_list = [['id', 'roster', 'team_id', 'team_abbrev']] + data_list = [["id", "roster", "team_id", "team_abbrev"]] for line in all_rosters: - data_list.append([ - line.id, line.name, line.team, line.team.abbrev - ]) + data_list.append([line.id, line.name, line.team, line.team.abbrev]) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_rosters.count(), 'rosters': []} + return_val = {"count": all_rosters.count(), "rosters": []} for x in all_rosters: - return_val['rosters'].append(model_to_dict(x)) + return_val["rosters"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/rosters/{roster_id}') +@app.get("/api/v1/rosters/{roster_id}") async def v1_rosters_get_one(roster_id, csv: Optional[bool] = None): try: this_roster = Roster.get_by_id(roster_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No roster found with id {roster_id}') + raise HTTPException( + status_code=404, detail=f"No roster found with id {roster_id}" + ) if csv: data_list = [ - ['id', 'roster', 'team_id', 'team_abbrev'], - [this_roster.id, this_roster.name, this_roster.team, this_roster.team.abbrev] + ["id", "roster", "team_id", "team_abbrev"], + [ + this_roster.id, + this_roster.name, + this_roster.team, + this_roster.team.abbrev, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_roster) @@ -2845,28 +3423,30 @@ async def v1_rosters_get_one(roster_id, csv: Optional[bool] = None): return return_val -@app.post('/api/v1/rosters') +@app.post("/api/v1/rosters") async def v1_rosters_post(roster: RosterModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, 
- detail='You are not authorized to post rosters. This event has been logged.' + detail="You are not authorized to post rosters. This event has been logged.", ) c_query = Card.select().where(Card.id << roster.card_ids) - logging.debug(f'c_query: {c_query}') + logging.debug(f"c_query: {c_query}") for card in c_query: if card.team_id != roster.team_id: raise HTTPException( status_code=401, - detail=f'Card ID {card.id} ({card.player.rarity.name} {card.player.p_name}) belongs to ' - f'{card.team.abbrev} and cannot be added to your roster.' + detail=f"Card ID {card.id} ({card.player.rarity.name} {card.player.p_name}) belongs to " + f"{card.team.abbrev} and cannot be added to your roster.", ) - r_query = Roster.delete().where(Roster.team_id == roster.team_id, Roster.roster_num == roster.roster_num) - logging.debug(f'r_query: {r_query}') + r_query = Roster.delete().where( + Roster.team_id == roster.team_id, Roster.roster_num == roster.roster_num + ) + logging.debug(f"r_query: {r_query}") r_query.execute() this_roster = Roster( @@ -2910,26 +3490,32 @@ async def v1_rosters_post(roster: RosterModel, token: str = Depends(oauth2_schem db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that roster' + detail="Well slap my ass and call me a teapot; I could not save that roster", ) -@app.patch('/api/v1/rosters/{roster_id}') +@app.patch("/api/v1/rosters/{roster_id}") async def v1_rosters_patch( - roster_id, team_id: Optional[int] = None, name: Optional[str] = None, roster_num: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + roster_id, + team_id: Optional[int] = None, + name: Optional[str] = None, + roster_num: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch rosters. This event has been logged.' + detail="You are not authorized to patch rosters. This event has been logged.", ) try: this_roster = Roster.get_by_id(roster_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No roster found with id {roster_id}') + raise HTTPException( + status_code=404, detail=f"No roster found with id {roster_id}" + ) if team_id is not None: this_roster.team_id = team_id @@ -2946,32 +3532,38 @@ async def v1_rosters_patch( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that event' + detail="Well slap my ass and call me a teapot; I could not save that event", ) -@app.delete('/api/v1/rosters/{roster_id}') +@app.delete("/api/v1/rosters/{roster_id}") async def v1_rosters_delete(roster_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete rosters. This event has been logged.' + detail="You are not authorized to delete rosters. 
This event has been logged.", ) try: this_roster = Roster.get_by_id(roster_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No roster found with id {roster_id}') + raise HTTPException( + status_code=404, detail=f"No roster found with id {roster_id}" + ) count = this_roster.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Roster {roster_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Roster {roster_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Roster {roster_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Roster {roster_id} was not deleted" + ) """ @@ -2996,15 +3588,28 @@ class ResultModel(pydantic.BaseModel): game_type: str -@app.get('/api/v1/results') +@app.get("/api/v1/results") async def v1_results_get( - away_team_id: Optional[int] = None, home_team_id: Optional[int] = None, team_one_id: Optional[int] = None, - team_two_id: Optional[int] = None, away_score_min: Optional[int] = None, away_score_max: Optional[int] = None, - home_score_min: Optional[int] = None, home_score_max: Optional[int] = None, bothscore_min: Optional[int] = None, - bothscore_max: Optional[int] = None, season: Optional[int] = None, week: Optional[int] = None, - week_start: Optional[int] = None, week_end: Optional[int] = None, ranked: Optional[bool] = None, - short_game: Optional[bool] = None, game_type: Optional[str] = None, vs_ai: Optional[bool] = None, - csv: Optional[bool] = None): + away_team_id: Optional[int] = None, + home_team_id: Optional[int] = None, + team_one_id: Optional[int] = None, + team_two_id: Optional[int] = None, + away_score_min: Optional[int] = None, + away_score_max: Optional[int] = None, + home_score_min: Optional[int] = None, + home_score_max: Optional[int] = None, + bothscore_min: Optional[int] = None, + bothscore_max: Optional[int] = None, + season: Optional[int] = None, + week: Optional[int] = None, + week_start: Optional[int] = None, + week_end: Optional[int] = None, + ranked: Optional[bool] = None, + short_game: Optional[bool] = None, + game_type: Optional[str] = None, + vs_ai: Optional[bool] = None, + csv: Optional[bool] = None, +): all_results = Result.select() # if all_results.count() == 0: @@ -3017,7 +3622,9 @@ async def v1_results_get( all_results = all_results.where(Result.away_team == this_team) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {away_team_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {away_team_id}" + ) if home_team_id is not None: try: @@ -3025,23 +3632,33 @@ async def v1_results_get( all_results = all_results.where(Result.home_team == this_team) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {home_team_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {home_team_id}" + ) if team_one_id is not None: try: this_team = Team.get_by_id(team_one_id) - all_results = all_results.where((Result.home_team == this_team) | (Result.away_team == this_team)) + all_results = all_results.where( + (Result.home_team == this_team) | (Result.away_team == this_team) + ) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_one_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {team_one_id}" + ) if team_two_id is not None: try: this_team = Team.get_by_id(team_two_id) - all_results = 
all_results.where((Result.home_team == this_team) | (Result.away_team == this_team)) + all_results = all_results.where( + (Result.home_team == this_team) | (Result.away_team == this_team) + ) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No team found with id {team_two_id}') + raise HTTPException( + status_code=404, detail=f"No team found with id {team_two_id}" + ) if away_score_min is not None: all_results = all_results.where(Result.away_score >= away_score_min) @@ -3056,10 +3673,14 @@ async def v1_results_get( all_results = all_results.where(Result.home_score <= home_score_max) if bothscore_min is not None: - all_results = all_results.where((Result.home_score >= bothscore_min) & (Result.away_score >= bothscore_min)) + all_results = all_results.where( + (Result.home_score >= bothscore_min) & (Result.away_score >= bothscore_min) + ) if bothscore_max is not None: - all_results = all_results.where((Result.home_score <= bothscore_max) & (Result.away_score <= bothscore_max)) + all_results = all_results.where( + (Result.home_score <= bothscore_max) & (Result.away_score <= bothscore_max) + ) if season is not None: all_results = all_results.where(Result.season == season) @@ -3103,49 +3724,96 @@ async def v1_results_get( # logging.info(f'Result Query:\n\n{all_results}') if csv: - data_list = [['id', 'away_abbrev', 'home_abbrev', 'away_score', 'home_score', 'away_tv', 'home_tv', - 'game_type', 'season', 'week', 'short_game', 'ranked']] + data_list = [ + [ + "id", + "away_abbrev", + "home_abbrev", + "away_score", + "home_score", + "away_tv", + "home_tv", + "game_type", + "season", + "week", + "short_game", + "ranked", + ] + ] for line in all_results: - data_list.append([ - line.id, line.away_team.abbrev, line.home_team.abbrev, line.away_score, line.home_score, - line.away_team_value, line.home_team_value, line.game_type if line.game_type else 'minor-league', - line.season, line.week, line.short_game, line.ranked - ]) + data_list.append( + [ + line.id, + line.away_team.abbrev, + line.home_team.abbrev, + line.away_score, + line.home_score, + line.away_team_value, + line.home_team_value, + line.game_type if line.game_type else "minor-league", + line.season, + line.week, + line.short_game, + line.ranked, + ] + ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_results.count(), 'results': []} + return_val = {"count": all_results.count(), "results": []} for x in all_results: - return_val['results'].append(model_to_dict(x)) + return_val["results"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/results/{result_id}') +@app.get("/api/v1/results/{result_id}") async def v1_results_get_one(result_id, csv: Optional[bool] = None): try: this_result = Result.get_by_id(result_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No result found with id {result_id}') + raise HTTPException( + status_code=404, detail=f"No result found with id {result_id}" + ) if csv: data_list = [ - ['id', 'away_abbrev', 'home_abbrev', 'away_score', 'home_score', 'away_tv', 'home_tv', 'game_type', - 'season', 'week', 'game_type'], - [this_result.id, this_result.away_team.abbrev, this_result.away_team.abbrev, this_result.away_score, - this_result.home_score, this_result.away_team_value, this_result.home_team_value, - this_result.game_type if this_result.game_type else 
'minor-league', - this_result.season, this_result.week, this_result.game_type] + [ + "id", + "away_abbrev", + "home_abbrev", + "away_score", + "home_score", + "away_tv", + "home_tv", + "game_type", + "season", + "week", + "game_type", + ], + [ + this_result.id, + this_result.away_team.abbrev, + this_result.away_team.abbrev, + this_result.away_score, + this_result.home_score, + this_result.away_team_value, + this_result.home_team_value, + this_result.game_type if this_result.game_type else "minor-league", + this_result.season, + this_result.week, + this_result.game_type, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_result) @@ -3153,15 +3821,21 @@ async def v1_results_get_one(result_id, csv: Optional[bool] = None): return return_val -@app.get('/api/v1/results/team/{team_id}') +@app.get("/api/v1/results/team/{team_id}") async def v1_results_team_get( - team_id: int, season: Optional[int] = None, week: Optional[int] = None, csv: Optional[bool] = False): - all_results = Result.select().where((Result.away_team_id == team_id) | (Result.home_team_id == team_id)) + team_id: int, + season: Optional[int] = None, + week: Optional[int] = None, + csv: Optional[bool] = False, +): + all_results = Result.select().where( + (Result.away_team_id == team_id) | (Result.home_team_id == team_id) + ) try: this_team = Team.get_by_id(team_id) except Exception as e: - logging.error(f'Unknown team id {team_id} trying to pull team results') - raise HTTPException(404, f'Team id {team_id} not found') + logging.error(f"Unknown team id {team_id} trying to pull team results") + raise HTTPException(404, f"Team id {team_id} not found") if season is not None: all_results = all_results.where(Result.season == season) @@ -3198,34 +3872,41 @@ async def v1_results_team_get( if csv: data_list = [ - ['team_id', 'ranked_wins', 'ranked_losses', 'casual_wins', 'casual_losses', 'team_ranking'], - [team_id, r_wins, r_loss, c_wins, c_loss, this_team.ranking] + [ + "team_id", + "ranked_wins", + "ranked_losses", + "casual_wins", + "casual_losses", + "team_ranking", + ], + [team_id, r_wins, r_loss, c_wins, c_loss, this_team.ranking], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = { - 'team': model_to_dict(this_team), - 'ranked_wins': r_wins, - 'ranked_losses': r_loss, - 'casual_wins': c_wins, - 'casual_losses': c_loss, + "team": model_to_dict(this_team), + "ranked_wins": r_wins, + "ranked_losses": r_loss, + "casual_wins": c_wins, + "casual_losses": c_loss, } db.close() return return_val -@app.post('/api/v1/results') +@app.post("/api/v1/results") async def v1_results_post(result: ResultModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post results. This event has been logged.' + detail="You are not authorized to post results. 
This event has been logged.", ) this_result = Result(**result.__dict__) @@ -3234,25 +3915,29 @@ async def v1_results_post(result: ResultModel, token: str = Depends(oauth2_schem if result.ranked: if not result.away_team_ranking: db.close() - error = f'Ranked game did not include away team ({result.away_team_id}) ranking.' + error = f"Ranked game did not include away team ({result.away_team_id}) ranking." logging.error(error) raise DataError(error) if not result.home_team_ranking: db.close() - error = f'Ranked game did not include home team ({result.home_team_id}) ranking.' + error = f"Ranked game did not include home team ({result.home_team_id}) ranking." logging.error(error) raise DataError(error) k_value = 20 if result.short_game else 60 ratio = (result.home_team_ranking - result.away_team_ranking) / 400 - exp_score = 1 / (1 + (10 ** ratio)) + exp_score = 1 / (1 + (10**ratio)) away_win = True if result.away_score > result.home_score else False total_delta = k_value * exp_score - high_delta = total_delta * exp_score if exp_score > .5 else total_delta * (1 - exp_score) + high_delta = ( + total_delta * exp_score + if exp_score > 0.5 + else total_delta * (1 - exp_score) + ) low_delta = total_delta - high_delta # exp_score > .5 means away team is favorite - if exp_score > .5 and away_win: + if exp_score > 0.5 and away_win: final_delta = low_delta away_delta = low_delta * 3 home_delta = -low_delta @@ -3260,7 +3945,7 @@ async def v1_results_post(result: ResultModel, token: str = Depends(oauth2_schem final_delta = high_delta away_delta = high_delta * 3 home_delta = -high_delta - elif exp_score <= .5 and not away_win: + elif exp_score <= 0.5 and not away_win: final_delta = low_delta away_delta = -low_delta home_delta = low_delta * 3 @@ -3273,18 +3958,20 @@ async def v1_results_post(result: ResultModel, token: str = Depends(oauth2_schem away_delta = 0 home_delta = 0 - logging.debug(f'/results ranking deltas\n\nk_value: {k_value} / ratio: {ratio} / ' - f'exp_score: {exp_score} / away_win: {away_win} / total_delta: {total_delta} / ' - f'high_delta: {high_delta} / low_delta: {low_delta} / final_delta: {final_delta} / ') + logging.debug( + f"/results ranking deltas\n\nk_value: {k_value} / ratio: {ratio} / " + f"exp_score: {exp_score} / away_win: {away_win} / total_delta: {total_delta} / " + f"high_delta: {high_delta} / low_delta: {low_delta} / final_delta: {final_delta} / " + ) away_team = Team.get_by_id(result.away_team_id) away_team.ranking += away_delta away_team.save() - logging.info(f'Just updated {away_team.abbrev} ranking to {away_team.ranking}') + logging.info(f"Just updated {away_team.abbrev} ranking to {away_team.ranking}") home_team = Team.get_by_id(result.home_team_id) home_team.ranking += home_delta home_team.save() - logging.info(f'Just updated {home_team.abbrev} ranking to {home_team.ranking}') + logging.info(f"Just updated {home_team.abbrev} ranking to {home_team.ranking}") if saved == 1: return_val = model_to_dict(this_result) @@ -3294,29 +3981,40 @@ async def v1_results_post(result: ResultModel, token: str = Depends(oauth2_schem db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that roster' + detail="Well slap my ass and call me a teapot; I could not save that roster", ) -@app.patch('/api/v1/results/{result_id}') +@app.patch("/api/v1/results/{result_id}") async def v1_results_patch( - result_id, away_team_id: Optional[int] = None, home_team_id: Optional[int] = None, - away_score: Optional[int] = None, home_score: 
Optional[int] = None, away_team_value: Optional[int] = None, - home_team_value: Optional[int] = None, scorecard: Optional[str] = None, week: Optional[int] = None, - season: Optional[int] = None, short_game: Optional[bool] = None, game_type: Optional[str] = None, - token: str = Depends(oauth2_scheme)): + result_id, + away_team_id: Optional[int] = None, + home_team_id: Optional[int] = None, + away_score: Optional[int] = None, + home_score: Optional[int] = None, + away_team_value: Optional[int] = None, + home_team_value: Optional[int] = None, + scorecard: Optional[str] = None, + week: Optional[int] = None, + season: Optional[int] = None, + short_game: Optional[bool] = None, + game_type: Optional[str] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch results. This event has been logged.' + detail="You are not authorized to patch results. This event has been logged.", ) try: this_result = Result.get_by_id(result_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No result found with id {result_id}') + raise HTTPException( + status_code=404, detail=f"No result found with id {result_id}" + ) if away_team_id is not None: this_result.away_team_id = away_team_id @@ -3362,32 +4060,38 @@ async def v1_results_patch( db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that event' + detail="Well slap my ass and call me a teapot; I could not save that event", ) -@app.delete('/api/v1/results/{result_id}') +@app.delete("/api/v1/results/{result_id}") async def v1_results_delete(result_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post results. This event has been logged.' + detail="You are not authorized to post results. 
This event has been logged.", ) try: this_result = Result.get_by_id(result_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No result found with id {result_id}') + raise HTTPException( + status_code=404, detail=f"No result found with id {result_id}" + ) count = this_result.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Result {result_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Result {result_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Result {result_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Result {result_id} was not deleted" + ) """ @@ -3398,22 +4102,27 @@ AWARDS ENDPOINTS class AwardModel(pydantic.BaseModel): name: str season: int - timing: str = 'In-Season' + timing: str = "In-Season" card_id: Optional[int] = None team_id: Optional[int] = None image: Optional[str] = None -@app.get('/api/v1/awards') +@app.get("/api/v1/awards") async def v1_awards_get( - name: Optional[str] = None, season: Optional[int] = None, timing: Optional[str] = None, - card_id: Optional[int] = None, team_id: Optional[int] = None, image: Optional[str] = None, - csv: Optional[bool] = None): + name: Optional[str] = None, + season: Optional[int] = None, + timing: Optional[str] = None, + card_id: Optional[int] = None, + team_id: Optional[int] = None, + image: Optional[str] = None, + csv: Optional[bool] = None, +): all_awards = Award.select() if all_awards.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no awards to filter') + raise HTTPException(status_code=404, detail=f"There are no awards to filter") if name is not None: all_awards = all_awards.where(Award.name == name) @@ -3429,43 +4138,60 @@ async def v1_awards_get( all_awards = all_awards.where(Award.image == image) if csv: - data_list = [['id', 'name', 'season', 'timing', 'card', 'team', 'image']] + data_list = [["id", "name", "season", "timing", "card", "team", "image"]] for line in all_awards: - data_list.append([ - line.id, line.name, line.season, line.timing, line.card, line.team, line.image - ]) + data_list.append( + [ + line.id, + line.name, + line.season, + line.timing, + line.card, + line.team, + line.image, + ] + ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_awards.count(), 'awards': []} + return_val = {"count": all_awards.count(), "awards": []} for x in all_awards: - return_val['awards'].append(model_to_dict(x)) + return_val["awards"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/awards/{award_id}') +@app.get("/api/v1/awards/{award_id}") async def v1_awards_get_one(award_id, csv: Optional[bool] = None): try: this_award = Award.get_by_id(award_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No award found with id {award_id}') + raise HTTPException( + status_code=404, detail=f"No award found with id {award_id}" + ) if csv: data_list = [ - ['id', 'name', 'season', 'timing', 'card', 'team', 'image'], - [this_award.id, this_award.name, this_award.season, this_award.timing, this_award.card, - this_award.team, this_award.image] + ["id", "name", "season", "timing", "card", "team", "image"], + [ + this_award.id, + this_award.name, + this_award.season, + this_award.timing, + this_award.card, + this_award.team, + 
this_award.image, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_award) @@ -3473,14 +4199,14 @@ async def v1_awards_get_one(award_id, csv: Optional[bool] = None): return return_val -@app.post('/api/v1/awards') +@app.post("/api/v1/awards") async def v1_awards_post(award: AwardModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post awards. This event has been logged.' + detail="You are not authorized to post awards. This event has been logged.", ) this_award = Award( @@ -3489,7 +4215,7 @@ async def v1_awards_post(award: AwardModel, token: str = Depends(oauth2_scheme)) timing=award.season, card_id=award.card_id, team_id=award.team_id, - image=award.image + image=award.image, ) saved = this_award.save() @@ -3501,32 +4227,36 @@ async def v1_awards_post(award: AwardModel, token: str = Depends(oauth2_scheme)) db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that roster' + detail="Well slap my ass and call me a teapot; I could not save that roster", ) -@app.delete('/api/v1/awards/{award_id}') +@app.delete("/api/v1/awards/{award_id}") async def v1_awards_delete(award_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete awards. This event has been logged.' + detail="You are not authorized to delete awards. 
This event has been logged.", ) try: this_award = Award.get_by_id(award_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No award found with id {award_id}') + raise HTTPException( + status_code=404, detail=f"No award found with id {award_id}" + ) count = this_award.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Award {award_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Award {award_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Award {award_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Award {award_id} was not deleted") """ @@ -3539,19 +4269,25 @@ class RewardModel(pydantic.BaseModel): season: int week: int team_id: int - created: Optional[int] = int(datetime.timestamp(datetime.now())*1000) + created: Optional[int] = int(datetime.timestamp(datetime.now()) * 1000) -@app.get('/api/v1/rewards') +@app.get("/api/v1/rewards") async def v1_rewards_get( - name: Optional[str] = None, in_name: Optional[str] = None, team_id: Optional[int] = None, - season: Optional[int] = None, week: Optional[int] = None, created_after: Optional[int] = None, - flat: Optional[bool] = False, csv: Optional[bool] = None): + name: Optional[str] = None, + in_name: Optional[str] = None, + team_id: Optional[int] = None, + season: Optional[int] = None, + week: Optional[int] = None, + created_after: Optional[int] = None, + flat: Optional[bool] = False, + csv: Optional[bool] = None, +): all_rewards = Reward.select() if all_rewards.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no rewards to filter') + raise HTTPException(status_code=404, detail=f"There are no rewards to filter") if name is not None: all_rewards = all_rewards.where(fn.Lower(Reward.name) == name.lower()) @@ -3568,47 +4304,53 @@ async def v1_rewards_get( if all_rewards.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'No rewards found') + raise HTTPException(status_code=404, detail=f"No rewards found") if csv: - data_list = [['id', 'name', 'team', 'daily', 'created']] + data_list = [["id", "name", "team", "daily", "created"]] for line in all_rewards: data_list.append( - [ - line.id, line.name, line.team.id, line.daily, line.created - ] + [line.id, line.name, line.team.id, line.daily, line.created] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_rewards.count(), 'rewards': []} + return_val = {"count": all_rewards.count(), "rewards": []} for x in all_rewards: - return_val['rewards'].append(model_to_dict(x, recurse=not flat)) + return_val["rewards"].append(model_to_dict(x, recurse=not flat)) db.close() return return_val -@app.get('/api/v1/rewards/{reward_id}') +@app.get("/api/v1/rewards/{reward_id}") async def v1_rewards_get_one(reward_id, csv: Optional[bool] = False): try: this_reward = Reward.get_by_id(reward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No reward found with id {reward_id}') + raise HTTPException( + status_code=404, detail=f"No reward found with id {reward_id}" + ) if csv: data_list = [ - ['id', 'name', 'card_count', 'description'], - [this_reward.id, this_reward.name, this_reward.team.id, this_reward.daily, this_reward.created] + ["id", "name", "card_count", "description"], + [ + this_reward.id, + 
this_reward.name, + this_reward.team.id, + this_reward.daily, + this_reward.created, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_reward) @@ -3616,14 +4358,14 @@ async def v1_rewards_get_one(reward_id, csv: Optional[bool] = False): return return_val -@app.post('/api/v1/rewards') +@app.post("/api/v1/rewards") async def v1_rewards_post(reward: RewardModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post rewards. This event has been logged.' + detail="You are not authorized to post rewards. This event has been logged.", ) this_reward = Reward(**reward.dict()) @@ -3636,26 +4378,32 @@ async def v1_rewards_post(reward: RewardModel, token: str = Depends(oauth2_schem else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that cardset' + detail="Well slap my ass and call me a teapot; I could not save that cardset", ) -@app.patch('/api/v1/rewards/{reward_id}') +@app.patch("/api/v1/rewards/{reward_id}") async def v1_rewards_patch( - reward_id, name: Optional[str] = None, team_id: Optional[int] = None, created: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + reward_id, + name: Optional[str] = None, + team_id: Optional[int] = None, + created: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch rewards. This event has been logged.' + detail="You are not authorized to patch rewards. This event has been logged.", ) try: this_reward = Reward.get_by_id(reward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No reward found with id {reward_id}') + raise HTTPException( + status_code=404, detail=f"No reward found with id {reward_id}" + ) if name is not None: this_reward.name = name @@ -3671,32 +4419,38 @@ async def v1_rewards_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/rewards/{reward_id}') +@app.delete("/api/v1/rewards/{reward_id}") async def v1_rewards_delete(reward_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete rewards. This event has been logged.' + detail="You are not authorized to delete rewards. 
This event has been logged.", ) try: this_reward = Reward.get_by_id(reward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No reward found with id {reward_id}') + raise HTTPException( + status_code=404, detail=f"No reward found with id {reward_id}" + ) count = this_reward.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Reward {reward_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Reward {reward_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Reward {reward_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Reward {reward_id} was not deleted" + ) """ @@ -3738,7 +4492,7 @@ class BatStat(pydantic.BaseModel): csc: Optional[int] = 0 week: int season: int - created: Optional[int] = int(datetime.timestamp(datetime.now())*100000) + created: Optional[int] = int(datetime.timestamp(datetime.now()) * 100000) game_id: int @@ -3746,10 +4500,19 @@ class BattingStatModel(pydantic.BaseModel): stats: List[BatStat] -@app.get('/api/v1/batstats') +@app.get("/api/v1/batstats") async def v1_batstats_get( - card_id: int = None, player_id: int = None, team_id: int = None, vs_team_id: int = None, week: int = None, - season: int = None, week_start: int = None, week_end: int = None, created: int = None, csv: bool = None): + card_id: int = None, + player_id: int = None, + team_id: int = None, + vs_team_id: int = None, + week: int = None, + season: int = None, + week_start: int = None, + week_end: int = None, + created: int = None, + csv: bool = None, +): all_stats = BattingStat.select().join(Card).join(Player) if card_id is not None: @@ -3776,42 +4539,120 @@ async def v1_batstats_get( # raise HTTPException(status_code=404, detail=f'No batting stats found') if csv: - data_list = [['id', 'card_id', 'player_id', 'cardset', 'team', 'vs_team', 'pos', 'pa', 'ab', 'run', 'hit', 'rbi', 'double', - 'triple', 'hr', 'bb', 'so', 'hbp', 'sac', 'ibb', 'gidp', 'sb', 'cs', 'bphr', 'bpfo', 'bp1b', - 'bplo', 'xch', 'xhit', 'error', 'pb', 'sbc', 'csc', 'week', 'season', 'created', 'game_id', 'roster_num']] + data_list = [ + [ + "id", + "card_id", + "player_id", + "cardset", + "team", + "vs_team", + "pos", + "pa", + "ab", + "run", + "hit", + "rbi", + "double", + "triple", + "hr", + "bb", + "so", + "hbp", + "sac", + "ibb", + "gidp", + "sb", + "cs", + "bphr", + "bpfo", + "bp1b", + "bplo", + "xch", + "xhit", + "error", + "pb", + "sbc", + "csc", + "week", + "season", + "created", + "game_id", + "roster_num", + ] + ] for line in all_stats: data_list.append( [ - line.id, line.card.id, line.card.player.player_id, line.card.player.cardset.name, line.team.abbrev, line.vs_team.abbrev, - line.pos, line.pa, line.ab, line.run, line.hit, line.rbi, line.double, line.triple, line.hr, - line.bb, line.so, line.hbp, line.sac, line.ibb, line.gidp, line.sb, line.cs, line.bphr, line.bpfo, - line.bp1b, line.bplo, line.xch, line.xhit, line.error, line.pb, line.sbc, line.csc, line.week, - line.season, line.created, line.game_id, line.roster_num + line.id, + line.card.id, + line.card.player.player_id, + line.card.player.cardset.name, + line.team.abbrev, + line.vs_team.abbrev, + line.pos, + line.pa, + line.ab, + line.run, + line.hit, + line.rbi, + line.double, + line.triple, + line.hr, + line.bb, + line.so, + line.hbp, + line.sac, + line.ibb, + line.gidp, + line.sb, + line.cs, + line.bphr, + line.bpfo, + line.bp1b, + line.bplo, + line.xch, + line.xhit, + line.error, + line.pb, + line.sbc, + line.csc, + 
line.week, + line.season, + line.created, + line.game_id, + line.roster_num, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_stats.count(), 'stats': []} + return_val = {"count": all_stats.count(), "stats": []} for x in all_stats: - return_val['stats'].append(model_to_dict(x, recurse=False)) + return_val["stats"].append(model_to_dict(x, recurse=False)) db.close() return return_val -@app.get('/api/v1/batstats/player/{player_id}') +@app.get("/api/v1/batstats/player/{player_id}") async def v1_batstats_get_card( - player_id: int, team_id: int = None, vs_team_id: int = None, week_start: int = None, week_end: int = None, - csv: bool = None): - all_stats = (BattingStat - .select(fn.COUNT(BattingStat.created).alias('game_count')) - .join(Card) - .group_by(BattingStat.card) - .where(BattingStat.card.player == player_id)).scalar() + player_id: int, + team_id: int = None, + vs_team_id: int = None, + week_start: int = None, + week_end: int = None, + csv: bool = None, +): + all_stats = ( + BattingStat.select(fn.COUNT(BattingStat.created).alias("game_count")) + .join(Card) + .group_by(BattingStat.card) + .where(BattingStat.card.player == player_id) + ).scalar() if team_id is not None: all_stats = all_stats.where(BattingStat.team_id == team_id) @@ -3825,54 +4666,123 @@ async def v1_batstats_get_card( if csv: data_list = [ [ - 'pa', 'ab', 'run', 'hit', 'rbi', 'double', 'triple', 'hr', 'bb', 'so', 'hbp', 'sac', 'ibb', 'gidp', - 'sb', 'cs', 'bphr', 'bpfo', 'bp1b', 'bplo', 'xch', 'xhit', 'error', 'pb', 'sbc', 'csc', - ],[ - all_stats.pa_sum, all_stats.ab_sum, all_stats.run, all_stats.hit_sum, all_stats.rbi_sum, - all_stats.double_sum, all_stats.triple_sum, all_stats.hr_sum, all_stats.bb_sum, all_stats.so_sum, - all_stats.hbp_sum, all_stats.sac, all_stats.ibb_sum, all_stats.gidp_sum, all_stats.sb_sum, - all_stats.cs_sum, all_stats.bphr_sum, all_stats.bpfo_sum, all_stats.bp1b_sum, all_stats.bplo_sum, - all_stats.xch, all_stats.xhit_sum, all_stats.error_sum, all_stats.pb_sum, all_stats.sbc_sum, - all_stats.csc_sum - ] + "pa", + "ab", + "run", + "hit", + "rbi", + "double", + "triple", + "hr", + "bb", + "so", + "hbp", + "sac", + "ibb", + "gidp", + "sb", + "cs", + "bphr", + "bpfo", + "bp1b", + "bplo", + "xch", + "xhit", + "error", + "pb", + "sbc", + "csc", + ], + [ + all_stats.pa_sum, + all_stats.ab_sum, + all_stats.run, + all_stats.hit_sum, + all_stats.rbi_sum, + all_stats.double_sum, + all_stats.triple_sum, + all_stats.hr_sum, + all_stats.bb_sum, + all_stats.so_sum, + all_stats.hbp_sum, + all_stats.sac, + all_stats.ibb_sum, + all_stats.gidp_sum, + all_stats.sb_sum, + all_stats.cs_sum, + all_stats.bphr_sum, + all_stats.bpfo_sum, + all_stats.bp1b_sum, + all_stats.bplo_sum, + all_stats.xch, + all_stats.xhit_sum, + all_stats.error_sum, + all_stats.pb_sum, + all_stats.sbc_sum, + all_stats.csc_sum, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - logging.debug(f'stat pull query: {all_stats}\n') + logging.debug(f"stat pull query: {all_stats}\n") # logging.debug(f'result 0: {all_stats[0]}\n') for x in all_stats: - logging.debug(f'this_line: {model_to_dict(x)}') + logging.debug(f"this_line: {model_to_dict(x)}") return_val = model_to_dict(all_stats[0]) db.close() return 
return_val -@app.get('/api/v1/plays/batting') +@app.get("/api/v1/plays/batting") async def get_batting_totals( - player_id: list = Query(default=None), team_id: list = Query(default=None), min_pa: Optional[int] = 1, - season: list = Query(default=None), position: list = Query(default=None), - group_by: Literal['team', 'player', 'playerteam', 'playergame', 'teamgame', 'league'] = 'player', - sort: Optional[str] = None, limit: Optional[int] = None, short_output: Optional[bool] = False): - all_stats = BattingStat.select( - BattingStat.card, BattingStat.game_id, BattingStat.team, BattingStat.vs_team, BattingStat.pos, - BattingStat.card.player.alias('player'), - fn.SUM(BattingStat.pa).alias('sum_pa'), fn.SUM(BattingStat.ab).alias('sum_ab'), - fn.SUM(BattingStat.run).alias('sum_run'), fn.SUM(BattingStat.so).alias('sum_so'), - fn.SUM(BattingStat.hit).alias('sum_hit'), fn.SUM(BattingStat.rbi).alias('sum_rbi'), - fn.SUM(BattingStat.double).alias('sum_double'), fn.SUM(BattingStat.triple).alias('sum_triple'), - fn.SUM(BattingStat.hr).alias('sum_hr'), fn.SUM(BattingStat.bb).alias('sum_bb'), - fn.SUM(BattingStat.hbp).alias('sum_hbp'), fn.SUM(BattingStat.sac).alias('sum_sac'), - fn.SUM(BattingStat.ibb).alias('sum_ibb'), fn.SUM(BattingStat.gidp).alias('sum_gidp'), - fn.SUM(BattingStat.sb).alias('sum_sb'), fn.SUM(BattingStat.cs).alias('sum_cs'), - fn.SUM(BattingStat.bphr).alias('sum_bphr'), fn.SUM(BattingStat.bpfo).alias('sum_bpfo'), - fn.SUM(BattingStat.bp1b).alias('sum_bp1b'), fn.SUM(BattingStat.bplo).alias('sum_bplo') - ).having( - fn.SUM(BattingStat.pa) >= min_pa - ).join(Card) + player_id: list = Query(default=None), + team_id: list = Query(default=None), + min_pa: Optional[int] = 1, + season: list = Query(default=None), + position: list = Query(default=None), + group_by: Literal[ + "team", "player", "playerteam", "playergame", "teamgame", "league" + ] = "player", + sort: Optional[str] = None, + limit: Optional[int] = None, + short_output: Optional[bool] = False, +): + all_stats = ( + BattingStat.select( + BattingStat.card, + BattingStat.game_id, + BattingStat.team, + BattingStat.vs_team, + BattingStat.pos, + BattingStat.card.player.alias("player"), + fn.SUM(BattingStat.pa).alias("sum_pa"), + fn.SUM(BattingStat.ab).alias("sum_ab"), + fn.SUM(BattingStat.run).alias("sum_run"), + fn.SUM(BattingStat.so).alias("sum_so"), + fn.SUM(BattingStat.hit).alias("sum_hit"), + fn.SUM(BattingStat.rbi).alias("sum_rbi"), + fn.SUM(BattingStat.double).alias("sum_double"), + fn.SUM(BattingStat.triple).alias("sum_triple"), + fn.SUM(BattingStat.hr).alias("sum_hr"), + fn.SUM(BattingStat.bb).alias("sum_bb"), + fn.SUM(BattingStat.hbp).alias("sum_hbp"), + fn.SUM(BattingStat.sac).alias("sum_sac"), + fn.SUM(BattingStat.ibb).alias("sum_ibb"), + fn.SUM(BattingStat.gidp).alias("sum_gidp"), + fn.SUM(BattingStat.sb).alias("sum_sb"), + fn.SUM(BattingStat.cs).alias("sum_cs"), + fn.SUM(BattingStat.bphr).alias("sum_bphr"), + fn.SUM(BattingStat.bpfo).alias("sum_bpfo"), + fn.SUM(BattingStat.bp1b).alias("sum_bp1b"), + fn.SUM(BattingStat.bplo).alias("sum_bplo"), + ) + .having(fn.SUM(BattingStat.pa) >= min_pa) + .join(Card) + ) if player_id is not None: # all_players = Player.select().where(Player.id << player_id) @@ -3886,24 +4796,24 @@ async def get_batting_totals( if position is not None: all_stats = all_stats.where(BattingStat.pos << position) - if group_by == 'player': - all_stats = all_stats.group_by(SQL('player')) - elif group_by == 'playerteam': - all_stats = all_stats.group_by(SQL('player'), BattingStat.team) - elif group_by == 
'playergame': - all_stats = all_stats.group_by(SQL('player'), BattingStat.game_id) - elif group_by == 'team': + if group_by == "player": + all_stats = all_stats.group_by(SQL("player")) + elif group_by == "playerteam": + all_stats = all_stats.group_by(SQL("player"), BattingStat.team) + elif group_by == "playergame": + all_stats = all_stats.group_by(SQL("player"), BattingStat.game_id) + elif group_by == "team": all_stats = all_stats.group_by(BattingStat.team) - elif group_by == 'teamgame': + elif group_by == "teamgame": all_stats = all_stats.group_by(BattingStat.team, BattingStat.game_id) - elif group_by == 'league': + elif group_by == "league": all_stats = all_stats.group_by(BattingStat.season) - if sort == 'pa-desc': - all_stats = all_stats.order_by(SQL('sum_pa').desc()) - elif sort == 'newest': + if sort == "pa-desc": + all_stats = all_stats.order_by(SQL("sum_pa").desc()) + elif sort == "newest": all_stats = all_stats.order_by(-BattingStat.game_id) - elif sort == 'oldest': + elif sort == "oldest": all_stats = all_stats.order_by(BattingStat.game_id) if limit is not None: @@ -3911,71 +4821,122 @@ async def get_batting_totals( limit = 1 all_stats = all_stats.limit(limit) - logging.info(f'bat_plays query: {all_stats}') + logging.info(f"bat_plays query: {all_stats}") return_stats = { - 'count': all_stats.count(), - 'stats': [{ - 'player': x.card.player_id if short_output else model_to_dict(x.card.player, recurse=False), - 'team': x.team_id if short_output else model_to_dict(x.team, recurse=False), - 'pa': x.sum_pa, - 'ab': x.sum_ab, - 'run': x.sum_run, - 'hit': x.sum_hit, - 'rbi': x.sum_rbi, - 'double': x.sum_double, - 'triple': x.sum_triple, - 'hr': x.sum_hr, - 'bb': x.sum_bb, - 'so': x.sum_so, - 'hbp': x.sum_hbp, - 'sac': x.sum_sac, - 'ibb': x.sum_ibb, - 'gidp': x.sum_gidp, - 'sb': x.sum_sb, - 'cs': x.sum_cs, - 'bphr': x.sum_bphr, - 'bpfo': x.sum_bpfo, - 'bp1b': x.sum_bp1b, - 'bplo': x.sum_bplo, - 'avg': x.sum_hit / max(x.sum_ab, 1), - 'obp': (x.sum_hit + x.sum_bb + x.sum_hbp + x.sum_ibb) / max(x.sum_pa, 1), - 'slg': (x.sum_hr * 4 + x.sum_triple * 3 + x.sum_double * 2 + - (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr)) / max(x.sum_ab, 1), - 'ops': ((x.sum_hit + x.sum_bb + x.sum_hbp + x.sum_ibb) / max(x.sum_pa, 1)) + - ((x.sum_hr * 4 + x.sum_triple * 3 + x.sum_double * 2 + - (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr)) / max(x.sum_ab, 1)), - 'woba': (.69 * x.sum_bb + .72 * x.sum_hbp + .89 * (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + - 1.27 * x.sum_double + 1.62 * x.sum_triple + 2.1 * x.sum_hr) / max(x.sum_pa - x.sum_ibb, 1), - 'game': x.game_id - } for x in all_stats] + "count": all_stats.count(), + "stats": [ + { + "player": x.card.player_id + if short_output + else model_to_dict(x.card.player, recurse=False), + "team": x.team_id + if short_output + else model_to_dict(x.team, recurse=False), + "pa": x.sum_pa, + "ab": x.sum_ab, + "run": x.sum_run, + "hit": x.sum_hit, + "rbi": x.sum_rbi, + "double": x.sum_double, + "triple": x.sum_triple, + "hr": x.sum_hr, + "bb": x.sum_bb, + "so": x.sum_so, + "hbp": x.sum_hbp, + "sac": x.sum_sac, + "ibb": x.sum_ibb, + "gidp": x.sum_gidp, + "sb": x.sum_sb, + "cs": x.sum_cs, + "bphr": x.sum_bphr, + "bpfo": x.sum_bpfo, + "bp1b": x.sum_bp1b, + "bplo": x.sum_bplo, + "avg": x.sum_hit / max(x.sum_ab, 1), + "obp": (x.sum_hit + x.sum_bb + x.sum_hbp + x.sum_ibb) + / max(x.sum_pa, 1), + "slg": ( + x.sum_hr * 4 + + x.sum_triple * 3 + + x.sum_double * 2 + + (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + ) + / max(x.sum_ab, 1), + "ops": ( 
+ (x.sum_hit + x.sum_bb + x.sum_hbp + x.sum_ibb) / max(x.sum_pa, 1) + ) + + ( + ( + x.sum_hr * 4 + + x.sum_triple * 3 + + x.sum_double * 2 + + (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + ) + / max(x.sum_ab, 1) + ), + "woba": ( + 0.69 * x.sum_bb + + 0.72 * x.sum_hbp + + 0.89 * (x.sum_hit - x.sum_double - x.sum_triple - x.sum_hr) + + 1.27 * x.sum_double + + 1.62 * x.sum_triple + + 2.1 * x.sum_hr + ) + / max(x.sum_pa - x.sum_ibb, 1), + "game": x.game_id, + } + for x in all_stats + ], } db.close() return return_stats -@app.get('/api/v1/plays/pitching') +@app.get("/api/v1/plays/pitching") async def get_pitching_totals( - player_id: list = Query(default=None), team_id: list = Query(default=None), season: list = Query(default=None), - group_by: Literal['team', 'player', 'playerteam', 'playergame', 'teamgame', 'league'] = 'player', - min_pa: Optional[int] = 1, - sort: Optional[str] = None, limit: Optional[int] = None, short_output: Optional[bool] = False): - all_stats = PitchingStat.select( - PitchingStat.card, PitchingStat.team, PitchingStat.game_id, PitchingStat.vs_team, - PitchingStat.card.player.alias('player'), fn.SUM(PitchingStat.ip).alias('sum_ip'), - fn.SUM(PitchingStat.hit).alias('sum_hit'), fn.SUM(PitchingStat.run).alias('sum_run'), - fn.SUM(PitchingStat.erun).alias('sum_erun'), fn.SUM(PitchingStat.so).alias('sum_so'), - fn.SUM(PitchingStat.bb).alias('sum_bb'), fn.SUM(PitchingStat.hbp).alias('sum_hbp'), - fn.SUM(PitchingStat.wp).alias('sum_wp'), fn.SUM(PitchingStat.balk).alias('sum_balk'), - fn.SUM(PitchingStat.hr).alias('sum_hr'), fn.SUM(PitchingStat.ir).alias('sum_ir'), - fn.SUM(PitchingStat.irs).alias('sum_irs'), fn.SUM(PitchingStat.gs).alias('sum_gs'), - fn.SUM(PitchingStat.win).alias('sum_win'), fn.SUM(PitchingStat.loss).alias('sum_loss'), - fn.SUM(PitchingStat.hold).alias('sum_hold'), fn.SUM(PitchingStat.sv).alias('sum_sv'), - fn.SUM(PitchingStat.bsv).alias('sum_bsv'), fn.COUNT(PitchingStat.game_id).alias('sum_games') - ).having( - fn.SUM(PitchingStat.ip) >= max(min_pa / 3, 1) - ).join(Card) + player_id: list = Query(default=None), + team_id: list = Query(default=None), + season: list = Query(default=None), + group_by: Literal[ + "team", "player", "playerteam", "playergame", "teamgame", "league" + ] = "player", + min_pa: Optional[int] = 1, + sort: Optional[str] = None, + limit: Optional[int] = None, + short_output: Optional[bool] = False, +): + all_stats = ( + PitchingStat.select( + PitchingStat.card, + PitchingStat.team, + PitchingStat.game_id, + PitchingStat.vs_team, + PitchingStat.card.player.alias("player"), + fn.SUM(PitchingStat.ip).alias("sum_ip"), + fn.SUM(PitchingStat.hit).alias("sum_hit"), + fn.SUM(PitchingStat.run).alias("sum_run"), + fn.SUM(PitchingStat.erun).alias("sum_erun"), + fn.SUM(PitchingStat.so).alias("sum_so"), + fn.SUM(PitchingStat.bb).alias("sum_bb"), + fn.SUM(PitchingStat.hbp).alias("sum_hbp"), + fn.SUM(PitchingStat.wp).alias("sum_wp"), + fn.SUM(PitchingStat.balk).alias("sum_balk"), + fn.SUM(PitchingStat.hr).alias("sum_hr"), + fn.SUM(PitchingStat.ir).alias("sum_ir"), + fn.SUM(PitchingStat.irs).alias("sum_irs"), + fn.SUM(PitchingStat.gs).alias("sum_gs"), + fn.SUM(PitchingStat.win).alias("sum_win"), + fn.SUM(PitchingStat.loss).alias("sum_loss"), + fn.SUM(PitchingStat.hold).alias("sum_hold"), + fn.SUM(PitchingStat.sv).alias("sum_sv"), + fn.SUM(PitchingStat.bsv).alias("sum_bsv"), + fn.COUNT(PitchingStat.game_id).alias("sum_games"), + ) + .having(fn.SUM(PitchingStat.ip) >= max(min_pa / 3, 1)) + .join(Card) + ) if player_id is not None: all_cards = 
Card.select().where(Card.player_id << player_id) @@ -3986,24 +4947,24 @@ async def get_pitching_totals( if season is not None: all_stats = all_stats.where(PitchingStat.season << season) - if group_by == 'player': - all_stats = all_stats.group_by(SQL('player')) - elif group_by == 'playerteam': - all_stats = all_stats.group_by(SQL('player'), PitchingStat.team) - elif group_by == 'playergame': - all_stats = all_stats.group_by(SQL('player'), PitchingStat.game_id) - elif group_by == 'team': + if group_by == "player": + all_stats = all_stats.group_by(SQL("player")) + elif group_by == "playerteam": + all_stats = all_stats.group_by(SQL("player"), PitchingStat.team) + elif group_by == "playergame": + all_stats = all_stats.group_by(SQL("player"), PitchingStat.game_id) + elif group_by == "team": all_stats = all_stats.group_by(PitchingStat.team) - elif group_by == 'teamgame': + elif group_by == "teamgame": all_stats = all_stats.group_by(PitchingStat.team, PitchingStat.game_id) - elif group_by == 'league': + elif group_by == "league": all_stats = all_stats.group_by(PitchingStat.season) - if sort == 'pa-desc': - all_stats = all_stats.order_by(SQL('sum_pa').desc()) - elif sort == 'newest': + if sort == "pa-desc": + all_stats = all_stats.order_by(SQL("sum_pa").desc()) + elif sort == "newest": all_stats = all_stats.order_by(-PitchingStat.game_id) - elif sort == 'oldest': + elif sort == "oldest": all_stats = all_stats.order_by(PitchingStat.game_id) if limit is not None: @@ -4011,58 +4972,67 @@ async def get_pitching_totals( limit = 1 all_stats = all_stats.limit(limit) - logging.info(f'bat_plays query: {all_stats}') + logging.info(f"bat_plays query: {all_stats}") return_stats = { - 'count': all_stats.count(), - 'stats': [{ - 'player': x.card.player_id if short_output else model_to_dict(x.card.player, recurse=False), - 'team': x.team_id if short_output else model_to_dict(x.team, recurse=False), - 'tbf': None, - 'outs': round(x.sum_ip * 3), - 'games': x.sum_games, - 'gs': x.sum_gs, - 'win': x.sum_win, - 'loss': x.sum_loss, - 'hold': x.sum_hold, - 'save': x.sum_sv, - 'bsave': x.sum_bsv, - 'ir': x.sum_ir, - 'ir_sc': x.sum_irs, - 'runs': x.sum_run, - 'e_runs': x.sum_erun, - 'hits': x.sum_hit, - 'hr': x.sum_hr, - 'bb': x.sum_bb, - 'so': x.sum_so, - 'hbp': x.sum_hbp, - 'wp': x.sum_wp, - 'balk': x.sum_balk, - 'era': (x.sum_erun * 27) / round(x.sum_ip * 3), - 'whip': (x.sum_bb + x.sum_hit) / x.sum_ip, - 'avg': None, - 'obp': None, - 'woba': None, - 'k/9': x.sum_so * 9 / x.sum_ip, - 'bb/9': x.sum_bb * 9 / x.sum_ip, - 'k/bb': x.sum_so / max(x.sum_bb, .1), - 'game': None, - 'lob_2outs': None, - 'rbi%': None - } for x in all_stats] + "count": all_stats.count(), + "stats": [ + { + "player": x.card.player_id + if short_output + else model_to_dict(x.card.player, recurse=False), + "team": x.team_id + if short_output + else model_to_dict(x.team, recurse=False), + "tbf": None, + "outs": round(x.sum_ip * 3), + "games": x.sum_games, + "gs": x.sum_gs, + "win": x.sum_win, + "loss": x.sum_loss, + "hold": x.sum_hold, + "save": x.sum_sv, + "bsave": x.sum_bsv, + "ir": x.sum_ir, + "ir_sc": x.sum_irs, + "runs": x.sum_run, + "e_runs": x.sum_erun, + "hits": x.sum_hit, + "hr": x.sum_hr, + "bb": x.sum_bb, + "so": x.sum_so, + "hbp": x.sum_hbp, + "wp": x.sum_wp, + "balk": x.sum_balk, + "era": (x.sum_erun * 27) / round(x.sum_ip * 3), + "whip": (x.sum_bb + x.sum_hit) / x.sum_ip, + "avg": None, + "obp": None, + "woba": None, + "k/9": x.sum_so * 9 / x.sum_ip, + "bb/9": x.sum_bb * 9 / x.sum_ip, + "k/bb": x.sum_so / max(x.sum_bb, 0.1), + "game": 
None, + "lob_2outs": None, + "rbi%": None, + } + for x in all_stats + ], } db.close() return return_stats -@app.post('/api/v1/batstats') -async def v1_batstats_post(stats: BattingStatModel, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/batstats") +async def v1_batstats_post( + stats: BattingStatModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post stats. This event has been logged.' + detail="You are not authorized to post stats. This event has been logged.", ) new_stats = [] @@ -4102,7 +5072,7 @@ async def v1_batstats_post(stats: BattingStatModel, token: str = Depends(oauth2_ week=x.week, season=x.season, created=x.created, - game_id=x.game_id + game_id=x.game_id, ) new_stats.append(this_stat) @@ -4110,31 +5080,33 @@ async def v1_batstats_post(stats: BattingStatModel, token: str = Depends(oauth2_ BattingStat.bulk_create(new_stats, batch_size=15) db.close() - raise HTTPException(status_code=200, detail=f'{len(new_stats)} batting lines have been added') + raise HTTPException( + status_code=200, detail=f"{len(new_stats)} batting lines have been added" + ) -@app.delete('/api/v1/batstats/{stat_id}') +@app.delete("/api/v1/batstats/{stat_id}") async def v1_rewards_delete(stat_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete stats. This event has been logged.' + detail="You are not authorized to delete stats. This event has been logged.", ) try: this_reward = Reward.get_by_id(stat_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No stat found with id {stat_id}') + raise HTTPException(status_code=404, detail=f"No stat found with id {stat_id}") count = this_reward.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Stat {stat_id} has been deleted') + raise HTTPException(status_code=200, detail=f"Stat {stat_id} has been deleted") else: - raise HTTPException(status_code=500, detail=f'Stat {stat_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Stat {stat_id} was not deleted") """ @@ -4167,7 +5139,7 @@ class PitStat(pydantic.BaseModel): bsv: Optional[int] = 0 week: int season: int - created: Optional[int] = int(datetime.timestamp(datetime.now())*100000) + created: Optional[int] = int(datetime.timestamp(datetime.now()) * 100000) game_id: int @@ -4175,13 +5147,22 @@ class PitchingStatModel(pydantic.BaseModel): stats: List[PitStat] -@app.get('/api/v1/pitstats') +@app.get("/api/v1/pitstats") async def v1_pitstats_get( - card_id: int = None, player_id: int = None, team_id: int = None, vs_team_id: int = None, week: int = None, - season: int = None, week_start: int = None, week_end: int = None, created: int = None, gs: bool = None, - csv: bool = None): + card_id: int = None, + player_id: int = None, + team_id: int = None, + vs_team_id: int = None, + week: int = None, + season: int = None, + week_start: int = None, + week_end: int = None, + created: int = None, + gs: bool = None, + csv: bool = None, +): all_stats = PitchingStat.select().join(Card).join(Player) - logging.debug(f'pit query:\n\n{all_stats}') + logging.debug(f"pit query:\n\n{all_stats}") if card_id is not None: all_stats = all_stats.where(PitchingStat.card_id == 
card_id) @@ -4209,41 +5190,97 @@ async def v1_pitstats_get( # raise HTTPException(status_code=404, detail=f'No pitching stats found') if csv: - data_list = [['id', 'card_id', 'player_id', 'cardset', 'team', 'vs_team', 'ip', 'hit', 'run', 'erun', 'so', 'bb', 'hbp', - 'wp', 'balk', 'hr', 'ir', 'irs', 'gs', 'win', 'loss', 'hold', 'sv', 'bsv', 'week', 'season', - 'created', 'game_id', 'roster_num']] + data_list = [ + [ + "id", + "card_id", + "player_id", + "cardset", + "team", + "vs_team", + "ip", + "hit", + "run", + "erun", + "so", + "bb", + "hbp", + "wp", + "balk", + "hr", + "ir", + "irs", + "gs", + "win", + "loss", + "hold", + "sv", + "bsv", + "week", + "season", + "created", + "game_id", + "roster_num", + ] + ] for line in all_stats: data_list.append( [ - line.id, line.card.id, line.card.player.player_id, line.card.player.cardset.name, line.team.abbrev, - line.vs_team.abbrev, line.ip, line.hit, - line.run, line.erun, line.so, line.bb, line.hbp, line.wp, line.balk, line.hr, line.ir, line.irs, - line.gs, line.win, line.loss, line.hold, line.sv, line.bsv, line.week, line.season, line.created, - line.game_id, line.roster_num + line.id, + line.card.id, + line.card.player.player_id, + line.card.player.cardset.name, + line.team.abbrev, + line.vs_team.abbrev, + line.ip, + line.hit, + line.run, + line.erun, + line.so, + line.bb, + line.hbp, + line.wp, + line.balk, + line.hr, + line.ir, + line.irs, + line.gs, + line.win, + line.loss, + line.hold, + line.sv, + line.bsv, + line.week, + line.season, + line.created, + line.game_id, + line.roster_num, ] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_stats.count(), 'stats': []} + return_val = {"count": all_stats.count(), "stats": []} for x in all_stats: - return_val['stats'].append(model_to_dict(x, recurse=False)) + return_val["stats"].append(model_to_dict(x, recurse=False)) db.close() return return_val -@app.post('/api/v1/pitstats') -async def v1_batstats_post(stats: PitchingStatModel, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/pitstats") +async def v1_batstats_post( + stats: PitchingStatModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post stats. This event has been logged.' + detail="You are not authorized to post stats. 
This event has been logged.", ) new_stats = [] @@ -4274,7 +5311,7 @@ async def v1_batstats_post(stats: PitchingStatModel, token: str = Depends(oauth2 week=x.week, season=x.season, created=x.created, - game_id=x.game_id + game_id=x.game_id, ) new_stats.append(this_stat) @@ -4282,31 +5319,33 @@ async def v1_batstats_post(stats: PitchingStatModel, token: str = Depends(oauth2 PitchingStat.bulk_create(new_stats, batch_size=15) db.close() - raise HTTPException(status_code=200, detail=f'{len(new_stats)} pitching lines have been added') + raise HTTPException( + status_code=200, detail=f"{len(new_stats)} pitching lines have been added" + ) -@app.delete('/api/v1/pitstats/{stat_id}') +@app.delete("/api/v1/pitstats/{stat_id}") async def v1_rewards_delete(stat_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete stats. This event has been logged.' + detail="You are not authorized to delete stats. This event has been logged.", ) try: this_reward = Reward.get_by_id(stat_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No stat found with id {stat_id}') + raise HTTPException(status_code=404, detail=f"No stat found with id {stat_id}") count = this_reward.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Stat {stat_id} has been deleted') + raise HTTPException(status_code=200, detail=f"Stat {stat_id} has been deleted") else: - raise HTTPException(status_code=500, detail=f'Stat {stat_id} was not deleted') + raise HTTPException(status_code=500, detail=f"Stat {stat_id} was not deleted") """ @@ -4320,20 +5359,28 @@ class NotifModel(pydantic.BaseModel): desc: Optional[str] = None field_name: str message: str - about: Optional[str] = 'blank' + about: Optional[str] = "blank" ack: Optional[bool] = False -@app.get('/api/v1/notifs') +@app.get("/api/v1/notifs") async def v1_notifs_get( - created_after: Optional[int] = None, title: Optional[str] = None, desc: Optional[str] = None, - field_name: Optional[str] = None, in_desc: Optional[str] = None, about: Optional[str] = None, - ack: Optional[bool] = None, csv: Optional[bool] = None): + created_after: Optional[int] = None, + title: Optional[str] = None, + desc: Optional[str] = None, + field_name: Optional[str] = None, + in_desc: Optional[str] = None, + about: Optional[str] = None, + ack: Optional[bool] = None, + csv: Optional[bool] = None, +): all_notif = Notification.select() if all_notif.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no notifications to filter') + raise HTTPException( + status_code=404, detail=f"There are no notifications to filter" + ) if created_after is not None: all_notif = all_notif.where(Notification.created < created_after) @@ -4344,50 +5391,73 @@ async def v1_notifs_get( if field_name is not None: all_notif = all_notif.where(Notification.field_name == field_name) if in_desc is not None: - all_notif = all_notif.where(fn.Lower(Notification.desc).contains(in_desc.lower())) + all_notif = all_notif.where( + fn.Lower(Notification.desc).contains(in_desc.lower()) + ) if about is not None: all_notif = all_notif.where(Notification.about == about) if ack is not None: all_notif = all_notif.where(Notification.ack == ack) if csv: - data_list = [['id', 'created', 'title', 'desc', 'field_name', 'message', 'about', 'ack']] + data_list = [ + ["id", "created", "title", 
"desc", "field_name", "message", "about", "ack"] + ] for line in all_notif: - data_list.append([ - line.id, line.created, line.title, line.desc, line.field_name, line.message, line.about, line.ack - ]) + data_list.append( + [ + line.id, + line.created, + line.title, + line.desc, + line.field_name, + line.message, + line.about, + line.ack, + ] + ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_notif.count(), 'notifs': []} + return_val = {"count": all_notif.count(), "notifs": []} for x in all_notif: - return_val['notifs'].append(model_to_dict(x)) + return_val["notifs"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/notifs/{notif_id}') +@app.get("/api/v1/notifs/{notif_id}") async def v1_notifs_get_one(notif_id, csv: Optional[bool] = None): try: this_notif = Notification.get_by_id(notif_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No notification found with id {notif_id}') + raise HTTPException( + status_code=404, detail=f"No notification found with id {notif_id}" + ) if csv: data_list = [ - ['id', 'created', 'title', 'desc', 'field_name', 'message', 'about', 'ack'], - [this_notif.id, this_notif.created, this_notif.title, this_notif.desc, this_notif.field_name, - this_notif.message, this_notif.about, this_notif.ack] + ["id", "created", "title", "desc", "field_name", "message", "about", "ack"], + [ + this_notif.id, + this_notif.created, + this_notif.title, + this_notif.desc, + this_notif.field_name, + this_notif.message, + this_notif.about, + this_notif.ack, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_notif) @@ -4395,17 +5465,17 @@ async def v1_notifs_get_one(notif_id, csv: Optional[bool] = None): return return_val -@app.post('/api/v1/notifs') +@app.post("/api/v1/notifs") async def v1_notifs_post(notif: NotifModel, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post notifications. This event has been logged.' + detail="You are not authorized to post notifications. 
This event has been logged.", ) - logging.info(f'new notif: {notif}') + logging.info(f"new notif: {notif}") this_notif = Notification( created=notif.created, title=notif.title, @@ -4424,27 +5494,36 @@ async def v1_notifs_post(notif: NotifModel, token: str = Depends(oauth2_scheme)) db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that notification' + detail="Well slap my ass and call me a teapot; I could not save that notification", ) -@app.patch('/api/v1/notifs/{notif_id}') +@app.patch("/api/v1/notifs/{notif_id}") async def v1_rewards_patch( - notif_id, created: Optional[int] = None, title: Optional[str] = None, desc: Optional[str] = None, - field_name: Optional[str] = None, message: Optional[str] = None, about: Optional[str] = None, - ack: Optional[bool] = None, token: str = Depends(oauth2_scheme)): + notif_id, + created: Optional[int] = None, + title: Optional[str] = None, + desc: Optional[str] = None, + field_name: Optional[str] = None, + message: Optional[str] = None, + about: Optional[str] = None, + ack: Optional[bool] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch notifications. This event has been logged.' + detail="You are not authorized to patch notifications. This event has been logged.", ) try: this_notif = Notification.get_by_id(notif_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No notification found with id {notif_id}') + raise HTTPException( + status_code=404, detail=f"No notification found with id {notif_id}" + ) if title is not None: this_notif.title = title @@ -4468,32 +5547,38 @@ async def v1_rewards_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/notifs/{notif_id}') +@app.delete("/api/v1/notifs/{notif_id}") async def v1_notifs_delete(notif_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete notifications. This event has been logged.' + detail="You are not authorized to delete notifications. 
This event has been logged.", ) try: this_notif = Notification.get_by_id(notif_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No notification found with id {notif_id}') + raise HTTPException( + status_code=404, detail=f"No notification found with id {notif_id}" + ) count = this_notif.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Notification {notif_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Notification {notif_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Notification {notif_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Notification {notif_id} was not deleted" + ) """ @@ -4504,19 +5589,24 @@ PAPERDEX ENDPOINTS class PaperdexModel(pydantic.BaseModel): team_id: int player_id: int - created: Optional[int] = int(datetime.timestamp(datetime.now())*1000) + created: Optional[int] = int(datetime.timestamp(datetime.now()) * 1000) -@app.get('/api/v1/paperdex') +@app.get("/api/v1/paperdex") async def v1_paperdex_get( - team_id: Optional[int] = None, player_id: Optional[int] = None, created_after: Optional[int] = None, - cardset_id: Optional[int] = None, created_before: Optional[int] = None, flat: Optional[bool] = False, - csv: Optional[bool] = None): + team_id: Optional[int] = None, + player_id: Optional[int] = None, + created_after: Optional[int] = None, + cardset_id: Optional[int] = None, + created_before: Optional[int] = None, + flat: Optional[bool] = False, + csv: Optional[bool] = None, +): all_dex = Paperdex.select().join(Player).join(Cardset) if all_dex.count() == 0: db.close() - raise HTTPException(status_code=404, detail=f'There are no paperdex to filter') + raise HTTPException(status_code=404, detail=f"There are no paperdex to filter") if team_id is not None: all_dex = all_dex.where(Paperdex.team_id == team_id) @@ -4534,44 +5624,44 @@ async def v1_paperdex_get( # raise HTTPException(status_code=404, detail=f'No paperdex found') if csv: - data_list = [['id', 'team_id', 'player_id', 'created']] + data_list = [["id", "team_id", "player_id", "created"]] for line in all_dex: data_list.append( - [ - line.id, line.team.id, line.player.player_id, line.created - ] + [line.id, line.team.id, line.player.player_id, line.created] ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_dex.count(), 'paperdex': []} + return_val = {"count": all_dex.count(), "paperdex": []} for x in all_dex: - return_val['paperdex'].append(model_to_dict(x, recurse=not flat)) + return_val["paperdex"].append(model_to_dict(x, recurse=not flat)) db.close() return return_val -@app.get('/api/v1/paperdex/{paperdex_id}') +@app.get("/api/v1/paperdex/{paperdex_id}") async def v1_paperdex_get_one(paperdex_id, csv: Optional[bool] = False): try: this_dex = Paperdex.get_by_id(paperdex_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No paperdex found with id {paperdex_id}') + raise HTTPException( + status_code=404, detail=f"No paperdex found with id {paperdex_id}" + ) if csv: data_list = [ - ['id', 'team_id', 'player_id', 'created'], - [this_dex.id, this_dex.team.id, this_dex.player.id, this_dex.created] + ["id", "team_id", "player_id", "created"], + [this_dex.id, this_dex.team.id, this_dex.player.id, this_dex.created], ] return_val = 
DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_dex) @@ -4579,26 +5669,28 @@ async def v1_paperdex_get_one(paperdex_id, csv: Optional[bool] = False): return return_val -@app.post('/api/v1/paperdex') -async def v1_paperdex_post(paperdex: PaperdexModel, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/paperdex") +async def v1_paperdex_post( + paperdex: PaperdexModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post paperdex. This event has been logged.' + detail="You are not authorized to post paperdex. This event has been logged.", ) - dupe_dex = Paperdex.get_or_none(Paperdex.team_id == paperdex.team_id, Paperdex.player_id == paperdex.player_id) + dupe_dex = Paperdex.get_or_none( + Paperdex.team_id == paperdex.team_id, Paperdex.player_id == paperdex.player_id + ) if dupe_dex: return_val = model_to_dict(dupe_dex) db.close() return return_val this_dex = Paperdex( - team_id=paperdex.team_id, - player_id=paperdex.player_id, - created=paperdex.created + team_id=paperdex.team_id, player_id=paperdex.player_id, created=paperdex.created ) saved = this_dex.save() @@ -4609,26 +5701,32 @@ async def v1_paperdex_post(paperdex: PaperdexModel, token: str = Depends(oauth2_ else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that dex' + detail="Well slap my ass and call me a teapot; I could not save that dex", ) -@app.patch('/api/v1/paperdex/{paperdex_id}') +@app.patch("/api/v1/paperdex/{paperdex_id}") async def v1_paperdex_patch( - paperdex_id, team_id: Optional[int] = None, player_id: Optional[int] = None, created: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + paperdex_id, + team_id: Optional[int] = None, + player_id: Optional[int] = None, + created: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch paperdex. This event has been logged.' + detail="You are not authorized to patch paperdex. This event has been logged.", ) try: this_dex = Paperdex.get_by_id(paperdex_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No paperdex found with id {paperdex_id}') + raise HTTPException( + status_code=404, detail=f"No paperdex found with id {paperdex_id}" + ) if team_id is not None: this_dex.team_id = team_id @@ -4644,47 +5742,50 @@ async def v1_paperdex_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/paperdex/{paperdex_id}') +@app.delete("/api/v1/paperdex/{paperdex_id}") async def v1_paperdex_delete(paperdex_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete rewards. This event has been logged.' + detail="You are not authorized to delete rewards. 
This event has been logged.", ) try: this_dex = Paperdex.get_by_id(paperdex_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No paperdex found with id {paperdex_id}') + raise HTTPException( + status_code=404, detail=f"No paperdex found with id {paperdex_id}" + ) count = this_dex.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Paperdex {this_dex} has been deleted') - else: - raise HTTPException(status_code=500, detail=f'Paperdex {this_dex} was not deleted') - - -@app.post('/api/v1/paperdex/wipe-ai') -async def v1_paperdex_wipeai(token: str = Depends(oauth2_scheme)): - if not valid_token(token): - logging.warning(f'Bad Token: {token}') - db.close() raise HTTPException( - status_code=401, - detail='Unauthorized' + status_code=200, detail=f"Paperdex {this_dex} has been deleted" + ) + else: + raise HTTPException( + status_code=500, detail=f"Paperdex {this_dex} was not deleted" ) - g_teams = Team.select().where(Team.abbrev.contains('Gauntlet')) + +@app.post("/api/v1/paperdex/wipe-ai") +async def v1_paperdex_wipeai(token: str = Depends(oauth2_scheme)): + if not valid_token(token): + logging.warning(f"Bad Token: {token}") + db.close() + raise HTTPException(status_code=401, detail="Unauthorized") + + g_teams = Team.select().where(Team.abbrev.contains("Gauntlet")) count = Paperdex.delete().where(Paperdex.team << g_teams).execute() - return f'Deleted {count} records' + return f"Deleted {count} records" """ @@ -4699,10 +5800,14 @@ class GameRewardModel(pydantic.BaseModel): money: Optional[int] = None -@app.get('/api/v1/gamerewards') +@app.get("/api/v1/gamerewards") async def v1_gamerewards_get( - name: Optional[str] = None, pack_type_id: Optional[int] = None, player_id: Optional[int] = None, - money: Optional[int] = None, csv: Optional[bool] = None): + name: Optional[str] = None, + pack_type_id: Optional[int] = None, + player_id: Optional[int] = None, + money: Optional[int] = None, + csv: Optional[bool] = None, +): all_rewards = GameRewards.select() # if all_rewards.count() == 0: @@ -4719,44 +5824,54 @@ async def v1_gamerewards_get( all_rewards = all_rewards.where(GameRewards.money == money) if csv: - data_list = [['id', 'pack_type_id', 'player_id', 'money']] + data_list = [["id", "pack_type_id", "player_id", "money"]] for line in all_rewards: - data_list.append([ - line.id, line.pack_type_id if line.pack_type else None, line.player_id if line.player else None, - line.money - ]) + data_list.append( + [ + line.id, + line.pack_type_id if line.pack_type else None, + line.player_id if line.player else None, + line.money, + ] + ) return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: - return_val = {'count': all_rewards.count(), 'gamerewards': []} + return_val = {"count": all_rewards.count(), "gamerewards": []} for x in all_rewards: - return_val['gamerewards'].append(model_to_dict(x)) + return_val["gamerewards"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/gamerewards/{gameaward_id}') +@app.get("/api/v1/gamerewards/{gameaward_id}") async def v1_gamerewards_get_one(gamereward_id, csv: Optional[bool] = None): try: this_game_reward = GameRewards.get_by_id(gamereward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No game reward found with id {gamereward_id}') + raise HTTPException( + status_code=404, detail=f"No game reward 
found with id {gamereward_id}" + ) if csv: data_list = [ - ['id', 'pack_type_id', 'player_id', 'money'], - [this_game_reward.id, this_game_reward.pack_type_id if this_game_reward.pack_type else None, - this_game_reward.player_id if this_game_reward.player else None, this_game_reward.money] + ["id", "pack_type_id", "player_id", "money"], + [ + this_game_reward.id, + this_game_reward.pack_type_id if this_game_reward.pack_type else None, + this_game_reward.player_id if this_game_reward.player else None, + this_game_reward.money, + ], ] return_val = DataFrame(data_list).to_csv(header=False, index=False) db.close() - return Response(content=return_val, media_type='text/csv') + return Response(content=return_val, media_type="text/csv") else: return_val = model_to_dict(this_game_reward) @@ -4764,21 +5879,23 @@ async def v1_gamerewards_get_one(gamereward_id, csv: Optional[bool] = None): return return_val -@app.post('/api/v1/gamerewards') -async def v1_gamerewards_post(game_reward: GameRewardModel, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/gamerewards") +async def v1_gamerewards_post( + game_reward: GameRewardModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post game rewards. This event has been logged.' + detail="You are not authorized to post game rewards. This event has been logged.", ) this_award = GameRewards( name=game_reward.name, pack_type_id=game_reward.pack_type_id, player_id=game_reward.player_id, - money=game_reward.money + money=game_reward.money, ) saved = this_award.save() @@ -4790,26 +5907,33 @@ async def v1_gamerewards_post(game_reward: GameRewardModel, token: str = Depends db.close() raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that roster' + detail="Well slap my ass and call me a teapot; I could not save that roster", ) -@app.patch('/api/v1/gamerewards/{game_reward_id}') +@app.patch("/api/v1/gamerewards/{game_reward_id}") async def v1_gamerewards_patch( - game_reward_id: int, name: Optional[str] = None, pack_type_id: Optional[int] = None, - player_id: Optional[int] = None, money: Optional[int] = None, token: str = Depends(oauth2_scheme)): + game_reward_id: int, + name: Optional[str] = None, + pack_type_id: Optional[int] = None, + player_id: Optional[int] = None, + money: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch gamerewards. This event has been logged.' + detail="You are not authorized to patch gamerewards. 
This event has been logged.", ) try: this_game_reward = GameRewards.get_by_id(game_reward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No game reward found with id {game_reward_id}') + raise HTTPException( + status_code=404, detail=f"No game reward found with id {game_reward_id}" + ) if name is not None: this_game_reward.name = name @@ -4836,32 +5960,38 @@ async def v1_gamerewards_patch( else: raise HTTPException( status_code=418, - detail='Well slap my ass and call me a teapot; I could not save that rarity' + detail="Well slap my ass and call me a teapot; I could not save that rarity", ) -@app.delete('/api/v1/gamerewards/{gamereward_id}') +@app.delete("/api/v1/gamerewards/{gamereward_id}") async def v1_gamerewards_delete(gamereward_id, token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to delete awards. This event has been logged.' + detail="You are not authorized to delete awards. This event has been logged.", ) try: this_award = GameRewards.get_by_id(gamereward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No award found with id {gamereward_id}') + raise HTTPException( + status_code=404, detail=f"No award found with id {gamereward_id}" + ) count = this_award.delete_instance() db.close() if count == 1: - raise HTTPException(status_code=200, detail=f'Game Reward {gamereward_id} has been deleted') + raise HTTPException( + status_code=200, detail=f"Game Reward {gamereward_id} has been deleted" + ) else: - raise HTTPException(status_code=500, detail=f'Game Reward {gamereward_id} was not deleted') + raise HTTPException( + status_code=500, detail=f"Game Reward {gamereward_id} was not deleted" + ) """ @@ -4881,10 +6011,14 @@ class GauntletRewardList(pydantic.BaseModel): rewards: List[GauntletRewardModel] -@app.get('/api/v1/gauntletrewards') +@app.get("/api/v1/gauntletrewards") async def v1_gauntletreward_get( - name: Optional[str] = None, gauntlet_id: Optional[int] = None, reward_id: list = Query(default=None), - win_num: Optional[int] = None, loss_max: Optional[int] = None): + name: Optional[str] = None, + gauntlet_id: Optional[int] = None, + reward_id: list = Query(default=None), + win_num: Optional[int] = None, + loss_max: Optional[int] = None, +): all_rewards = GauntletReward.select() if name is not None: @@ -4900,44 +6034,52 @@ async def v1_gauntletreward_get( all_rewards = all_rewards.order_by(-GauntletReward.loss_max, GauntletReward.win_num) - return_val = {'count': all_rewards.count(), 'rewards': []} + return_val = {"count": all_rewards.count(), "rewards": []} for x in all_rewards: - return_val['rewards'].append(model_to_dict(x)) + return_val["rewards"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/gauntletrewards/{gauntletreward_id}') +@app.get("/api/v1/gauntletrewards/{gauntletreward_id}") async def v1_gauntletreward_get_one(gauntletreward_id): try: this_reward = GauntletReward.get_by_id(gauntletreward_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No gauntlet reward found with id {gauntletreward_id}') + raise HTTPException( + status_code=404, + detail=f"No gauntlet reward found with id {gauntletreward_id}", + ) return_val = model_to_dict(this_reward) db.close() return return_val -@app.patch('/api/v1/gauntletrewards/{gauntletreward_id}') 
+@app.patch("/api/v1/gauntletrewards/{gauntletreward_id}") async def v1_gauntletreward_patch( - gauntletreward_id, name: Optional[str] = None, gauntlet_id: Optional[int] = None, - reward_id: Optional[int] = None, win_num: Optional[int] = None, loss_max: Optional[int] = None, - token: str = Depends(oauth2_scheme)): + gauntletreward_id, + name: Optional[str] = None, + gauntlet_id: Optional[int] = None, + reward_id: Optional[int] = None, + win_num: Optional[int] = None, + loss_max: Optional[int] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch gauntlet rewards. This event has been logged.' + detail="You are not authorized to patch gauntlet rewards. This event has been logged.", ) this_reward = GauntletReward.get_or_none(GauntletReward.id == gauntletreward_id) if this_reward is None: db.close() - raise KeyError(f'Gauntlet Reward ID {gauntletreward_id} not found') + raise KeyError(f"Gauntlet Reward ID {gauntletreward_id} not found") if gauntlet_id is not None: this_reward.gauntlet_id = gauntlet_id @@ -4956,17 +6098,19 @@ async def v1_gauntletreward_patch( return r_curr else: db.close() - raise DatabaseError(f'Unable to patch gauntlet reward {gauntletreward_id}') + raise DatabaseError(f"Unable to patch gauntlet reward {gauntletreward_id}") -@app.post('/api/v1/gauntletrewards') -async def v1_gauntletreward_post(gauntletreward: GauntletRewardList, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/gauntletrewards") +async def v1_gauntletreward_post( + gauntletreward: GauntletRewardList, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post gauntlets. This event has been logged.' + detail="You are not authorized to post gauntlets. 
This event has been logged.", ) all_rewards = [] @@ -4974,19 +6118,19 @@ async def v1_gauntletreward_post(gauntletreward: GauntletRewardList, token: str all_rewards.append(x.dict()) with db.atomic(): - for batch in chunked(all_rewards, 15): - GauntletReward.insert_many(batch).on_conflict_replace().execute() + # Use PostgreSQL-compatible upsert helper + upsert_gauntlet_rewards(all_rewards, batch_size=15) db.close() - return f'Inserted {len(all_rewards)} gauntlet rewards' + return f"Inserted {len(all_rewards)} gauntlet rewards" -@app.delete('/api/v1/gauntletrewards/{gauntletreward_id}') +@app.delete("/api/v1/gauntletrewards/{gauntletreward_id}") async def v1_gauntletreward_delete(gauntletreward_id): if GauntletReward.delete_by_id(gauntletreward_id) == 1: - return f'Deleted gauntlet reward ID {gauntletreward_id}' + return f"Deleted gauntlet reward ID {gauntletreward_id}" - raise DatabaseError(f'Unable to delete gauntlet run {gauntletreward_id}') + raise DatabaseError(f"Unable to delete gauntlet run {gauntletreward_id}") """ @@ -5000,17 +6144,28 @@ class GauntletRunModel(pydantic.BaseModel): wins: Optional[int] = 0 losses: Optional[int] = 0 gsheet: Optional[str] = None - created: Optional[int] = int(datetime.timestamp(datetime.now())*1000) + created: Optional[int] = int(datetime.timestamp(datetime.now()) * 1000) ended: Optional[int] = 0 -@app.get('/api/v1/gauntletruns') +@app.get("/api/v1/gauntletruns") async def v1_gauntletrun_get( - team_id: list = Query(default=None), wins: Optional[int] = None, wins_min: Optional[int] = None, - wins_max: Optional[int] = None, losses: Optional[int] = None, losses_min: Optional[int] = None, - losses_max: Optional[int] = None, gsheet: Optional[str] = None, created_after: Optional[int] = None, - created_before: Optional[int] = None, ended_after: Optional[int] = None, ended_before: Optional[int] = None, - is_active: Optional[bool] = None, gauntlet_id: list = Query(default=None), season: list = Query(default=None)): + team_id: list = Query(default=None), + wins: Optional[int] = None, + wins_min: Optional[int] = None, + wins_max: Optional[int] = None, + losses: Optional[int] = None, + losses_min: Optional[int] = None, + losses_max: Optional[int] = None, + gsheet: Optional[str] = None, + created_after: Optional[int] = None, + created_before: Optional[int] = None, + ended_after: Optional[int] = None, + ended_before: Optional[int] = None, + is_active: Optional[bool] = None, + gauntlet_id: list = Query(default=None), + season: list = Query(default=None), +): all_gauntlets = GauntletRun.select() if team_id is not None: @@ -5047,44 +6202,52 @@ async def v1_gauntletrun_get( if season is not None: all_gauntlets = all_gauntlets.where(GauntletRun.team.season << season) - return_val = {'count': all_gauntlets.count(), 'runs': []} + return_val = {"count": all_gauntlets.count(), "runs": []} for x in all_gauntlets: - return_val['runs'].append(model_to_dict(x)) + return_val["runs"].append(model_to_dict(x)) db.close() return return_val -@app.get('/api/v1/gauntletruns/{gauntletrun_id}') +@app.get("/api/v1/gauntletruns/{gauntletrun_id}") async def v1_gauntletrun_get_one(gauntletrun_id): try: this_gauntlet = GauntletRun.get_by_id(gauntletrun_id) except Exception: db.close() - raise HTTPException(status_code=404, detail=f'No gauntlet found with id {gauntletrun_id}') + raise HTTPException( + status_code=404, detail=f"No gauntlet found with id {gauntletrun_id}" + ) return_val = model_to_dict(this_gauntlet) db.close() return return_val 
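For context on the upsert_gauntlet_rewards(all_rewards, batch_size=15) call earlier in this diff: it stands in for SQLite-only INSERT OR REPLACE (Peewee's on_conflict_replace()), which PostgreSQL rejects. The new app/db_helpers.py is not included in this excerpt, so the following is only a hedged sketch of the general pattern such a cross-database helper tends to use -- Peewee's INSERT ... ON CONFLICT DO UPDATE against an explicit conflict target, which both PostgreSQL and SQLite >= 3.24 accept. The function name, signature, and field handling below are assumptions for illustration, not the shipped implementation.

    from peewee import chunked


    def upsert_rows(model, rows, conflict_fields, batch_size=15):
        """Bulk-insert `rows` (list of dicts), updating existing rows on key conflict.

        Emits INSERT ... ON CONFLICT (...) DO UPDATE instead of SQLite's
        INSERT OR REPLACE. Requires a unique index on `conflict_fields`.
        """
        if not rows:
            return 0
        # Overwrite every non-key column with the incoming (EXCLUDED) value.
        preserve = [
            f
            for f in model._meta.sorted_fields
            if f.name not in {"id", *conflict_fields}
        ]
        conflict_target = [getattr(model, name) for name in conflict_fields]
        for batch in chunked(rows, batch_size):
            (
                model.insert_many(batch)
                .on_conflict(conflict_target=conflict_target, preserve=preserve)
                .execute()
            )
        return len(rows)


    # Illustrative call shape only -- the real helper and GauntletReward's
    # unique key live in app/db_helpers.py and app/db_engine.py, outside this hunk:
    # upsert_rows(GauntletReward, all_rewards, conflict_fields=(...), batch_size=15)

The caller in this patch already wraps the helper in db.atomic(), so the sketch deliberately leaves transaction management to the call site; nesting atomic() inside the helper would also work (Peewee turns it into a savepoint) but is unnecessary here.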
-@app.patch('/api/v1/gauntletruns/{gauntletrun_id}') +@app.patch("/api/v1/gauntletruns/{gauntletrun_id}") async def v1_gauntletrun_patch( - gauntletrun_id, team_id: Optional[int] = None, wins: Optional[int] = None, losses: Optional[int] = None, - gsheet: Optional[str] = None, created: Optional[bool] = None, ended: Optional[bool] = None, - token: str = Depends(oauth2_scheme)): + gauntletrun_id, + team_id: Optional[int] = None, + wins: Optional[int] = None, + losses: Optional[int] = None, + gsheet: Optional[str] = None, + created: Optional[bool] = None, + ended: Optional[bool] = None, + token: str = Depends(oauth2_scheme), +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to patch gauntlet runs. This event has been logged.' + detail="You are not authorized to patch gauntlet runs. This event has been logged.", ) this_run = GauntletRun.get_or_none(GauntletRun.id == gauntletrun_id) if this_run is None: db.close() - raise KeyError(f'Gauntlet Run ID {gauntletrun_id} not found') + raise KeyError(f"Gauntlet Run ID {gauntletrun_id} not found") if team_id is not None: this_run.team_id = team_id @@ -5096,12 +6259,12 @@ async def v1_gauntletrun_patch( this_run.gsheet = gsheet if created is not None: if created is True: - this_run.created = int(datetime.timestamp(datetime.now())*1000) + this_run.created = int(datetime.timestamp(datetime.now()) * 1000) else: this_run.created = None if ended is not None: if ended is True: - this_run.ended = int(datetime.timestamp(datetime.now())*1000) + this_run.ended = int(datetime.timestamp(datetime.now()) * 1000) else: this_run.ended = 0 @@ -5111,17 +6274,19 @@ async def v1_gauntletrun_patch( return r_curr else: db.close() - raise DatabaseError(f'Unable to patch gauntlet run {gauntletrun_id}') + raise DatabaseError(f"Unable to patch gauntlet run {gauntletrun_id}") -@app.post('/api/v1/gauntletruns') -async def v1_gauntletrun_post(gauntletrun: GauntletRunModel, token: str = Depends(oauth2_scheme)): +@app.post("/api/v1/gauntletruns") +async def v1_gauntletrun_post( + gauntletrun: GauntletRunModel, token: str = Depends(oauth2_scheme) +): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post gauntlets. This event has been logged.' + detail="You are not authorized to post gauntlets. 
This event has been logged.", ) this_run = GauntletRun(**gauntletrun.dict()) @@ -5132,56 +6297,67 @@ async def v1_gauntletrun_post(gauntletrun: GauntletRunModel, token: str = Depend return r_run else: db.close() - raise DatabaseError(f'Unable to post gauntlet run') + raise DatabaseError(f"Unable to post gauntlet run") -@app.delete('/api/v1/gauntletruns/{gauntletrun_id}') +@app.delete("/api/v1/gauntletruns/{gauntletrun_id}") async def v1_gauntletrun_delete(gauntletrun_id): if GauntletRun.delete_by_id(gauntletrun_id) == 1: - return f'Deleted gauntlet run ID {gauntletrun_id}' + return f"Deleted gauntlet run ID {gauntletrun_id}" - raise DatabaseError(f'Unable to delete gauntlet run {gauntletrun_id}') + raise DatabaseError(f"Unable to delete gauntlet run {gauntletrun_id}") -@app.post('/api/v1/stl-fix') +@app.post("/api/v1/stl-fix") async def v1_stl_fix(token: str = Depends(oauth2_scheme)): if not valid_token(token): - logging.warning(f'Bad Token: {token}') + logging.warning(f"Bad Token: {token}") db.close() raise HTTPException( status_code=401, - detail='You are not authorized to post. This event has been logged.' + detail="You are not authorized to post. This event has been logged.", ) - p_query = Player.update(mlbclub='St Louis Cardinals', franchise='Cardinals').where( - Player.mlbclub == 'St. Louis Cardinals' - ).execute() + p_query = ( + Player.update(mlbclub="St Louis Cardinals", franchise="Cardinals") + .where(Player.mlbclub == "St. Louis Cardinals") + .execute() + ) db.close() - return {'detail': f'Removed the period from St Louis'} + return {"detail": f"Removed the period from St Louis"} -@app.get('/api/v1/ratings/batter/{sba_id}') +@app.get("/api/v1/ratings/batter/{sba_id}") async def v1_ratings_batter_get( - sba_id: int, cardset_name: Optional[str] = None, vs_hand: Literal["l", "r", "L", "R"] = None): + sba_id: int, + cardset_name: Optional[str] = None, + vs_hand: Literal["l", "r", "L", "R"] = None, +): this_player = ScoutPlayer.get_or_none(ScoutPlayer.sba_id == sba_id) - logging.info(f'num players: {ScoutPlayer.select().count()}') + logging.info(f"num players: {ScoutPlayer.select().count()}") if this_player is None: scout_db.close() - raise HTTPException(status_code=404, detail=f'SBa ID {sba_id} not found') + raise HTTPException(status_code=404, detail=f"SBa ID {sba_id} not found") r_query = BatterRatings.select().where( (BatterRatings.player == this_player) & (BatterRatings.is_prep == 0) ) if cardset_name is not None: - this_cardset = ScoutCardset.get_or_none(fn.Lower(ScoutCardset.set_title) == cardset_name.lower()) + this_cardset = ScoutCardset.get_or_none( + fn.Lower(ScoutCardset.set_title) == cardset_name.lower() + ) r_query = r_query.where(BatterRatings.cardset == this_cardset) if vs_hand is not None: - r_query = r_query.where(fn.Lower(BatterRatings.vs_hand) == f'v{vs_hand.lower()}') + r_query = r_query.where( + fn.Lower(BatterRatings.vs_hand) == f"v{vs_hand.lower()}" + ) - return_val = {'count': r_query.count(), 'ratings': [model_to_dict(x) for x in r_query]} + return_val = { + "count": r_query.count(), + "ratings": [model_to_dict(x) for x in r_query], + } scout_db.close() return return_val - diff --git a/scripts/audit_results.json b/scripts/audit_results.json new file mode 100644 index 0000000..cce2295 --- /dev/null +++ b/scripts/audit_results.json @@ -0,0 +1,92 @@ +{ + "generated_at": "2026-01-25T23:04:23.049271", + "summary": { + "total_tables": 29, + "total_records": 658963, + "total_issues": 3, + "critical_issues": 0, + "high_issues": 3, + "medium_issues": 0 + }, + 
"table_counts": { + "award": 0, + "battingcard": 6036, + "battingcardratings": 12072, + "battingstat": 50228, + "card": 61746, + "cardposition": 18654, + "cardset": 29, + "current": 1, + "decision": 29694, + "event": 9, + "gamerewards": 10, + "gauntletreward": 45, + "gauntletrun": 458, + "mlbplayer": 4781, + "notification": 14362, + "pack": 20595, + "packtype": 9, + "paperdex": 46121, + "pitchingcard": 6887, + "pitchingcardratings": 13774, + "pitchingstat": 13135, + "player": 12964, + "rarity": 6, + "result": 2235, + "reward": 8043, + "roster": 23, + "stratgame": 4208, + "stratplay": 332737, + "team": 101 + }, + "issues": [ + { + "type": "ORPHANED_FK", + "severity": "HIGH", + "child_table": "battingstat", + "child_field": "card_id", + "parent_table": "card", + "parent_field": "id", + "description": "Batting stats referencing non-existent cards", + "orphan_count": 1953, + "sample_orphan_ids": [ + 1419, + 1419, + 1419, + 1419, + 1433 + ] + }, + { + "type": "ORPHANED_FK", + "severity": "HIGH", + "child_table": "pitchingstat", + "child_field": "card_id", + "parent_table": "card", + "parent_field": "id", + "description": "Pitching stats referencing non-existent cards", + "orphan_count": 437, + "sample_orphan_ids": [ + 1412, + 1660, + 2045, + 2046, + 2061 + ] + }, + { + "type": "VARCHAR_TOO_LONG", + "severity": "HIGH", + "table": "team", + "field": "abbrev", + "description": "Team abbreviation", + "max_found": 13, + "expected_max": 10, + "sample_values": [ + "Gauntlet-KC...", + "Gauntlet-NCB...", + "Gauntlet-SLV..." + ] + } + ] +} \ No newline at end of file diff --git a/scripts/audit_sqlite.py b/scripts/audit_sqlite.py new file mode 100755 index 0000000..4ae6ecb --- /dev/null +++ b/scripts/audit_sqlite.py @@ -0,0 +1,564 @@ +#!/usr/bin/env python3 +""" +Paper Dynasty SQLite Data Integrity Audit + +Pre-migration script to identify potential issues before migrating to PostgreSQL. +Based on issues discovered during Major Domo migration (August 2025). + +Checks for: +1. NULL values in fields that will be NOT NULL in PostgreSQL +2. Orphaned foreign key records +3. VARCHAR field max lengths (PostgreSQL is stricter) +4. Record counts for baseline comparison +5. Primary key gaps or duplicates + +Usage: + python scripts/audit_sqlite.py + python scripts/audit_sqlite.py --fix # Apply safe fixes +""" + +import argparse +import json +import sqlite3 +import sys +from datetime import datetime +from pathlib import Path + + +def connect_db(db_path: str) -> sqlite3.Connection: + """Connect to SQLite database.""" + conn = sqlite3.connect(db_path) + conn.row_factory = sqlite3.Row + return conn + + +def get_table_record_counts(conn: sqlite3.Connection) -> dict: + """Get record counts for all tables.""" + counts = {} + cursor = conn.execute( + "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" + ) + for row in cursor: + table_name = row["name"] + count_cursor = conn.execute(f"SELECT COUNT(*) FROM {table_name}") + counts[table_name] = count_cursor.fetchone()[0] + return counts + + +def check_null_values(conn: sqlite3.Connection) -> list: + """ + Check for NULL values in fields that should not be null. + + These are the fields that Major Domo found issues with. 
+ """ + issues = [] + + # Fields to check - based on Major Domo experience + null_checks = [ + # (table, field, description) + ("team", "abbrev", "Team abbreviation"), + ("team", "sname", "Team short name"), + ("team", "lname", "Team long name"), + ("player", "p_name", "Player name"), + ("player", "image", "Player image URL"), + ("card", "player_id", "Card player reference"), + ("stratplay", "game_id", "Play game reference"), + ("stratplay", "pitcher_id", "Play pitcher reference"), + ("decision", "game_id", "Decision game reference"), + ("decision", "pitcher_id", "Decision pitcher reference"), + ] + + for table, field, description in null_checks: + try: + cursor = conn.execute(f"SELECT COUNT(*) FROM {table} WHERE {field} IS NULL") + null_count = cursor.fetchone()[0] + if null_count > 0: + cursor = conn.execute(f"SELECT COUNT(*) FROM {table}") + total_count = cursor.fetchone()[0] + issues.append( + { + "type": "NULL_VALUE", + "severity": "HIGH" + if null_count > total_count * 0.1 + else "MEDIUM", + "table": table, + "field": field, + "description": description, + "null_count": null_count, + "total_count": total_count, + "percentage": round(null_count / total_count * 100, 2) + if total_count > 0 + else 0, + } + ) + except sqlite3.OperationalError: + # Table or column doesn't exist + pass + + return issues + + +def check_orphaned_foreign_keys(conn: sqlite3.Connection) -> list: + """ + Check for orphaned foreign key records. + + These will fail with foreign key constraint violations in PostgreSQL. + """ + issues = [] + + # Foreign key relationships to check + fk_checks = [ + # (child_table, child_field, parent_table, parent_field, description) + ( + "card", + "player_id", + "player", + "player_id", + "Cards referencing non-existent players", + ), + ("card", "team_id", "team", "id", "Cards referencing non-existent teams"), + ( + "stratplay", + "game_id", + "stratgame", + "id", + "Plays referencing non-existent games", + ), + ( + "stratplay", + "batter_id", + "player", + "player_id", + "Plays referencing non-existent batters", + ), + ( + "stratplay", + "pitcher_id", + "player", + "player_id", + "Plays referencing non-existent pitchers", + ), + ( + "decision", + "game_id", + "stratgame", + "id", + "Decisions referencing non-existent games", + ), + ( + "decision", + "pitcher_id", + "player", + "player_id", + "Decisions referencing non-existent pitchers", + ), + ( + "battingstat", + "card_id", + "card", + "id", + "Batting stats referencing non-existent cards", + ), + ( + "pitchingstat", + "card_id", + "card", + "id", + "Pitching stats referencing non-existent cards", + ), + ( + "battingcard", + "player_id", + "player", + "player_id", + "Batting cards referencing non-existent players", + ), + ( + "pitchingcard", + "player_id", + "player", + "player_id", + "Pitching cards referencing non-existent players", + ), + ( + "cardposition", + "player_id", + "player", + "player_id", + "Card positions referencing non-existent players", + ), + ( + "paperdex", + "player_id", + "player", + "player_id", + "Paperdex entries referencing non-existent players", + ), + ( + "paperdex", + "team_id", + "team", + "id", + "Paperdex entries referencing non-existent teams", + ), + ( + "gauntletrun", + "team_id", + "team", + "id", + "Gauntlet runs referencing non-existent teams", + ), + ] + + for child_table, child_field, parent_table, parent_field, description in fk_checks: + try: + # Use explicit column names to avoid ambiguity + query = f""" + SELECT COUNT(*) + FROM {child_table} c + LEFT JOIN {parent_table} p ON 
c.{child_field} = p.{parent_field} + WHERE c.{child_field} IS NOT NULL AND p.{parent_field} IS NULL + """ + cursor = conn.execute(query) + orphan_count = cursor.fetchone()[0] + + if orphan_count > 0: + # Get sample orphaned IDs + sample_query = f""" + SELECT c.{child_field} + FROM {child_table} c + LEFT JOIN {parent_table} p ON c.{child_field} = p.{parent_field} + WHERE c.{child_field} IS NOT NULL AND p.{parent_field} IS NULL + LIMIT 5 + """ + sample_cursor = conn.execute(sample_query) + sample_ids = [row[0] for row in sample_cursor.fetchall()] + + issues.append( + { + "type": "ORPHANED_FK", + "severity": "HIGH", + "child_table": child_table, + "child_field": child_field, + "parent_table": parent_table, + "parent_field": parent_field, + "description": description, + "orphan_count": orphan_count, + "sample_orphan_ids": sample_ids, + } + ) + except sqlite3.OperationalError as e: + # Table or column doesn't exist + print( + f"Warning: Could not check {child_table}.{child_field} -> {parent_table}.{parent_field}: {e}" + ) + + return issues + + +def check_varchar_lengths(conn: sqlite3.Connection) -> list: + """ + Check max lengths of string fields. + + PostgreSQL VARCHAR fields have stricter length limits than SQLite. + """ + issues = [] + + # Fields to check with expected max lengths + varchar_checks = [ + # (table, field, expected_max_length, description) + ("player", "p_name", 255, "Player name"), + ("player", "image", 1000, "Player image URL"), + ("player", "image2", 1000, "Player image2 URL"), + ("player", "headshot", 500, "Player headshot URL"), + ("player", "vanity_card", 500, "Player vanity card"), + ("player", "strat_code", 100, "Strat code"), + ("player", "bbref_id", 50, "Baseball Reference ID"), + ("player", "description", 1000, "Player description"), + ("team", "abbrev", 10, "Team abbreviation"), + ("team", "sname", 100, "Team short name"), + ("team", "lname", 255, "Team long name"), + ("notification", "title", 255, "Notification title"), + ("notification", "message", 2000, "Notification message"), + ] + + for table, field, expected_max, description in varchar_checks: + try: + cursor = conn.execute(f"SELECT MAX(LENGTH({field})) FROM {table}") + max_length = cursor.fetchone()[0] + + if max_length and max_length > expected_max: + # Get sample of long values + sample_cursor = conn.execute( + f"SELECT {field} FROM {table} WHERE LENGTH({field}) > {expected_max} LIMIT 3" + ) + samples = [ + row[0][:100] + "..." if row[0] else None + for row in sample_cursor.fetchall() + ] + + issues.append( + { + "type": "VARCHAR_TOO_LONG", + "severity": "HIGH", + "table": table, + "field": field, + "description": description, + "max_found": max_length, + "expected_max": expected_max, + "sample_values": samples, + } + ) + elif max_length: + # Info: report actual max for reference + pass + except sqlite3.OperationalError: + pass + + return issues + + +def check_duplicate_primary_keys(conn: sqlite3.Connection) -> list: + """ + Check for duplicate primary keys (shouldn't happen but good to verify). 
+ """ + issues = [] + + pk_checks = [ + ("player", "player_id"), + ("team", "id"), + ("card", "id"), + ("stratgame", "id"), + ("stratplay", "id"), + ] + + for table, pk_field in pk_checks: + try: + cursor = conn.execute(f""" + SELECT {pk_field}, COUNT(*) as cnt + FROM {table} + GROUP BY {pk_field} + HAVING COUNT(*) > 1 + """) + duplicates = cursor.fetchall() + + if duplicates: + issues.append( + { + "type": "DUPLICATE_PK", + "severity": "CRITICAL", + "table": table, + "pk_field": pk_field, + "duplicate_ids": [row[0] for row in duplicates[:10]], + "duplicate_count": len(duplicates), + } + ) + except sqlite3.OperationalError: + pass + + return issues + + +def check_unique_constraints(conn: sqlite3.Connection) -> list: + """ + Check that composite unique constraints would be satisfied. + These are the indexes that on_conflict_replace() depends on. + """ + issues = [] + + unique_checks = [ + # (table, fields, description) + ("battingcard", ["player_id", "variant"], "Batting card unique constraint"), + ("pitchingcard", ["player_id", "variant"], "Pitching card unique constraint"), + ( + "cardposition", + ["player_id", "variant", "position"], + "Card position unique constraint", + ), + ( + "battingcardratings", + ["battingcard_id", "vs_hand"], + "Batting card ratings unique constraint", + ), + ( + "pitchingcardratings", + ["pitchingcard_id", "vs_hand"], + "Pitching card ratings unique constraint", + ), + ] + + for table, fields, description in unique_checks: + try: + fields_str = ", ".join(fields) + cursor = conn.execute(f""" + SELECT {fields_str}, COUNT(*) as cnt + FROM {table} + GROUP BY {fields_str} + HAVING COUNT(*) > 1 + """) + duplicates = cursor.fetchall() + + if duplicates: + issues.append( + { + "type": "DUPLICATE_UNIQUE", + "severity": "HIGH", + "table": table, + "fields": fields, + "description": description, + "duplicate_count": len(duplicates), + "sample_duplicates": [ + dict(zip(fields + ["count"], row)) for row in duplicates[:5] + ], + } + ) + except sqlite3.OperationalError as e: + print(f"Warning: Could not check unique constraint on {table}: {e}") + + return issues + + +def generate_report(counts: dict, issues: list, output_path: str = None) -> str: + """Generate audit report.""" + report = { + "generated_at": datetime.now().isoformat(), + "summary": { + "total_tables": len(counts), + "total_records": sum(counts.values()), + "total_issues": len(issues), + "critical_issues": len( + [i for i in issues if i.get("severity") == "CRITICAL"] + ), + "high_issues": len([i for i in issues if i.get("severity") == "HIGH"]), + "medium_issues": len([i for i in issues if i.get("severity") == "MEDIUM"]), + }, + "table_counts": counts, + "issues": issues, + } + + if output_path: + with open(output_path, "w") as f: + json.dump(report, f, indent=2) + + return json.dumps(report, indent=2) + + +def main(): + parser = argparse.ArgumentParser( + description="Audit SQLite database before PostgreSQL migration" + ) + parser.add_argument( + "--db-path", + type=str, + default="storage/pd_master.db", + help="Path to SQLite database", + ) + parser.add_argument("--output", type=str, help="Output JSON file for report") + parser.add_argument( + "--fix", action="store_true", help="Apply safe fixes (not implemented)" + ) + args = parser.parse_args() + + print("=" * 60) + print("Paper Dynasty SQLite Data Integrity Audit") + print("=" * 60) + + if not Path(args.db_path).exists(): + print(f"ERROR: Database not found: {args.db_path}") + sys.exit(1) + + conn = connect_db(args.db_path) + + # Run checks + print("\n1. 
Getting table record counts...") + counts = get_table_record_counts(conn) + print(f" Found {len(counts)} tables with {sum(counts.values()):,} total records") + + print("\n2. Checking for NULL values...") + null_issues = check_null_values(conn) + print(f" Found {len(null_issues)} NULL value issues") + + print("\n3. Checking for orphaned foreign keys...") + fk_issues = check_orphaned_foreign_keys(conn) + print(f" Found {len(fk_issues)} orphaned FK issues") + + print("\n4. Checking VARCHAR lengths...") + varchar_issues = check_varchar_lengths(conn) + print(f" Found {len(varchar_issues)} VARCHAR length issues") + + print("\n5. Checking for duplicate primary keys...") + pk_issues = check_duplicate_primary_keys(conn) + print(f" Found {len(pk_issues)} duplicate PK issues") + + print("\n6. Checking unique constraints...") + unique_issues = check_unique_constraints(conn) + print(f" Found {len(unique_issues)} unique constraint issues") + + # Combine all issues + all_issues = null_issues + fk_issues + varchar_issues + pk_issues + unique_issues + + # Generate report + print("\n" + "=" * 60) + print("AUDIT RESULTS") + print("=" * 60) + + if args.output: + report = generate_report(counts, all_issues, args.output) + print(f"Full report saved to: {args.output}") + else: + report = generate_report(counts, all_issues) + + # Print summary + print(f"\nTotal Issues: {len(all_issues)}") + critical = [i for i in all_issues if i.get("severity") == "CRITICAL"] + high = [i for i in all_issues if i.get("severity") == "HIGH"] + medium = [i for i in all_issues if i.get("severity") == "MEDIUM"] + + if critical: + print(f"\n CRITICAL ({len(critical)}):") + for issue in critical: + print( + f" - {issue['type']}: {issue.get('description', issue.get('table', 'Unknown'))}" + ) + + if high: + print(f"\n HIGH ({len(high)}):") + for issue in high: + desc = issue.get( + "description", + f"{issue.get('table', 'Unknown')}.{issue.get('field', 'Unknown')}", + ) + print(f" - {issue['type']}: {desc}") + + if medium: + print(f"\n MEDIUM ({len(medium)}):") + for issue in medium: + desc = issue.get( + "description", + f"{issue.get('table', 'Unknown')}.{issue.get('field', 'Unknown')}", + ) + print(f" - {issue['type']}: {desc}") + + # Table counts + print("\n" + "-" * 60) + print("TABLE RECORD COUNTS (for baseline comparison)") + print("-" * 60) + for table, count in sorted(counts.items()): + print(f" {table:30} {count:>10,}") + + conn.close() + + # Exit code based on issues + if critical: + print("\n CRITICAL ISSUES FOUND - Migration may fail!") + sys.exit(2) + elif high: + print("\n HIGH PRIORITY ISSUES FOUND - Review before migration") + sys.exit(1) + else: + print("\n No critical issues found - Ready for migration") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/scripts/migrate_to_postgres.py b/scripts/migrate_to_postgres.py new file mode 100755 index 0000000..8e880c0 --- /dev/null +++ b/scripts/migrate_to_postgres.py @@ -0,0 +1,510 @@ +#!/usr/bin/env python3 +""" +Paper Dynasty SQLite to PostgreSQL Migration Script + +CRITICAL: This script preserves primary key IDs exactly as they exist in SQLite. +Failing to preserve IDs will cause all foreign key references to break. 
+ +Usage: + # Dry run (validate only, no changes) + python scripts/migrate_to_postgres.py --dry-run + + # Full migration + python scripts/migrate_to_postgres.py + + # Migrate specific table only + python scripts/migrate_to_postgres.py --table player + +Environment Variables Required: + POSTGRES_HOST, POSTGRES_DB, POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_PORT + +Based on lessons learned from Major Domo PostgreSQL migration (August 2025). +""" + +import argparse +import logging +import os +import sqlite3 +import sys +from datetime import datetime +from typing import Any, Dict, List, Optional, Tuple + +import psycopg2 +from psycopg2.extras import execute_values + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(levelname)s - %(message)s", + handlers=[ + logging.StreamHandler(sys.stdout), + logging.FileHandler( + f"logs/migration_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log" + ), + ], +) +logger = logging.getLogger(__name__) + +# Migration order - tables with no FK dependencies first, then dependent tables +# This ensures parent records exist before children are inserted +MIGRATION_ORDER = [ + # Tier 1: No foreign key dependencies + "current", + "rarity", + "event", + "packtype", + "notification", + # Tier 2: Simple FK dependencies (single level) + "cardset", # -> event + "mlbplayer", # no FKs + "gamerewards", # -> packtype, player (but player not created yet, so nullable) + # Tier 3: Core entity tables + "team", # -> event + "player", # -> cardset, rarity, mlbplayer + # Tier 4: Dependent on core entities + "pack", # -> team, packtype, cardset + "card", # -> player, team, pack + "roster", # -> team, card (x26) + "result", # -> team (x2) + "stratgame", # -> team (x2) + # Tier 5: Statistics and game data + "battingstat", # -> card, team, result + "pitchingstat", # -> card, team, result + "stratplay", # -> stratgame, player (many), team (many) + "decision", # -> stratgame, player, team + # Tier 6: Card detail tables + "battingcard", # -> player + "battingcardratings", # -> battingcard + "pitchingcard", # -> player + "pitchingcardratings", # -> pitchingcard + "cardposition", # -> player + # Tier 7: Other dependent tables + "award", # -> card, team + "paperdex", # -> team, player + "reward", # -> team + "gauntletreward", # -> event, gamerewards + "gauntletrun", # -> team, event +] + +# Tables with explicit primary keys (not auto-increment) +EXPLICIT_PK_TABLES = { + "player": "player_id", # Uses player_id as explicit PK +} + +# All other tables use 'id' as auto-increment PK + + +def get_sqlite_connection(db_path: str) -> sqlite3.Connection: + """Connect to SQLite database.""" + if not os.path.exists(db_path): + raise FileNotFoundError(f"SQLite database not found: {db_path}") + + conn = sqlite3.connect(db_path) + conn.row_factory = sqlite3.Row + return conn + + +def get_postgres_connection() -> psycopg2.extensions.connection: + """Connect to PostgreSQL database using environment variables.""" + required_vars = [ + "POSTGRES_HOST", + "POSTGRES_DB", + "POSTGRES_USER", + "POSTGRES_PASSWORD", + ] + missing = [v for v in required_vars if not os.environ.get(v)] + if missing: + raise EnvironmentError(f"Missing required environment variables: {missing}") + + return psycopg2.connect( + host=os.environ["POSTGRES_HOST"], + database=os.environ["POSTGRES_DB"], + user=os.environ["POSTGRES_USER"], + password=os.environ["POSTGRES_PASSWORD"], + port=int(os.environ.get("POSTGRES_PORT", "5432")), + ) + + +def get_table_columns(sqlite_conn: sqlite3.Connection, table_name: 
str) -> List[str]: + """Get column names for a table from SQLite.""" + cursor = sqlite_conn.execute(f"PRAGMA table_info({table_name})") + return [row["name"] for row in cursor.fetchall()] + + +def get_primary_key_column(table_name: str) -> str: + """Get the primary key column name for a table.""" + return EXPLICIT_PK_TABLES.get(table_name, "id") + + +def get_sequence_name(table_name: str, pk_column: str) -> str: + """Get the PostgreSQL sequence name for a table's primary key.""" + return f"{table_name}_{pk_column}_seq" + + +def get_record_count(conn, table_name: str, is_sqlite: bool = True) -> int: + """Get record count for a table.""" + if is_sqlite: + cursor = conn.execute(f"SELECT COUNT(*) FROM {table_name}") + return cursor.fetchone()[0] + else: + cursor = conn.cursor() + cursor.execute(f"SELECT COUNT(*) FROM {table_name}") + return cursor.fetchone()[0] + + +def migrate_table( + sqlite_conn: sqlite3.Connection, + pg_conn: psycopg2.extensions.connection, + table_name: str, + batch_size: int = 500, + dry_run: bool = False, +) -> Dict[str, Any]: + """ + Migrate a single table from SQLite to PostgreSQL. + + CRITICAL: Preserves primary key IDs exactly. + + Returns: + Dict with migration statistics + """ + stats = { + "table": table_name, + "sqlite_count": 0, + "postgres_count": 0, + "inserted": 0, + "skipped": 0, + "errors": [], + "success": False, + } + + try: + # Get column info + columns = get_table_columns(sqlite_conn, table_name) + pk_column = get_primary_key_column(table_name) + + # Count source records + stats["sqlite_count"] = get_record_count( + sqlite_conn, table_name, is_sqlite=True + ) + logger.info(f"Table {table_name}: {stats['sqlite_count']} records to migrate") + + if stats["sqlite_count"] == 0: + logger.info(f"Table {table_name}: No records to migrate") + stats["success"] = True + return stats + + if dry_run: + logger.info( + f"[DRY RUN] Would migrate {stats['sqlite_count']} records from {table_name}" + ) + stats["success"] = True + return stats + + # Read all records from SQLite + cursor = sqlite_conn.execute(f"SELECT * FROM {table_name}") + rows = cursor.fetchall() + + # Prepare PostgreSQL insert + pg_cursor = pg_conn.cursor() + + # Build column list string + columns_str = ", ".join(columns) + placeholders = ", ".join(["%s"] * len(columns)) + + # Process in batches + for i in range(0, len(rows), batch_size): + batch = rows[i : i + batch_size] + batch_values = [] + + for row in batch: + # Convert sqlite3.Row to tuple, preserving all values including ID + values = tuple(row[col] for col in columns) + batch_values.append(values) + + try: + # Use execute_values for efficient batch insert + insert_sql = f"INSERT INTO {table_name} ({columns_str}) VALUES %s" + execute_values(pg_cursor, insert_sql, batch_values) + stats["inserted"] += len(batch) + + except psycopg2.errors.ForeignKeyViolation as e: + # Foreign key error - fall back to individual inserts + logger.warning( + f"FK violation in batch, falling back to individual inserts: {e}" + ) + pg_conn.rollback() + + for values in batch_values: + try: + pg_cursor.execute( + f"INSERT INTO {table_name} ({columns_str}) VALUES ({placeholders})", + values, + ) + stats["inserted"] += 1 + except psycopg2.errors.ForeignKeyViolation as e: + stats["skipped"] += 1 + # Extract ID for logging + pk_idx = columns.index(pk_column) if pk_column in columns else 0 + record_id = values[pk_idx] + stats["errors"].append( + { + "id": record_id, + "error": "ForeignKeyViolation", + "message": str(e), + } + ) + logger.warning( + f"Skipped orphaned record 
{table_name}.{pk_column}={record_id}" + ) + pg_conn.rollback() + except Exception as e: + stats["skipped"] += 1 + pk_idx = columns.index(pk_column) if pk_column in columns else 0 + record_id = values[pk_idx] + stats["errors"].append( + { + "id": record_id, + "error": type(e).__name__, + "message": str(e), + } + ) + logger.error( + f"Error inserting {table_name}.{pk_column}={record_id}: {e}" + ) + pg_conn.rollback() + + logger.info( + f"Table {table_name}: Processed {min(i + batch_size, len(rows))}/{len(rows)} records" + ) + + # Commit the transaction + pg_conn.commit() + + # CRITICAL: Reset the PostgreSQL sequence to MAX(id) + 1 + # Without this, new inserts will fail with duplicate key errors + sequence_name = get_sequence_name(table_name, pk_column) + try: + pg_cursor.execute(f""" + SELECT setval('{sequence_name}', COALESCE((SELECT MAX({pk_column}) FROM {table_name}), 1), true) + """) + pg_conn.commit() + logger.info(f"Table {table_name}: Reset sequence {sequence_name}") + except psycopg2.errors.UndefinedTable as e: + # Sequence might not exist for explicit PK tables + logger.warning(f"Could not reset sequence {sequence_name}: {e}") + pg_conn.rollback() + + # Verify counts + stats["postgres_count"] = get_record_count(pg_conn, table_name, is_sqlite=False) + + if stats["postgres_count"] == stats["sqlite_count"]: + logger.info( + f"Table {table_name}: SUCCESS - {stats['postgres_count']} records migrated" + ) + stats["success"] = True + elif stats["postgres_count"] == stats["inserted"]: + logger.warning( + f"Table {table_name}: PARTIAL - {stats['inserted']} inserted, " + f"{stats['skipped']} skipped (orphaned FK records)" + ) + stats["success"] = ( + True # Partial success is acceptable for orphaned records + ) + else: + logger.error( + f"Table {table_name}: MISMATCH - SQLite: {stats['sqlite_count']}, " + f"PostgreSQL: {stats['postgres_count']}" + ) + stats["success"] = False + + except Exception as e: + logger.error(f"Table {table_name}: FAILED - {e}") + stats["errors"].append({"error": type(e).__name__, "message": str(e)}) + stats["success"] = False + pg_conn.rollback() + + return stats + + +def verify_id_preservation( + sqlite_conn: sqlite3.Connection, + pg_conn: psycopg2.extensions.connection, + sample_tables: List[str] = None, +) -> bool: + """ + Verify that primary key IDs were preserved correctly. + + This is a CRITICAL check - if IDs don't match, the migration has failed. 
+ """ + if sample_tables is None: + sample_tables = ["player", "team", "card", "stratgame"] + + all_match = True + + for table_name in sample_tables: + pk_column = get_primary_key_column(table_name) + + # Get first and last 5 IDs from SQLite + sqlite_cursor = sqlite_conn.execute( + f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} LIMIT 5" + ) + sqlite_first = [row[0] for row in sqlite_cursor.fetchall()] + + sqlite_cursor = sqlite_conn.execute( + f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} DESC LIMIT 5" + ) + sqlite_last = [row[0] for row in sqlite_cursor.fetchall()] + + # Get same IDs from PostgreSQL + pg_cursor = pg_conn.cursor() + pg_cursor.execute( + f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} LIMIT 5" + ) + pg_first = [row[0] for row in pg_cursor.fetchall()] + + pg_cursor.execute( + f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} DESC LIMIT 5" + ) + pg_last = [row[0] for row in pg_cursor.fetchall()] + + if sqlite_first == pg_first and sqlite_last == pg_last: + logger.info(f"ID Verification {table_name}: PASS - IDs match") + else: + logger.error( + f"ID Verification {table_name}: FAIL - " + f"SQLite first: {sqlite_first}, PG first: {pg_first}, " + f"SQLite last: {sqlite_last}, PG last: {pg_last}" + ) + all_match = False + + return all_match + + +def main(): + parser = argparse.ArgumentParser( + description="Migrate Paper Dynasty from SQLite to PostgreSQL" + ) + parser.add_argument( + "--dry-run", action="store_true", help="Validate without making changes" + ) + parser.add_argument("--table", type=str, help="Migrate only this table") + parser.add_argument( + "--sqlite-path", + type=str, + default="storage/pd_master.db", + help="Path to SQLite database", + ) + parser.add_argument( + "--batch-size", type=int, default=500, help="Batch size for inserts" + ) + parser.add_argument( + "--skip-verification", action="store_true", help="Skip ID verification" + ) + args = parser.parse_args() + + logger.info("=" * 60) + logger.info("Paper Dynasty SQLite to PostgreSQL Migration") + logger.info("=" * 60) + + if args.dry_run: + logger.info("DRY RUN MODE - No changes will be made") + + # Connect to databases + try: + sqlite_conn = get_sqlite_connection(args.sqlite_path) + logger.info(f"Connected to SQLite: {args.sqlite_path}") + except FileNotFoundError as e: + logger.error(str(e)) + sys.exit(1) + + try: + pg_conn = get_postgres_connection() + logger.info( + f"Connected to PostgreSQL: {os.environ['POSTGRES_HOST']}/{os.environ['POSTGRES_DB']}" + ) + except EnvironmentError as e: + logger.error(str(e)) + sys.exit(1) + except psycopg2.Error as e: + logger.error(f"PostgreSQL connection failed: {e}") + sys.exit(1) + + # Determine tables to migrate + tables_to_migrate = [args.table] if args.table else MIGRATION_ORDER + + # Validate tables exist + available_tables = set() + cursor = sqlite_conn.execute("SELECT name FROM sqlite_master WHERE type='table'") + for row in cursor: + available_tables.add(row[0]) + + for table in tables_to_migrate: + if table not in available_tables: + logger.warning(f"Table {table} not found in SQLite, skipping") + tables_to_migrate.remove(table) + + # Migration summary + results = [] + start_time = datetime.now() + + logger.info(f"Migrating {len(tables_to_migrate)} tables...") + logger.info("-" * 60) + + for table_name in tables_to_migrate: + stats = migrate_table( + sqlite_conn, + pg_conn, + table_name, + batch_size=args.batch_size, + dry_run=args.dry_run, + ) + results.append(stats) + logger.info("-" * 60) + + # Summary + 
elapsed = datetime.now() - start_time + successful = sum(1 for r in results if r["success"]) + total_records = sum(r["inserted"] for r in results) + total_skipped = sum(r["skipped"] for r in results) + + logger.info("=" * 60) + logger.info("MIGRATION SUMMARY") + logger.info("=" * 60) + logger.info(f"Tables: {successful}/{len(results)} successful") + logger.info(f"Records: {total_records} inserted, {total_skipped} skipped") + logger.info(f"Duration: {elapsed}") + + # Failed tables + failed = [r for r in results if not r["success"]] + if failed: + logger.error("FAILED TABLES:") + for r in failed: + logger.error(f" - {r['table']}: {r['errors']}") + + # ID Verification (CRITICAL) + if not args.dry_run and not args.skip_verification: + logger.info("-" * 60) + logger.info("VERIFYING ID PRESERVATION...") + if verify_id_preservation(sqlite_conn, pg_conn): + logger.info("ID VERIFICATION: PASS - All IDs preserved correctly") + else: + logger.error("ID VERIFICATION: FAIL - IDs do not match!") + logger.error( + "THIS IS A CRITICAL FAILURE - Foreign key references may be broken" + ) + sys.exit(1) + + # Close connections + sqlite_conn.close() + pg_conn.close() + + if all(r["success"] for r in results): + logger.info("MIGRATION COMPLETE - SUCCESS") + sys.exit(0) + else: + logger.warning("MIGRATION COMPLETE - PARTIAL SUCCESS (some tables failed)") + sys.exit(1) + + +if __name__ == "__main__": + main()
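# --- Illustrative follow-up sketch (hypothetical, not part of the patch) ------
# verify_id_preservation() spot-checks the first and last IDs of a few tables; a
# fuller sanity check is to diff per-table row counts against the SQLite source,
# mirroring the "baseline comparison" counts that audit_sqlite.py prints. The
# sketch below assumes it is run from scripts/ so migrate_to_postgres.py is
# importable and the POSTGRES_* environment variables are set. Tables whose
# orphaned FK records were intentionally skipped during migration (e.g.
# battingstat, pitchingstat) will show an expected difference.
from migrate_to_postgres import (
    MIGRATION_ORDER,
    get_postgres_connection,
    get_record_count,
    get_sqlite_connection,
)


def compare_counts(sqlite_path: str = "storage/pd_master.db") -> bool:
    """Return True when every migrated table has identical row counts."""
    sqlite_conn = get_sqlite_connection(sqlite_path)
    pg_conn = get_postgres_connection()
    all_match = True
    for table in MIGRATION_ORDER:
        src = get_record_count(sqlite_conn, table, is_sqlite=True)
        dst = get_record_count(pg_conn, table, is_sqlite=False)
        status = "OK  " if src == dst else "DIFF"
        print(f"{status} {table:25} sqlite={src:>10,} postgres={dst:>10,}")
        all_match = all_match and src == dst
    sqlite_conn.close()
    pg_conn.close()
    return all_match


if __name__ == "__main__":
    raise SystemExit(0 if compare_counts() else 1)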