PostgreSQL migration: Complete code preparation phase
- Add db_helpers.py with cross-database upsert functions for SQLite/PostgreSQL
- Replace 12 on_conflict_replace() calls with PostgreSQL-compatible upserts
- Add unique indexes: StratPlay(game, play_num), Decision(game, pitcher)
- Add max_length to Team model fields (abbrev, sname, lname)
- Fix boolean comparison in teams.py (== 0/1 to == False/True)
- Create migrate_to_postgres.py with ID-preserving migration logic
- Create audit_sqlite.py for pre-migration data integrity checks
- Add PROJECT_PLAN.json for migration tracking
- Add .secrets/ to .gitignore for credentials

Audit results: 658,963 records across 29 tables, 2,390 orphaned stats (expected).
Based on Major Domo migration lessons learned (33 issues resolved there).
parent fbe8623eb4
commit 0cba52cea5
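The recurring incompatibility behind most of these changes: SQLite happily executes a bare .on_conflict_replace(), while PostgreSQL's ON CONFLICT needs an explicit conflict target and an update mapping. A minimal sketch of the conversion, following the plan's own MIG-008 suggested fix (only one updated field shown for brevity, and the batch rows are illustrative; the general-purpose helper is app/db_helpers.py below):

    from peewee import EXCLUDED
    from app.db_engine import Player

    batch = [{"player_id": 1, "p_name": "Example Player"}]  # illustrative rows

    # SQLite-only form being replaced:
    #   Player.insert_many(batch).on_conflict_replace().execute()

    # PostgreSQL-compatible upsert; the conflict target must be a primary key
    # or unique index:
    Player.insert_many(batch).on_conflict(
        conflict_target=[Player.player_id],
        action="update",
        update={Player.p_name: EXCLUDED.p_name},  # real code updates every field
    ).execute()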
.gitignore | 5 (vendored)
@@ -74,4 +74,7 @@ CLAUDE.md
 *.backup
 
 # PostgreSQL data directory (local mount)
 postgres_data/
+
+# PostgreSQL credentials
+.secrets/
PROJECT_PLAN.json | 482 (new file)
@@ -0,0 +1,482 @@
{
  "meta": {
    "version": "1.1.0",
    "created": "2026-01-25",
    "lastUpdated": "2026-01-25",
    "planType": "migration",
    "description": "SQLite to PostgreSQL migration for Paper Dynasty database API",
    "branch": "postgres-migration",
    "totalEstimatedHours": 22,
    "totalTasks": 16,
    "completedTasks": 13
  },
  "context": {
    "sourceDatabase": {
      "type": "SQLite",
      "file": "storage/pd_master.db",
      "size": "110 MB",
      "tables": 29,
      "totalRecords": 515000,
      "largestTable": {
        "name": "stratplay",
        "records": 332737
      }
    },
    "targetDatabase": {
      "type": "PostgreSQL 17",
      "server": "sba_postgres (same server as Major Domo)",
      "database": "pd_master",
      "user": "pd_admin",
      "credentialsFile": ".secrets/pd_admin_credentials.txt"
    },
    "lessonsFromMajorDomo": [
      "CRITICAL: Primary key IDs must be explicitly preserved during migration",
      "PostgreSQL GROUP BY requires ALL non-aggregated columns",
      "Boolean fields cannot be summed directly - cast to integer first",
      "Discord snowflake IDs must be strings, not integers (N/A for Paper Dynasty)",
      "VARCHAR fields need explicit max_length",
      "NULL constraints are stricter in PostgreSQL",
      "Foreign key orphaned records need smart fallback handling",
      "Reset sequences after ID-preserving inserts"
    ],
    "devServer": {
      "access": "ssh sba-db",
      "composeLocation": "cd container-data/dev-sba-database/"
    }
  },
  "categories": {
    "critical": "Must complete before migration - blocks production",
    "high": "Required for successful migration",
    "medium": "Improves migration quality/reliability",
    "low": "Polish and nice-to-have",
    "completed": "Already done on postgres-migration branch"
  },
  "tasks": [
    {
      "id": "MIG-001",
      "name": "Environment-based database configuration",
      "description": "Add PostgreSQL support with environment variable switching between SQLite/PostgreSQL",
      "category": "completed",
      "priority": 1,
      "completed": true,
      "tested": true,
      "dependencies": [],
      "files": [
        {
          "path": "app/db_engine.py",
          "lines": [11, 35],
          "issue": "Now supports DATABASE_TYPE env var for SQLite/PostgreSQL switching"
        }
      ],
      "suggestedFix": "Already implemented with PooledPostgresqlDatabase",
      "estimatedHours": 2,
      "notes": "Includes connection pooling (20 max, 5-min stale timeout, autorollback)"
    },
    {
      "id": "MIG-002",
      "name": "Add table_name to all models",
      "description": "Explicit table naming for PostgreSQL compatibility",
      "category": "completed",
      "priority": 2,
      "completed": true,
      "tested": true,
      "dependencies": [],
      "files": [
        {
          "path": "app/db_engine.py",
          "lines": [],
          "issue": "All 29 models now have Meta.table_name defined"
        }
      ],
      "suggestedFix": "Already implemented",
      "estimatedHours": 1,
      "notes": "Prevents Peewee naming inconsistencies"
    },
    {
      "id": "MIG-003",
      "name": "Fix GROUP BY queries for PostgreSQL",
      "description": "PostgreSQL requires all non-aggregated SELECT fields in GROUP BY clause",
      "category": "completed",
      "priority": 3,
      "completed": true,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "app/routers_v2/stratplays.py",
          "lines": [342, 456, 645, 733],
          "issue": "Conditionally build SELECT fields based on group_by parameter"
        }
      ],
      "suggestedFix": "Already implemented - needs testing with all group_by variations",
      "estimatedHours": 4,
      "notes": "Pattern: only include non-aggregated fields that will be in GROUP BY"
    },
    {
      "id": "MIG-004",
      "name": "Add psycopg2-binary dependency",
      "description": "PostgreSQL adapter for Python",
      "category": "completed",
      "priority": 4,
      "completed": true,
      "tested": true,
      "dependencies": [],
      "files": [
        {
          "path": "requirements.txt",
          "lines": [],
          "issue": "psycopg2-binary added"
        }
      ],
      "suggestedFix": "Already implemented",
      "estimatedHours": 0.1,
      "notes": ""
    },
    {
      "id": "MIG-005",
      "name": "Docker Compose for local testing",
      "description": "Local PostgreSQL environment for development testing",
      "category": "completed",
      "priority": 5,
      "completed": true,
      "tested": true,
      "dependencies": [],
      "files": [
        {
          "path": "docker-compose.yml",
          "lines": [],
          "issue": "PostgreSQL 17 + Adminer configured"
        },
        {
          "path": "QUICK_START.md",
          "lines": [],
          "issue": "Testing guide created"
        }
      ],
      "suggestedFix": "Already implemented",
      "estimatedHours": 1,
      "notes": "Adminer on port 8081"
    },
    {
      "id": "MIG-006",
      "name": "Migration script auto-detection",
      "description": "db_migrations.py auto-selects PostgresqlMigrator or SqliteMigrator",
      "category": "completed",
      "priority": 6,
      "completed": true,
      "tested": false,
      "dependencies": ["MIG-001"],
      "files": [
        {
          "path": "db_migrations.py",
          "lines": [],
          "issue": "Migrator selection based on DATABASE_TYPE"
        }
      ],
      "suggestedFix": "Already implemented",
      "estimatedHours": 0.5,
      "notes": ""
    },
    {
      "id": "MIG-007",
      "name": "Create data migration script with ID preservation",
      "description": "CRITICAL: Migrate all data from SQLite to PostgreSQL while preserving primary key IDs exactly",
      "category": "critical",
      "priority": 1,
      "completed": false,
      "tested": false,
      "dependencies": ["MIG-001", "MIG-002"],
      "files": [
        {
          "path": "scripts/migrate_to_postgres.py",
          "lines": [],
          "issue": "New file - must explicitly insert IDs and reset sequences"
        }
      ],
      "suggestedFix": "1. Read all records from SQLite\n2. Insert into PostgreSQL with explicit ID values\n3. Reset PostgreSQL sequences: SELECT setval('table_id_seq', MAX(id))\n4. Validate record counts match\n5. Smart FK error handling (batch insert with individual fallback)",
      "estimatedHours": 3,
      "notes": "Major Domo's #1 lesson: Without explicit ID preservation, PostgreSQL auto-assigns sequential IDs starting from 1, causing all FK references to point to wrong records"
    },
    {
      "id": "MIG-008",
      "name": "Fix on_conflict_replace() calls (Player model)",
      "description": "Convert SQLite on_conflict_replace() to PostgreSQL on_conflict() for Player model",
      "category": "critical",
      "priority": 2,
      "completed": false,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "main.py",
          "lines": [1696],
          "issue": "Player.insert_many(batch).on_conflict_replace()"
        },
        {
          "path": "app/routers_v2/players.py",
          "lines": [808],
          "issue": "Player.insert_many(batch).on_conflict_replace()"
        }
      ],
      "suggestedFix": "Player.insert_many(batch).on_conflict(\n    conflict_target=[Player.player_id],\n    action='update',\n    update={Player.p_name: EXCLUDED.p_name, ...all fields}\n).execute()",
      "estimatedHours": 0.5,
      "notes": "Player has explicit player_id primary key - straightforward"
    },
    {
      "id": "MIG-009",
      "name": "Fix on_conflict_replace() calls (Card models)",
      "description": "Convert SQLite on_conflict_replace() for BattingCard, PitchingCard, CardPosition, ratings",
      "category": "critical",
      "priority": 3,
      "completed": false,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "app/routers_v2/battingcards.py",
          "lines": [134],
          "issue": "BattingCard - unique on (player, variant)"
        },
        {
          "path": "app/routers_v2/pitchingcards.py",
          "lines": [130],
          "issue": "PitchingCard - unique on (player, variant)"
        },
        {
          "path": "app/routers_v2/cardpositions.py",
          "lines": [131],
          "issue": "CardPosition - unique on (player, variant, position)"
        },
        {
          "path": "app/routers_v2/battingcardratings.py",
          "lines": [549],
          "issue": "BattingCardRatings - unique on (battingcard, vs_hand)"
        },
        {
          "path": "app/routers_v2/pitchingcardratings.py",
          "lines": [432],
          "issue": "PitchingCardRatings - unique on (pitchingcard, vs_hand)"
        }
      ],
      "suggestedFix": "All have existing unique indexes - use those as conflict_target",
      "estimatedHours": 2,
      "notes": "These have many fields to update - consider helper function"
    },
    {
      "id": "MIG-010",
      "name": "Fix on_conflict_replace() calls (Game models)",
      "description": "Convert SQLite on_conflict_replace() for StratPlay, Decision, GauntletReward",
      "category": "critical",
      "priority": 4,
      "completed": false,
      "tested": false,
      "dependencies": ["MIG-011"],
      "files": [
        {
          "path": "app/routers_v2/stratplays.py",
          "lines": [1082],
          "issue": "StratPlay - needs unique index on (game, play_num)"
        },
        {
          "path": "app/routers_v2/decisions.py",
          "lines": [217],
          "issue": "Decision - needs unique index on (game, pitcher)"
        },
        {
          "path": "main.py",
          "lines": [4978],
          "issue": "GauntletReward - investigate if id provided or needs refactor"
        },
        {
          "path": "app/routers_v2/gauntletrewards.py",
          "lines": [127],
          "issue": "GauntletReward - same as main.py"
        }
      ],
      "suggestedFix": "Add unique indexes first (MIG-011), then implement on_conflict()",
      "estimatedHours": 1.5,
      "notes": "StratPlay and Decision need new unique indexes to be created first"
    },
    {
      "id": "MIG-011",
      "name": "Add missing unique indexes for upserts",
      "description": "Create unique indexes needed for PostgreSQL on_conflict() operations",
      "category": "high",
      "priority": 5,
      "completed": false,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "app/db_engine.py",
          "lines": [779, 848],
          "issue": "Add unique indexes for StratPlay and Decision"
        }
      ],
      "suggestedFix": "StratPlay: ModelIndex(StratPlay, (StratPlay.game, StratPlay.play_num), unique=True)\nDecision: ModelIndex(Decision, (Decision.game, Decision.pitcher), unique=True)",
      "estimatedHours": 1,
      "notes": "These are natural business keys - a play number should be unique within a game, and a pitcher should have one decision per game"
    },
    {
      "id": "MIG-012",
      "name": "Fix on_conflict_replace() for MlbPlayer",
      "description": "Convert or remove on_conflict_replace() for MlbPlayer",
      "category": "medium",
      "priority": 6,
      "completed": false,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "app/routers_v2/mlbplayers.py",
          "lines": [185],
          "issue": "MlbPlayer.insert_many(batch).on_conflict_replace()"
        }
      ],
      "suggestedFix": "Code already checks for duplicates before insert (lines 170-179) and raises HTTPException. The on_conflict_replace() may be unnecessary. Option 1: Remove it and use plain insert_many(). Option 2: Use on_conflict with id as target.",
      "estimatedHours": 0.25,
      "notes": "Low risk - pre-check rejects duplicates"
    },
    {
      "id": "MIG-013",
      "name": "Fix boolean comparison in teams.py",
      "description": "PostgreSQL requires True/False instead of 1/0 for boolean comparisons",
      "category": "low",
      "priority": 7,
      "completed": false,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "app/routers_v2/teams.py",
          "lines": [110, 112],
          "issue": "Team.has_guide == 0 / Team.has_guide == 1"
        }
      ],
      "suggestedFix": "Change to Team.has_guide == False / Team.has_guide == True",
      "estimatedHours": 0.25,
      "notes": "Peewee may handle this automatically, but explicit is better"
    },
    {
      "id": "MIG-014",
      "name": "SQLite data integrity audit",
      "description": "Check for NULL values, orphaned FKs, VARCHAR lengths before migration",
      "category": "high",
      "priority": 8,
      "completed": false,
      "tested": false,
      "dependencies": [],
      "files": [
        {
          "path": "scripts/audit_sqlite.py",
          "lines": [],
          "issue": "New file - pre-migration data validation"
        }
      ],
      "suggestedFix": "Create script to check:\n1. NULL values in NOT NULL fields\n2. Orphaned foreign key records\n3. VARCHAR field max lengths\n4. Table record counts for baseline",
      "estimatedHours": 1.5,
      "notes": "Major Domo found 206 orphaned decisions and VARCHAR violations"
    },
    {
      "id": "MIG-015",
      "name": "Test on dev PostgreSQL server",
      "description": "Full migration test on sba-db dev server with production data copy",
      "category": "high",
      "priority": 9,
      "completed": false,
      "tested": false,
      "dependencies": ["MIG-007", "MIG-008", "MIG-009", "MIG-010", "MIG-014"],
      "files": [],
      "suggestedFix": "1. ssh sba-db\n2. Create pd_master database with pd_admin user\n3. Copy production SQLite to dev\n4. Run migration script\n5. Verify record counts\n6. Test API endpoints",
      "estimatedHours": 3,
      "notes": "Dev server access: ssh sba-db, then cd container-data/dev-sba-database/"
    },
    {
      "id": "MIG-016",
      "name": "Production migration execution",
      "description": "Execute migration on production server within maintenance window",
      "category": "critical",
      "priority": 10,
      "completed": false,
      "tested": false,
      "dependencies": ["MIG-015"],
      "files": [],
      "suggestedFix": "1. Notify users of maintenance window\n2. Stop Paper Dynasty API\n3. Create SQLite backup\n4. Create pd_master database and pd_admin user\n5. Run migration script\n6. Verify data integrity\n7. Update docker-compose.yml with PostgreSQL env vars\n8. Start API\n9. Smoke test critical endpoints\n10. Announce migration complete",
      "estimatedHours": 3,
      "notes": "Downtime window: 1-4 hours. Have rollback plan ready."
    }
  ],
  "quickWins": [
    {
      "taskId": "MIG-013",
      "estimatedMinutes": 15,
      "impact": "Prevents boolean comparison issues in team queries"
    },
    {
      "taskId": "MIG-012",
      "estimatedMinutes": 15,
      "impact": "Simplify MlbPlayer insert logic"
    }
  ],
  "productionBlockers": [
    {
      "taskId": "MIG-007",
      "reason": "Without ID-preserving migration, all foreign key references will break"
    },
    {
      "taskId": "MIG-008",
      "reason": "Player upserts will fail without PostgreSQL-compatible syntax"
    },
    {
      "taskId": "MIG-009",
      "reason": "Card data upserts will fail without PostgreSQL-compatible syntax"
    },
    {
      "taskId": "MIG-010",
      "reason": "Game data upserts will fail without PostgreSQL-compatible syntax"
    }
  ],
  "weeklyRoadmap": {
    "week1": {
      "theme": "Code Changes - Make PostgreSQL Compatible",
      "tasks": ["MIG-007", "MIG-008", "MIG-009", "MIG-010", "MIG-011", "MIG-012", "MIG-013"],
      "estimatedHours": 8.5
    },
    "week2": {
      "theme": "Testing & Validation",
      "tasks": ["MIG-014", "MIG-015"],
      "estimatedHours": 4.5
    },
    "week3": {
      "theme": "Production Migration",
      "tasks": ["MIG-016"],
      "estimatedHours": 3
    }
  },
  "rollbackPlan": {
    "triggers": [
      "Data corruption detected",
      "More than 5% of endpoints failing",
      "Performance more than 5x worse than SQLite",
      "Critical functionality broken"
    ],
    "duringTesting": {
      "steps": [
        "Set DATABASE_TYPE=sqlite",
        "API immediately uses SQLite",
        "No data loss - PostgreSQL was a copy"
      ]
    },
    "afterProduction": {
      "steps": [
        "Stop API: docker-compose down",
        "Update docker-compose.yml: DATABASE_TYPE=sqlite",
        "Restore SQLite backup if needed",
        "Start API: docker-compose up -d",
        "Verify SQLite connectivity",
        "Document issues for retry"
      ],
      "timeLimit": "24 hours from migration"
    }
  }
}
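MIG-007 is the one task the plan flags as the hard production blocker, so its five suggested steps are worth seeing as code. A minimal sketch, assuming psycopg2 and a hypothetical team table with illustrative columns (the real script is scripts/migrate_to_postgres.py; PostgreSQL's default sequence name is <table>_<pk>_seq):

    import sqlite3
    import psycopg2

    src = sqlite3.connect("storage/pd_master.db")
    dst = psycopg2.connect(dbname="pd_master", user="pd_admin",
                           password="...", host="localhost")  # see .secrets/

    rows = src.execute("SELECT id, lname FROM team").fetchall()  # columns illustrative
    with dst, dst.cursor() as cur:
        # Steps 1-2: insert with explicit IDs so FK references stay valid
        cur.executemany("INSERT INTO team (id, lname) VALUES (%s, %s)", rows)
        # Step 3: reset the sequence so new inserts don't collide with copied IDs
        cur.execute("SELECT setval('team_id_seq', (SELECT MAX(id) FROM team))")
        # Step 4: validate record counts against the SQLite baseline
        cur.execute("SELECT COUNT(*) FROM team")
        assert cur.fetchone()[0] == len(rows)
    # Step 5 (smart FK fallback) would wrap the executemany in a try/except that
    # retries rows one at a time, as the Major Domo migration did.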
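MIG-003's pattern ("only include non-aggregated fields that will be in GROUP BY") is easy to get wrong, since SQLite tolerates selecting ungrouped columns and PostgreSQL rejects the query outright. A hedged illustration with a hypothetical group_by parameter and field name; the real queries are in app/routers_v2/stratplays.py:

    from peewee import fn
    from app.db_engine import StratPlay

    def play_counts(group_by: str = "game"):
        # Build SELECT from the same field that goes into GROUP BY, so the
        # query is valid on both engines. Field name is illustrative.
        group_field = getattr(StratPlay, group_by)
        return (
            StratPlay.select(group_field, fn.COUNT(StratPlay.id).alias("plays"))
            .group_by(group_field)
        )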
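MIG-014's audit checks produced the "2,390 orphaned stats" figure in the commit message. One orphaned-FK check might look like this sketch, where the join column names are assumptions (the real checks live in scripts/audit_sqlite.py):

    import sqlite3

    con = sqlite3.connect("storage/pd_master.db")
    # Count batting stats whose player FK points at a missing player row.
    orphans = con.execute(
        """SELECT COUNT(*) FROM battingstat b
           LEFT JOIN player p ON b.player_id = p.player_id
           WHERE p.player_id IS NULL"""
    ).fetchone()[0]
    print(f"orphaned batting stats: {orphans}")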
app/db_engine.py | 328
@@ -10,39 +10,36 @@ from peewee import ModelSelect
 from playhouse.shortcuts import model_to_dict
 
 # Database configuration - supports both SQLite and PostgreSQL
-DATABASE_TYPE = os.environ.get('DATABASE_TYPE', 'sqlite')
+DATABASE_TYPE = os.environ.get("DATABASE_TYPE", "sqlite")
 
-if DATABASE_TYPE.lower() == 'postgresql':
+if DATABASE_TYPE.lower() == "postgresql":
     from playhouse.pool import PooledPostgresqlDatabase
 
     db = PooledPostgresqlDatabase(
-        os.environ.get('POSTGRES_DB', 'pd_master'),
-        user=os.environ.get('POSTGRES_USER', 'pd_admin'),
-        password=os.environ.get('POSTGRES_PASSWORD'),
-        host=os.environ.get('POSTGRES_HOST', 'localhost'),
-        port=int(os.environ.get('POSTGRES_PORT', '5432')),
+        os.environ.get("POSTGRES_DB", "pd_master"),
+        user=os.environ.get("POSTGRES_USER", "pd_admin"),
+        password=os.environ.get("POSTGRES_PASSWORD"),
+        host=os.environ.get("POSTGRES_HOST", "localhost"),
+        port=int(os.environ.get("POSTGRES_PORT", "5432")),
         max_connections=20,
         stale_timeout=300,  # 5 minutes
         timeout=0,
         autoconnect=True,
-        autorollback=True  # Automatically rollback failed transactions
+        autorollback=True,  # Automatically rollback failed transactions
     )
 else:
     # Default SQLite configuration for local development
     db = SqliteDatabase(
-        'storage/pd_master.db',
-        pragmas={
-            'journal_mode': 'wal',
-            'cache_size': -1 * 64000,
-            'synchronous': 0
-        }
+        "storage/pd_master.db",
+        pragmas={"journal_mode": "wal", "cache_size": -1 * 64000, "synchronous": 0},
     )
 
-date = f'{datetime.now().year}-{datetime.now().month}-{datetime.now().day}'
-log_level = logging.INFO if os.environ.get('LOG_LEVEL') == 'INFO' else 'WARN'
+date = f"{datetime.now().year}-{datetime.now().month}-{datetime.now().day}"
+log_level = logging.INFO if os.environ.get("LOG_LEVEL") == "INFO" else "WARN"
 logging.basicConfig(
-    filename=f'logs/database/{date}.log',
-    format='%(asctime)s - database - %(levelname)s - %(message)s',
-    level=log_level
+    filename=f"logs/database/{date}.log",
+    format="%(asctime)s - database - %(levelname)s - %(message)s",
+    level=log_level,
 )
 
 # 2025, 2005
@@ -50,63 +47,66 @@ ranked_cardsets = [24, 25, 26, 27, 28, 29]
 LIVE_CARDSET_ID = 27
 LIVE_PROMO_CARDSET_ID = 28
 CARDSETS = {
-    'ranked': {
-        'primary': ranked_cardsets,
-        'human': ranked_cardsets
-    },
-    'minor-league': {
-        'primary': [27, 8],  # 2005, Mario
-        'secondary': [24],  # 2025
-        'human': [x for x in range(1, 30)]
-    },
-    'major-league': {
-        'primary': [27, 28, 24, 25, 13, 14, 6, 8],  # 2005 + Promos, 2025 + Promos, 2018 + Promos, 2012, Mario
-        'secondary': [5, 3],  # 2019, 2022
-        'human': ranked_cardsets
-    },
-    'hall-of-fame': {
-        'primary': [x for x in range(1, 30)],
-        'human': ranked_cardsets
-    },
-    'flashback': {
-        'primary': [13, 5, 1, 3, 8],  # 2018, 2019, 2021, 2022, Mario
-        'secondary': [24],  # 2025
-        'human': [13, 5, 1, 3, 8]  # 2018, 2019, 2021, 2022
-    },
-    'gauntlet-3': {
-        'primary': [13],  # 2018
-        'secondary': [5, 11, 9],  # 2019, 2016, 2023
-        'human': [x for x in range(1, 30)]
-    },
-    'gauntlet-4': {
-        'primary': [3, 6, 16],  # 2022, 2013, Backyard Baseball
-        'secondary': [4, 9],  # 2022 Promos, 2023
-        'human': [3, 4, 6, 9, 15, 16]
-    },
-    'gauntlet-5': {
-        'primary': [17, 8],  # 2024, Mario
-        'secondary': [13],  # 2018
-        'human': [x for x in range(1, 30)]
-    },
-    'gauntlet-6': {
-        'primary': [20, 8],  # 1998, Mario
-        'secondary': [12],  # 2008
-        'human': [x for x in range(1, 30)]
-    },
-    'gauntlet-7': {
-        'primary': [5, 23],  # 2019, Brilliant Stars
-        'secondary': [1],  # 2021
-        'human': [x for x in range(1, 30)]
-    },
-    'gauntlet-8': {
-        'primary': [24],  # 2025
-        'secondary': [17],
-        'human': [24, 25, 22, 23]
-    },
-    'gauntlet-9': {
-        'primary': [27],  # 2005
-        'secondary': [24]  # 2025
-    }
+    "ranked": {"primary": ranked_cardsets, "human": ranked_cardsets},
+    "minor-league": {
+        "primary": [27, 8],  # 2005, Mario
+        "secondary": [24],  # 2025
+        "human": [x for x in range(1, 30)],
+    },
+    "major-league": {
+        "primary": [
+            27,
+            28,
+            24,
+            25,
+            13,
+            14,
+            6,
+            8,
+        ],  # 2005 + Promos, 2025 + Promos, 2018 + Promos, 2012, Mario
+        "secondary": [5, 3],  # 2019, 2022
+        "human": ranked_cardsets,
+    },
+    "hall-of-fame": {"primary": [x for x in range(1, 30)], "human": ranked_cardsets},
+    "flashback": {
+        "primary": [13, 5, 1, 3, 8],  # 2018, 2019, 2021, 2022, Mario
+        "secondary": [24],  # 2025
+        "human": [13, 5, 1, 3, 8],  # 2018, 2019, 2021, 2022
+    },
+    "gauntlet-3": {
+        "primary": [13],  # 2018
+        "secondary": [5, 11, 9],  # 2019, 2016, 2023
+        "human": [x for x in range(1, 30)],
+    },
+    "gauntlet-4": {
+        "primary": [3, 6, 16],  # 2022, 2013, Backyard Baseball
+        "secondary": [4, 9],  # 2022 Promos, 2023
+        "human": [3, 4, 6, 9, 15, 16],
+    },
+    "gauntlet-5": {
+        "primary": [17, 8],  # 2024, Mario
+        "secondary": [13],  # 2018
+        "human": [x for x in range(1, 30)],
+    },
+    "gauntlet-6": {
+        "primary": [20, 8],  # 1998, Mario
+        "secondary": [12],  # 2008
+        "human": [x for x in range(1, 30)],
+    },
+    "gauntlet-7": {
+        "primary": [5, 23],  # 2019, Brilliant Stars
+        "secondary": [1],  # 2021
+        "human": [x for x in range(1, 30)],
+    },
+    "gauntlet-8": {
+        "primary": [24],  # 2025
+        "secondary": [17],
+        "human": [24, 25, 22, 23],
+    },
+    "gauntlet-9": {
+        "primary": [27],  # 2005
+        "secondary": [24],  # 2025
+    },
 }
@@ -122,7 +122,7 @@ def model_to_csv(this_obj, exclude=None) -> List:
 
 def query_to_csv(all_items: ModelSelect, exclude=None):
     if all_items.count() == 0:
-        data_list = [['No data found']]
+        data_list = [["No data found"]]
     else:
         data_list = [model_csv_headers(all_items[0], exclude=exclude)]
         for x in all_items:
@@ -133,29 +133,29 @@ def query_to_csv(all_items: ModelSelect, exclude=None):
 
 def complex_data_to_csv(complex_data: List):
     if len(complex_data) == 0:
-        data_list = [['No data found']]
+        data_list = [["No data found"]]
     else:
         data_list = [[x for x in complex_data[0].keys()]]
         for line in complex_data:
-            logging.debug(f'line: {line}')
+            logging.debug(f"line: {line}")
             this_row = []
             for key in line:
-                logging.debug(f'key: {key}')
+                logging.debug(f"key: {key}")
                 if line[key] is None:
-                    this_row.append('')
+                    this_row.append("")
 
                 elif isinstance(line[key], dict):
-                    if 'name' in line[key]:
-                        this_row.append(line[key]['name'])
-                    elif 'abbrev' in line[key]:
-                        this_row.append(line[key]['abbrev'])
+                    if "name" in line[key]:
+                        this_row.append(line[key]["name"])
+                    elif "abbrev" in line[key]:
+                        this_row.append(line[key]["abbrev"])
                     else:
-                        this_row.append(line[key]['id'])
+                        this_row.append(line[key]["id"])
 
                 elif isinstance(line[key], int) and line[key] > 100000000:
                     this_row.append(f"'{line[key]}")
 
-                elif isinstance(line[key], str) and ',' in line[key]:
+                elif isinstance(line[key], str) and "," in line[key]:
                     this_row.append(line[key].replace(",", "-_-"))
 
                 else:
@@ -180,7 +180,7 @@ class Current(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'current'
+        table_name = "current"
 
     @staticmethod
     def latest():
@@ -198,7 +198,7 @@ class Rarity(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'rarity'
+        table_name = "rarity"
 
     def __str__(self):
         return self.name
@@ -217,7 +217,7 @@ class Event(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'event'
+        table_name = "event"
 
 
 db.create_tables([Event])
@@ -234,7 +234,7 @@ class Cardset(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'cardset'
+        table_name = "cardset"
 
     def __str__(self):
         return self.name
@@ -254,7 +254,7 @@ class MlbPlayer(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'mlbplayer'
+        table_name = "mlbplayer"
 
 
 db.create_tables([MlbPlayer])
@@ -289,7 +289,7 @@ class Player(BaseModel):
     mlbplayer = ForeignKeyField(MlbPlayer, null=True)
 
     def __str__(self):
-        return f'{self.cardset} {self.p_name} ({self.rarity.name})'
+        return f"{self.cardset} {self.p_name} ({self.rarity.name})"
 
     # def __eq__(self, other):
     #     if self.cardset.id == other.cardset.id and self.name == other.name:
@@ -310,21 +310,21 @@ class Player(BaseModel):
     def get_all_pos(self):
         all_pos = []
 
-        if self.pos_1 and self.pos_1 != 'CP':
+        if self.pos_1 and self.pos_1 != "CP":
             all_pos.append(self.pos_1)
-        if self.pos_2 and self.pos_2 != 'CP':
+        if self.pos_2 and self.pos_2 != "CP":
             all_pos.append(self.pos_2)
-        if self.pos_3 and self.pos_3 != 'CP':
+        if self.pos_3 and self.pos_3 != "CP":
             all_pos.append(self.pos_3)
-        if self.pos_4 and self.pos_4 != 'CP':
+        if self.pos_4 and self.pos_4 != "CP":
             all_pos.append(self.pos_4)
-        if self.pos_5 and self.pos_5 != 'CP':
+        if self.pos_5 and self.pos_5 != "CP":
             all_pos.append(self.pos_5)
-        if self.pos_6 and self.pos_6 != 'CP':
+        if self.pos_6 and self.pos_6 != "CP":
             all_pos.append(self.pos_6)
-        if self.pos_7 and self.pos_7 != 'CP':
+        if self.pos_7 and self.pos_7 != "CP":
             all_pos.append(self.pos_7)
-        if self.pos_8 and self.pos_8 != 'CP':
+        if self.pos_8 and self.pos_8 != "CP":
             all_pos.append(self.pos_8)
 
         return all_pos
@@ -338,33 +338,33 @@ class Player(BaseModel):
         # 'mvp': 2500,
         # 'hof': 999999999
        # }
-        logging.info(f'{self.p_name} cost changing from: {self.cost}')
-        self.cost = max(math.floor(self.cost * .95), 1)
+        logging.info(f"{self.p_name} cost changing from: {self.cost}")
+        self.cost = max(math.floor(self.cost * 0.95), 1)
         # if self.quantity != 999:
         #     self.quantity += 1
-        logging.info(f'{self.p_name} cost now: {self.cost}')
+        logging.info(f"{self.p_name} cost now: {self.cost}")
         self.save()
 
     def change_on_buy(self):
-        logging.info(f'{self.p_name} cost changing from: {self.cost}')
+        logging.info(f"{self.p_name} cost changing from: {self.cost}")
         self.cost = math.ceil(self.cost * 1.1)
         # if self.quantity != 999:
         #     self.quantity -= 1
-        logging.info(f'{self.p_name} cost now: {self.cost}')
+        logging.info(f"{self.p_name} cost now: {self.cost}")
         self.save()
 
     class Meta:
         database = db
-        table_name = 'player'
+        table_name = "player"
 
 
 db.create_tables([Player])
 
 
 class Team(BaseModel):
-    abbrev = CharField()
-    sname = CharField()
-    lname = CharField()
+    abbrev = CharField(max_length=20)  # Gauntlet teams use prefixes like "Gauntlet-NCB"
+    sname = CharField(max_length=100)
+    lname = CharField(max_length=255)
     gmid = IntegerField()
     gmname = CharField()
     gsheet = CharField()
@@ -381,7 +381,7 @@ class Team(BaseModel):
     is_ai = IntegerField(null=True)
 
     def __str__(self):
-        return f'S{self.season} {self.lname}'
+        return f"S{self.season} {self.lname}"
 
     @staticmethod
     def get_by_owner(gmid, season=None):
@@ -407,13 +407,13 @@ class Team(BaseModel):
         return Team.get_or_none(Team.season == season, Team.abbrev == abbrev.upper())
 
     def team_hash(self):
-        hash_string = f'{self.sname[-1]}{self.gmid / 6950123:.0f}{self.sname[-2]}{self.gmid / 42069123:.0f}'
-        logging.info(f'string: {hash_string}')
+        hash_string = f"{self.sname[-1]}{self.gmid / 6950123:.0f}{self.sname[-2]}{self.gmid / 42069123:.0f}"
+        logging.info(f"string: {hash_string}")
         return hash_string
 
     class Meta:
         database = db
-        table_name = 'team'
+        table_name = "team"
 
 
 db.create_tables([Team])
@@ -428,7 +428,7 @@ class PackType(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'packtype'
+        table_name = "packtype"
 
 
 db.create_tables([PackType])
@@ -443,7 +443,7 @@ class Pack(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'pack'
+        table_name = "pack"
 
 
 db.create_tables([Pack])
@@ -457,9 +457,9 @@ class Card(BaseModel):
 
     def __str__(self):
         if self.player:
-            return f'{self.player} - {self.team.sname}'
+            return f"{self.player} - {self.team.sname}"
         else:
-            return f'Blank - {self.team.sname}'
+            return f"Blank - {self.team.sname}"
 
     @staticmethod
     def select_season(season):
@@ -467,7 +467,7 @@ class Card(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'card'
+        table_name = "card"
 
 
 db.create_tables([Card])
@@ -505,7 +505,7 @@ class Roster(BaseModel):
     card_26 = ForeignKeyField(Card)
 
     def __str__(self):
-        return f'{self.team} Roster'
+        return f"{self.team} Roster"
 
     # def get_cards(self, team):
     #     all_cards = Card.select().where(Card.roster == self)
@@ -519,7 +519,7 @@ class Roster(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'roster'
+        table_name = "roster"
 
 
 class Result(BaseModel):
@@ -546,7 +546,7 @@ class Result(BaseModel):
 
     class Meta:
        database = db
-        table_name = 'result'
+        table_name = "result"
 
 
 class BattingStat(BaseModel):
@@ -589,7 +589,7 @@ class BattingStat(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'battingstat'
+        table_name = "battingstat"
 
 
 class PitchingStat(BaseModel):
@@ -623,7 +623,7 @@ class PitchingStat(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'pitchingstat'
+        table_name = "pitchingstat"
 
 
 class Award(BaseModel):
@@ -636,17 +636,17 @@ class Award(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'award'
+        table_name = "award"
 
 
 class Paperdex(BaseModel):
     team = ForeignKeyField(Team)
     player = ForeignKeyField(Player)
-    created = DateTimeField(default=int(datetime.timestamp(datetime.now())*1000))
+    created = DateTimeField(default=int(datetime.timestamp(datetime.now()) * 1000))
 
     class Meta:
         database = db
-        table_name = 'paperdex'
+        table_name = "paperdex"
 
     # def add_to_paperdex(self, team, cards: list):
     #     for x in players:
@@ -665,7 +665,7 @@ class Reward(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'reward'
+        table_name = "reward"
 
 
 class GameRewards(BaseModel):
@@ -676,7 +676,7 @@ class GameRewards(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'gamerewards'
+        table_name = "gamerewards"
 
 
 class Notification(BaseModel):
@@ -690,7 +690,7 @@ class Notification(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'notification'
+        table_name = "notification"
 
 
 class GauntletReward(BaseModel):
@@ -702,7 +702,7 @@ class GauntletReward(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'gauntletreward'
+        table_name = "gauntletreward"
 
 
 class GauntletRun(BaseModel):
@@ -711,18 +711,29 @@ class GauntletRun(BaseModel):
     wins = IntegerField(default=0)
     losses = IntegerField(default=0)
     gsheet = CharField(null=True)
-    created = DateTimeField(default=int(datetime.timestamp(datetime.now())*1000))
+    created = DateTimeField(default=int(datetime.timestamp(datetime.now()) * 1000))
     ended = DateTimeField(default=0)
 
     class Meta:
         database = db
-        table_name = 'gauntletrun'
+        table_name = "gauntletrun"
 
 
-db.create_tables([
-    Roster, BattingStat, PitchingStat, Result, Award, Paperdex, Reward, GameRewards, Notification, GauntletReward,
-    GauntletRun
-])
+db.create_tables(
+    [
+        Roster,
+        BattingStat,
+        PitchingStat,
+        Result,
+        Award,
+        Paperdex,
+        Reward,
+        GameRewards,
+        Notification,
+        GauntletReward,
+        GauntletRun,
+    ]
+)
 
 
 class BattingCard(BaseModel):
@@ -736,20 +747,22 @@ class BattingCard(BaseModel):
     hit_and_run = CharField()
     running = IntegerField()
     offense_col = IntegerField()
-    hand = CharField(default='R')
+    hand = CharField(default="R")
 
     class Meta:
         database = db
-        table_name = 'battingcard'
+        table_name = "battingcard"
 
 
-bc_index = ModelIndex(BattingCard, (BattingCard.player, BattingCard.variant), unique=True)
+bc_index = ModelIndex(
+    BattingCard, (BattingCard.player, BattingCard.variant), unique=True
+)
 BattingCard.add_index(bc_index)
 
 
 class BattingCardRatings(BaseModel):
     battingcard = ForeignKeyField(BattingCard)
-    vs_hand = CharField(default='R')
+    vs_hand = CharField(default="R")
     pull_rate = FloatField()
     center_rate = FloatField()
     slap_rate = FloatField()
@@ -781,11 +794,13 @@ class BattingCardRatings(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'battingcardratings'
+        table_name = "battingcardratings"
 
 
 bcr_index = ModelIndex(
-    BattingCardRatings, (BattingCardRatings.battingcard, BattingCardRatings.vs_hand), unique=True
+    BattingCardRatings,
+    (BattingCardRatings.battingcard, BattingCardRatings.vs_hand),
+    unique=True,
 )
 BattingCardRatings.add_index(bcr_index)
 
@@ -801,20 +816,22 @@ class PitchingCard(BaseModel):
     closer_rating = IntegerField(null=True)
     batting = CharField(null=True)
     offense_col = IntegerField()
-    hand = CharField(default='R')
+    hand = CharField(default="R")
 
     class Meta:
         database = db
-        table_name = 'pitchingcard'
+        table_name = "pitchingcard"
 
 
-pc_index = ModelIndex(PitchingCard, (PitchingCard.player, PitchingCard.variant), unique=True)
+pc_index = ModelIndex(
+    PitchingCard, (PitchingCard.player, PitchingCard.variant), unique=True
+)
 PitchingCard.add_index(pc_index)
 
 
 class PitchingCardRatings(BaseModel):
     pitchingcard = ForeignKeyField(PitchingCard)
-    vs_hand = CharField(default='R')
+    vs_hand = CharField(default="R")
     homerun = FloatField()
     bp_homerun = FloatField()
     triple = FloatField()
@@ -848,11 +865,13 @@ class PitchingCardRatings(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'pitchingcardratings'
+        table_name = "pitchingcardratings"
 
 
 pcr_index = ModelIndex(
-    PitchingCardRatings, (PitchingCardRatings.pitchingcard, PitchingCardRatings.vs_hand), unique=True
+    PitchingCardRatings,
+    (PitchingCardRatings.pitchingcard, PitchingCardRatings.vs_hand),
+    unique=True,
 )
 PitchingCardRatings.add_index(pcr_index)
 
@@ -870,16 +889,20 @@ class CardPosition(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'cardposition'
+        table_name = "cardposition"
 
 
 pos_index = ModelIndex(
-    CardPosition, (CardPosition.player, CardPosition.variant, CardPosition.position), unique=True
+    CardPosition,
+    (CardPosition.player, CardPosition.variant, CardPosition.position),
+    unique=True,
 )
 CardPosition.add_index(pos_index)
 
 
-db.create_tables([BattingCard, BattingCardRatings, PitchingCard, PitchingCardRatings, CardPosition])
+db.create_tables(
+    [BattingCard, BattingCardRatings, PitchingCard, PitchingCardRatings, CardPosition]
+)
 
 
 class StratGame(BaseModel):
@@ -900,7 +923,7 @@ class StratGame(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'stratgame'
+        table_name = "stratgame"
 
 
 class StratPlay(BaseModel):
@@ -973,7 +996,15 @@ class StratPlay(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'stratplay'
+        table_name = "stratplay"
 
 
+# Unique index for StratPlay - a play number should be unique within a game
+# Required for PostgreSQL on_conflict() upsert operations
+stratplay_index = ModelIndex(
+    StratPlay, (StratPlay.game, StratPlay.play_num), unique=True
+)
+StratPlay.add_index(stratplay_index)
+
+
 class Decision(BaseModel):
@@ -995,7 +1026,13 @@ class Decision(BaseModel):
 
     class Meta:
         database = db
-        table_name = 'decision'
+        table_name = "decision"
 
 
+# Unique index for Decision - one decision per pitcher per game
+# Required for PostgreSQL on_conflict() upsert operations
+decision_index = ModelIndex(Decision, (Decision.game, Decision.pitcher), unique=True)
+Decision.add_index(decision_index)
+
+
 db.create_tables([StratGame, StratPlay, Decision])
@@ -1176,4 +1213,3 @@ db.close()
 #
 #
 # scout_db.close()
-
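One model-level note before the new helper module: the teams.py boolean fix (MIG-013) is part of this commit, but its diff falls outside this excerpt. The pattern, sketched against the Team model; whether has_guide maps to a true boolean column is an assumption here, and the Cast-based sum illustrates the related Major Domo lesson rather than code from this commit:

    from peewee import Cast, fn
    from app.db_engine import Team

    # PostgreSQL rejects integer comparisons on boolean columns:
    no_guide = Team.select().where(Team.has_guide == False)  # was: == 0

    # Related lesson: booleans cannot be SUMmed directly - cast to integer first.
    guides = Team.select(fn.SUM(Cast(Team.has_guide, "integer"))).scalar()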
284
app/db_helpers.py
Normal file
284
app/db_helpers.py
Normal file
@ -0,0 +1,284 @@
|
|||||||
|
"""
|
||||||
|
Database helper functions for PostgreSQL compatibility.
|
||||||
|
|
||||||
|
This module provides cross-database compatible upsert operations that work
|
||||||
|
with both SQLite and PostgreSQL.
|
||||||
|
|
||||||
|
The key difference:
|
||||||
|
- SQLite: .on_conflict_replace() works directly
|
||||||
|
- PostgreSQL: Requires .on_conflict() with explicit conflict_target and update dict
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
from app.db_helpers import upsert_many, DATABASE_TYPE
|
||||||
|
|
||||||
|
# Instead of:
|
||||||
|
Model.insert_many(batch).on_conflict_replace().execute()
|
||||||
|
|
||||||
|
# Use:
|
||||||
|
upsert_many(Model, batch, conflict_fields=['field1', 'field2'])
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from typing import Any, Dict, List, Type, Union
|
||||||
|
|
||||||
|
from peewee import Model, SQL
|
||||||
|
|
||||||
|
# Re-export DATABASE_TYPE for convenience
|
||||||
|
DATABASE_TYPE = os.environ.get("DATABASE_TYPE", "sqlite").lower()
|
||||||
|
|
||||||
|
|
||||||
|
def get_model_fields(model: Type[Model], exclude: List[str] = None) -> List[str]:
|
||||||
|
"""
|
||||||
|
Get all field names for a model, excluding specified fields.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
model: Peewee Model class
|
||||||
|
exclude: Field names to exclude (e.g., primary key)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of field names
|
||||||
|
"""
|
||||||
|
if exclude is None:
|
||||||
|
exclude = []
|
||||||
|
|
||||||
|
return [
|
||||||
|
field.name for field in model._meta.sorted_fields if field.name not in exclude
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def upsert_many(
|
||||||
|
model: Type[Model],
|
||||||
|
data: List[Dict[str, Any]],
|
||||||
|
conflict_fields: List[str],
|
||||||
|
update_fields: List[str] = None,
|
||||||
|
batch_size: int = 100,
|
||||||
|
) -> int:
|
||||||
|
"""
|
||||||
|
Insert or update multiple records in a database-agnostic way.
|
||||||
|
|
||||||
|
Works with both SQLite (on_conflict_replace) and PostgreSQL (on_conflict).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
model: Peewee Model class
|
||||||
|
data: List of dictionaries with field values
|
||||||
|
conflict_fields: Fields that define uniqueness (for PostgreSQL ON CONFLICT)
|
||||||
|
update_fields: Fields to update on conflict (defaults to all non-conflict fields)
|
||||||
|
batch_size: Number of records per batch
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of records processed
|
||||||
|
|
||||||
|
Example:
|
||||||
|
# For BattingCard with unique constraint on (player, variant)
|
||||||
|
upsert_many(
|
||||||
|
BattingCard,
|
||||||
|
batch_data,
|
||||||
|
conflict_fields=['player', 'variant']
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
if not data:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
total = 0
|
||||||
|
|
||||||
|
# Determine update fields if not specified
|
||||||
|
if update_fields is None:
|
||||||
|
# Get primary key name
|
||||||
|
pk_name = model._meta.primary_key.name if model._meta.primary_key else "id"
|
||||||
|
# Update all fields except PK and conflict fields
|
||||||
|
exclude = [pk_name] + conflict_fields
|
||||||
|
update_fields = get_model_fields(model, exclude=exclude)
|
||||||
|
|
||||||
|
# Process in batches
|
||||||
|
for i in range(0, len(data), batch_size):
|
||||||
|
batch = data[i : i + batch_size]
|
||||||
|
|
||||||
|
if DATABASE_TYPE == "postgresql":
|
||||||
|
# PostgreSQL: Use ON CONFLICT with explicit target and update
|
||||||
|
from peewee import EXCLUDED
|
||||||
|
|
||||||
|
# Build conflict target - get actual field objects
|
||||||
|
conflict_target = [getattr(model, f) for f in conflict_fields]
|
||||||
|
|
||||||
|
# Build update dict
|
||||||
|
update_dict = {
|
||||||
|
getattr(model, f): EXCLUDED[f]
|
||||||
|
for f in update_fields
|
||||||
|
if hasattr(model, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
if update_dict:
|
||||||
|
model.insert_many(batch).on_conflict(
|
||||||
|
conflict_target=conflict_target, action="update", update=update_dict
|
||||||
|
).execute()
|
||||||
|
else:
|
||||||
|
# No fields to update, just ignore conflicts
|
||||||
|
model.insert_many(batch).on_conflict_ignore().execute()
|
||||||
|
else:
|
||||||
|
# SQLite: Use on_conflict_replace (simpler)
|
||||||
|
model.insert_many(batch).on_conflict_replace().execute()
|
||||||
|
|
||||||
|
total += len(batch)
|
||||||
|
|
||||||
|
return total
|
||||||
|
|
||||||
|
|
||||||
|
def upsert_by_pk(
|
||||||
|
model: Type[Model],
|
||||||
|
data: List[Dict[str, Any]],
|
||||||
|
pk_field: str = None,
|
||||||
|
batch_size: int = 100,
|
||||||
|
) -> int:
|
||||||
|
"""
|
||||||
|
Upsert records using primary key as conflict target.
|
||||||
|
|
||||||
|
This is for models where the primary key is explicitly provided in the data
|
||||||
|
(like Player with player_id).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
model: Peewee Model class
|
||||||
|
data: List of dictionaries with field values (including PK)
|
||||||
|
pk_field: Primary key field name (auto-detected if not specified)
|
||||||
|
batch_size: Number of records per batch
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of records processed
|
||||||
|
|
||||||
|
Example:
|
||||||
|
# For Player with explicit player_id
|
||||||
|
upsert_by_pk(Player, player_data, pk_field='player_id')
|
||||||
|
"""
|
||||||
|
if not data:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# Auto-detect primary key
|
||||||
|
if pk_field is None:
|
||||||
|
pk_field = model._meta.primary_key.name if model._meta.primary_key else "id"
|
||||||
|
|
||||||
|
return upsert_many(model, data, conflict_fields=[pk_field], batch_size=batch_size)
|
||||||
|
|
||||||
|
|
||||||
|
# Pre-configured upsert functions for specific models
|
||||||
|
# These encode the unique constraint knowledge for each model
|
||||||
|
|
||||||
|
|
||||||
|
# Pre-configured upsert functions for specific models
# These encode the unique constraint knowledge for each model


def upsert_players(data: List[Dict], batch_size: int = 15) -> int:
    """Upsert Player records using player_id as conflict target."""
    from app.db_engine import Player

    return upsert_by_pk(Player, data, pk_field="player_id", batch_size=batch_size)


def upsert_batting_cards(data: List[Dict], batch_size: int = 30) -> int:
    """Upsert BattingCard records using (player, variant) unique constraint."""
    from app.db_engine import BattingCard

    return upsert_many(
        BattingCard, data, conflict_fields=["player", "variant"], batch_size=batch_size
    )


def upsert_pitching_cards(data: List[Dict], batch_size: int = 30) -> int:
    """Upsert PitchingCard records using (player, variant) unique constraint."""
    from app.db_engine import PitchingCard

    return upsert_many(
        PitchingCard, data, conflict_fields=["player", "variant"], batch_size=batch_size
    )


def upsert_batting_card_ratings(data: List[Dict], batch_size: int = 30) -> int:
    """Upsert BattingCardRatings using (battingcard, vs_hand) unique constraint."""
    from app.db_engine import BattingCardRatings

    return upsert_many(
        BattingCardRatings,
        data,
        conflict_fields=["battingcard", "vs_hand"],
        batch_size=batch_size,
    )


def upsert_pitching_card_ratings(data: List[Dict], batch_size: int = 30) -> int:
    """Upsert PitchingCardRatings using (pitchingcard, vs_hand) unique constraint."""
    from app.db_engine import PitchingCardRatings

    return upsert_many(
        PitchingCardRatings,
        data,
        conflict_fields=["pitchingcard", "vs_hand"],
        batch_size=batch_size,
    )


def upsert_card_positions(data: List[Dict], batch_size: int = 30) -> int:
    """Upsert CardPosition using (player, variant, position) unique constraint."""
    from app.db_engine import CardPosition

    return upsert_many(
        CardPosition,
        data,
        conflict_fields=["player", "variant", "position"],
        batch_size=batch_size,
    )


def upsert_strat_plays(data: List[Dict], batch_size: int = 20) -> int:
    """Upsert StratPlay using (game, play_num) unique constraint."""
    from app.db_engine import StratPlay

    return upsert_many(
        StratPlay, data, conflict_fields=["game", "play_num"], batch_size=batch_size
    )


def upsert_decisions(data: List[Dict], batch_size: int = 10) -> int:
    """Upsert Decision using (game, pitcher) unique constraint."""
    from app.db_engine import Decision

    return upsert_many(
        Decision, data, conflict_fields=["game", "pitcher"], batch_size=batch_size
    )
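Taken together, these helpers give call sites a one-line, backend-agnostic upsert. A minimal usage sketch; the conflict keys come straight from upsert_strat_plays above, while the "outcome" field is hypothetical:

    from app.db_engine import db
    from app.db_helpers import upsert_strat_plays

    plays = [
        {"game": 1201, "play_num": 1, "outcome": "K"},
        {"game": 1201, "play_num": 2, "outcome": "1B"},
    ]
    with db.atomic():
        upsert_strat_plays(plays)  # re-running updates in place, no duplicates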
def upsert_gauntlet_rewards(data: List[Dict], batch_size: int = 15) -> int:
    """
    Upsert GauntletReward records.

    Note: GauntletReward doesn't have a natural unique key defined.
    For PostgreSQL, we use id if provided, otherwise insert-only.
    """
    from app.db_engine import GauntletReward

    # Check if any records have 'id' field
    has_ids = any("id" in record for record in data)

    if has_ids:
        return upsert_by_pk(GauntletReward, data, pk_field="id", batch_size=batch_size)
    else:
        # No IDs provided - just insert (may fail on duplicates)
        total = 0
        for i in range(0, len(data), batch_size):
            batch = data[i : i + batch_size]
            GauntletReward.insert_many(batch).execute()
            total += len(batch)
        return total


def upsert_mlb_players(data: List[Dict], batch_size: int = 15) -> int:
    """
    Upsert MlbPlayer records.

    Note: The calling code already checks for duplicates before insert,
    so this is effectively just an insert operation.
    """
    from app.db_engine import MlbPlayer

    # MlbPlayer doesn't have a good unique key other than id
    # Since duplicates are already checked, just insert
    total = 0
    for i in range(0, len(data), batch_size):
        batch = data[i : i + batch_size]
        MlbPlayer.insert_many(batch).execute()
        total += len(batch)
    return total
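The two insert-only helpers above depend on their callers to pre-filter duplicates. If that guarantee ever weakens, one hedge (a sketch, not part of this commit) is ON CONFLICT DO NOTHING, which both backends accept through peewee:

    from typing import Any, Dict, List, Type

    from peewee import Model


    def insert_ignoring_duplicates(
        model: Type[Model], data: List[Dict[str, Any]], batch_size: int = 100
    ) -> int:
        """Best-effort bulk insert that silently skips conflicting rows."""
        total = 0
        for i in range(0, len(data), batch_size):
            batch = data[i : i + batch_size]
            model.insert_many(batch).on_conflict_ignore().execute()
            total += len(batch)
        return total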
@@ -9,27 +9,34 @@ import pandas as pd
import pydantic
from pydantic import validator, root_validator

from ..db_engine import (
    db,
    BattingCardRatings,
    model_to_dict,
    chunked,
    BattingCard,
    Player,
    query_to_csv,
    Team,
    CardPosition,
)
from ..db_helpers import upsert_batting_card_ratings
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA, PRIVATE_IN_SCHEMA

logging.basicConfig(
    filename=LOG_DATA["filename"],
    format=LOG_DATA["format"],
    level=LOG_DATA["log_level"],
)

router = APIRouter(prefix="/api/v2/battingcardratings", tags=["battingcardratings"])
RATINGS_FILE = "storage/batting-ratings.csv"
BASIC_FILE = "storage/batting-basic.csv"


class BattingCardRatingsModel(pydantic.BaseModel):
    battingcard_id: int
    vs_hand: Literal["R", "L", "vR", "vL"]
    homerun: float = 0.0
    bp_homerun: float = 0.0
    triple: float = 0.0
@@ -61,33 +68,70 @@ class BattingCardRatingsModel(pydantic.BaseModel):
    @validator("avg", always=True)
    def avg_validator(cls, v, values, **kwargs):
        return (
            values["homerun"]
            + values["bp_homerun"] / 2
            + values["triple"]
            + values["double_three"]
            + values["double_two"]
            + values["double_pull"]
            + values["single_two"]
            + values["single_one"]
            + values["single_center"]
            + values["bp_single"] / 2
        ) / 108

    @validator("obp", always=True)
    def obp_validator(cls, v, values, **kwargs):
        return ((values["hbp"] + values["walk"]) / 108) + values["avg"]

    @validator("slg", always=True)
    def slg_validator(cls, v, values, **kwargs):
        return (
            values["homerun"] * 4
            + values["bp_homerun"] * 2
            + values["triple"] * 3
            + values["double_three"] * 2
            + values["double_two"] * 2
            + values["double_pull"] * 2
            + values["single_two"]
            + values["single_one"]
            + values["single_center"]
            + values["bp_single"] / 2
        ) / 108

    @root_validator(skip_on_failure=True)
    def validate_chance_total(cls, values):
        total_chances = (
            values["homerun"]
            + values["bp_homerun"]
            + values["triple"]
            + values["double_three"]
            + values["double_two"]
            + values["double_pull"]
            + values["single_two"]
            + values["single_one"]
            + values["single_center"]
            + values["bp_single"]
            + values["hbp"]
            + values["walk"]
            + values["strikeout"]
            + values["lineout"]
            + values["popout"]
            + values["flyout_a"]
            + values["flyout_bq"]
            + values["flyout_lf_b"]
            + values["flyout_rf_b"]
            + values["groundout_a"]
            + values["groundout_b"]
            + values["groundout_c"]
        )

        if round(total_chances) != 108:
            raise ValueError(
                f"BC {values['battingcard_id']} must have exactly 108 chances on the card "
                f"{values['vs_hand']}; {round(total_chances)} listed"
            )
        return values
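The 108-chance invariant is easiest to see with numbers. Assuming the chance fields elided from this hunk also default to 0.0 like the visible ones, a card whose result chances are split evenly between a single and a strikeout validates and produces a .500 average, since avg is just weighted hit chances over the 108 slots. A sketch, run in this module's context:

    # Hypothetical instance: 54 + 54 = 108 chances, so validation passes.
    r = BattingCardRatingsModel(
        battingcard_id=1,
        vs_hand="R",
        single_center=54.0,
        strikeout=54.0,
    )
    assert r.avg == 54.0 / 108  # 0.5
    assert r.obp == 0.5         # no walks or HBP to add on top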
@@ -100,18 +144,23 @@ class RatingsList(pydantic.BaseModel):
    ratings: List[BattingCardRatingsModel]


@router.get("")
async def get_card_ratings(
    team_id: int,
    ts: str,
    battingcard_id: list = Query(default=None),
    cardset_id: list = Query(default=None),
    vs_hand: Literal["R", "L", "vR", "vL"] = None,
    short_output: bool = False,
    csv: bool = False,
):
    this_team = Team.get_or_none(Team.id == team_id)
    # getattr guards the debug log against this_team being None
    logging.debug(f"Team: {this_team} / has_guide: {getattr(this_team, 'has_guide', None)}")
    if this_team is None or ts != this_team.team_hash() or this_team.has_guide != 1:
        logging.warning(f"Team_id {team_id} attempted to pull ratings")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to pull card ratings."
        )
    # elif not valid_token(token):
    #     logging.warning(f'Bad Token: {token}')
@@ -124,29 +173,40 @@ async def get_card_ratings(
    all_ratings = BattingCardRatings.select()

    if battingcard_id is not None:
        all_ratings = all_ratings.where(
            BattingCardRatings.battingcard_id << battingcard_id
        )
    if vs_hand is not None:
        all_ratings = all_ratings.where(BattingCardRatings.vs_hand == vs_hand[-1])
    if cardset_id is not None:
        set_players = Player.select(Player.player_id).where(
            Player.cardset_id << cardset_id
        )
        set_cards = BattingCard.select(BattingCard.id).where(
            BattingCard.player << set_players
        )
        all_ratings = all_ratings.where(BattingCardRatings.battingcard << set_cards)

    if csv:
        # return_val = query_to_csv(all_ratings)
        return_vals = [model_to_dict(x) for x in all_ratings]
        for x in return_vals:
            x.update(x["battingcard"])
            x["player_id"] = x["battingcard"]["player"]["player_id"]
            del x["battingcard"], x["player"]

        db.close()
        return Response(
            content=pd.DataFrame(return_vals).to_csv(index=False), media_type="text/csv"
        )

    else:
        return_val = {
            "count": all_ratings.count(),
            "ratings": [
                model_to_dict(x, recurse=not short_output) for x in all_ratings
            ],
        }
        db.close()
        return return_val
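The csv branch above flattens the nested model_to_dict output before handing it to pandas; note that the card's id overwrites the rating's id in the process. A self-contained sketch with hypothetical values:

    row = {
        "id": 9,
        "vs_hand": "R",
        "battingcard": {"id": 417, "variant": 0, "player": {"player_id": 1234}},
    }
    row.update(row["battingcard"])  # hoist card fields up one level
    row["player_id"] = row["battingcard"]["player"]["player_id"]
    del row["battingcard"], row["player"]
    assert row == {"id": 417, "vs_hand": "R", "variant": 0, "player_id": 1234}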
@@ -154,225 +214,298 @@ async def get_card_ratings(
def get_scouting_dfs(cardset_id: list = None):
    all_ratings = BattingCardRatings.select()
    if cardset_id is not None:
        set_players = Player.select(Player.player_id).where(
            Player.cardset_id << cardset_id
        )
        set_cards = BattingCard.select(BattingCard.id).where(
            BattingCard.player << set_players
        )
        all_ratings = all_ratings.where(BattingCardRatings.battingcard << set_cards)

    vl_query = all_ratings.where(BattingCardRatings.vs_hand == "L")
    vr_query = all_ratings.where(BattingCardRatings.vs_hand == "R")

    vl_vals = [model_to_dict(x) for x in vl_query]
    for x in vl_vals:
        x.update(x["battingcard"])
        x["player_id"] = x["battingcard"]["player"]["player_id"]
        x["player_name"] = x["battingcard"]["player"]["p_name"]
        x["rarity"] = x["battingcard"]["player"]["rarity"]["name"]
        x["cardset_id"] = x["battingcard"]["player"]["cardset"]["id"]
        x["cardset_name"] = x["battingcard"]["player"]["cardset"]["name"]
        del x["battingcard"]
        del x["player"]

    vr_vals = [model_to_dict(x) for x in vr_query]
    for x in vr_vals:
        x["player_id"] = x["battingcard"]["player"]["player_id"]
        del x["battingcard"]

    vl = pd.DataFrame(vl_vals)
    vr = pd.DataFrame(vr_vals)

    bat_df = pd.merge(vl, vr, on="player_id", suffixes=("_vl", "_vr")).set_index(
        "player_id", drop=False
    )

    logging.debug(f"bat_df: {bat_df}")

    positions = CardPosition.select()
    if cardset_id is not None:
        set_players = Player.select(Player.player_id).where(
            Player.cardset_id << cardset_id
        )
        positions = positions.where(CardPosition.player << set_players)

    series_list = []
    for pos_code in ["P", "C", "1B", "2B", "3B", "SS", "LF", "CF", "RF"]:
        series_list.append(
            pd.Series(
                dict(
                    [
                        (x.player.player_id, x.range)
                        for x in positions.where(CardPosition.position == pos_code)
                    ]
                ),
                name=f"Range {pos_code}",
            )
        )
        series_list.append(
            pd.Series(
                dict(
                    [
                        (x.player.player_id, x.error)
                        for x in positions.where(CardPosition.position == pos_code)
                    ]
                ),
                name=f"Error {pos_code}",
            )
        )

    series_list.append(
        pd.Series(
            dict(
                [
                    (x.player.player_id, x.arm)
                    for x in positions.where(
                        CardPosition.position << ["LF", "CF", "RF"]
                    )
                ]
            ),
            name=f"Arm OF",
        )
    )
    series_list.append(
        pd.Series(
            dict(
                [
                    (x.player.player_id, x.arm)
                    for x in positions.where(CardPosition.position == "C")
                ]
            ),
            name=f"Arm C",
        )
    )
    series_list.append(
        pd.Series(
            dict(
                [
                    (x.player.player_id, x.pb)
                    for x in positions.where(CardPosition.position == "C")
                ]
            ),
            name=f"PB C",
        )
    )
    series_list.append(
        pd.Series(
            dict(
                [
                    (x.player.player_id, x.overthrow)
                    for x in positions.where(CardPosition.position == "C")
                ]
            ),
            name=f"Throw C",
        )
    )
    db.close()
    logging.debug(f"series_list: {series_list}")

    return bat_df.join(series_list)


@router.get("/scouting")
async def get_card_scouting(team_id: int, ts: str):
    this_team = Team.get_or_none(Team.id == team_id)
    # getattr guards the debug log against this_team being None
    logging.debug(f"Team: {this_team} / has_guide: {getattr(this_team, 'has_guide', None)}")
    if this_team is None or ts != this_team.team_hash() or this_team.has_guide != 1:
        logging.warning(f"Team_id {team_id} attempted to pull ratings")
        db.close()
        return (
            "Your team does not have the ratings guide enabled. If you have purchased a copy ping Cal to "
            "make sure it is enabled on your team. If you are interested you can pick it up here (thank you!): "
            "https://ko-fi.com/manticorum/shop"
        )

    if os.path.isfile(f"storage/batting-ratings.csv"):
        return FileResponse(
            path=f"storage/batting-ratings.csv",
            media_type="text/csv",
            # headers=headers
        )

    raise HTTPException(
        status_code=400, detail="Go pester Cal - the scouting file is missing"
    )


@router.post("/calculate/scouting", include_in_schema=PRIVATE_IN_SCHEMA)
async def post_calc_scouting(token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to calculate card ratings."
        )

    logging.warning(f"Re-calculating batting ratings\n\n")

    output = get_scouting_dfs()
    first = ["player_id", "player_name", "cardset_name", "rarity", "hand", "variant"]
    exclude = first + ["id_vl", "id_vr", "vs_hand_vl", "vs_hand_vr"]
    output = output[first + [col for col in output.columns if col not in exclude]]

    csv_file = pd.DataFrame(output).to_csv(index=False)
    with open(RATINGS_FILE, "w") as file:
        file.write(csv_file)

    return Response(content=csv_file, media_type="text/csv")


@router.get("/basic")
async def get_basic_scouting(cardset_id: list = Query(default=None)):
    if os.path.isfile(f"storage/batting-basic.csv"):
        return FileResponse(
            path=f"storage/batting-basic.csv",
            media_type="text/csv",
            # headers=headers
        )

    raise HTTPException(
        status_code=400, detail="Go pester Cal - the scouting file is missing"
    )


@router.post("/calculate/basic", include_in_schema=PRIVATE_IN_SCHEMA)
async def post_calc_basic(token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to calculate basic ratings."
        )

    logging.warning(f"Re-calculating basic batting ratings\n\n")

    raw_data = get_scouting_dfs()
    logging.debug(f"output: {raw_data}")

    def get_raw_speed(df_data):
        speed_raw = df_data["running"] / 20 + df_data["steal_jump"]
        if df_data["steal_auto"]:
            speed_raw += 0.5
        return speed_raw

    raw_series = raw_data.apply(get_raw_speed, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Speed"] = round(rank_series * 100)

    def get_raw_steal(df_data):
        return ((df_data["steal_high"] / 20) + (df_data["steal_low"] / 20)) * df_data[
            "steal_jump"
        ]

    raw_series = raw_data.apply(get_raw_steal, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Steal"] = round(rank_series * 100)

    def get_raw_reaction(df_data):
        raw_total = 0
        for pos_range in [
            df_data["Range C"],
            df_data["Range 1B"],
            df_data["Range 2B"],
            df_data["Range 3B"],
            df_data["Range SS"],
            df_data["Range LF"],
            df_data["Range CF"],
            df_data["Range RF"],
        ]:
            if pd.notna(pos_range):
                raw_total += 10 ** (5 - pos_range)
        return raw_total

    raw_series = raw_data.apply(get_raw_reaction, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Reaction"] = round(rank_series * 100)

    def get_raw_arm(df_data):
        of_arm = None
        of_pos = None
        if pd.notna(df_data["Range RF"]):
            of_pos = "RF"
        elif pd.notna(df_data["Range CF"]):
            of_pos = "CF"
        elif pd.notna(df_data["Range LF"]):
            of_pos = "LF"

        if of_pos is not None:
            if df_data["Arm OF"] < 0:
                of_raw = df_data["Arm OF"] * -10
            else:
                of_raw = 5 - df_data["Arm OF"]

            if of_pos == "RF":
                of_raw = of_raw * 1.5
                of_raw += (6 - df_data["Range RF"]) * 4
            elif of_pos == "CF":
                of_raw += (6 - df_data["Range CF"]) * 3
            elif of_pos == "LF":
                of_raw = of_raw / 2
                of_raw += (6 - df_data["Range LF"]) * 2

            of_arm = of_raw

        if_arm = None
        if (
            pd.notna(df_data["Range 3B"])
            or pd.notna(df_data["Range 2B"])
            or pd.notna(df_data["Range 1B"])
            or pd.notna(df_data["Range SS"])
        ):
            range_totals = 0
            if pd.notna(df_data["Range 3B"]):
                range_totals += (6 - df_data["Range 3B"]) * 5
            if pd.notna(df_data["Range SS"]):
                range_totals += (6 - df_data["Range SS"]) * 4
            if pd.notna(df_data["Range 2B"]):
                range_totals += (6 - df_data["Range 2B"]) * 3
            if pd.notna(df_data["Range 1B"]):
                range_totals += 6 - df_data["Range 1B"]
            if_arm = 100 - (50 - range_totals)

        c_arm = None
        if pd.notna(df_data["Arm C"]):
            if df_data["Arm C"] == -5:
                c_arm = 100
            else:
                temp_arm = (
                    20
                    + ((10 - df_data["Arm C"]) * 3)
                    + (20 - df_data["PB C"])
                    + (20 - df_data["Throw C"])
                    - df_data["Error C"]
                )
                c_arm = min(100, temp_arm)

        if c_arm is not None:
@@ -386,149 +519,192 @@ async def post_calc_basic(token: str = Depends(oauth2_scheme)):
    raw_series = raw_data.apply(get_raw_arm, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Arm"] = round(rank_series * 100)

    def get_raw_fielding(df_data):
        if_error, of_error, c_error = 0, 0, 0
        denom = 0
        if (
            pd.notna(df_data["Error 3B"])
            or pd.notna(df_data["Error 2B"])
            or pd.notna(df_data["Error 1B"])
            or pd.notna(df_data["Error SS"])
        ):
            raw_if = 100
            if pd.notna(df_data["Error 3B"]):
                raw_if -= df_data["Error 3B"] * 2
            if pd.notna(df_data["Error SS"]):
                raw_if -= df_data["Error SS"] * 0.75
            if pd.notna(df_data["Error 2B"]):
                raw_if -= df_data["Error 2B"] * 1.25
            if pd.notna(df_data["Error 1B"]):
                raw_if -= df_data["Error 1B"] * 2

            if_error = max(1, raw_if)
            denom += 1

        if (
            pd.notna(df_data["Error LF"])
            or pd.notna(df_data["Error CF"])
            or pd.notna(df_data["Error RF"])
        ):
            raw_of = 100
            if pd.notna(df_data["Error LF"]):
                raw_of -= df_data["Error LF"] * 2
            if pd.notna(df_data["Error CF"]):
                raw_of -= df_data["Error CF"] * 0.75
            if pd.notna(df_data["Error RF"]):
                raw_of -= df_data["Error RF"] * 1.25

            of_error = max(1, raw_of)
            denom += 1

        if pd.notna(df_data["Error C"]):
            c_error = max(
                100 - (df_data["Error C"] * 5) - df_data["Throw C"] - df_data["PB C"], 1
            )
            denom += 1

        return sum([if_error, of_error, c_error]) / max(denom, 1)

    raw_series = raw_data.apply(get_raw_fielding, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Fielding"] = round(rank_series * 100)

    rank_series = raw_data["avg_vl"].rank(pct=True)
    raw_data["Contact L"] = round(rank_series * 100)

    rank_series = raw_data["avg_vr"].rank(pct=True)
    raw_data["Contact R"] = round(rank_series * 100)

    rank_series = raw_data["slg_vl"].rank(pct=True)
    raw_data["Power L"] = round(rank_series * 100)

    rank_series = raw_data["slg_vr"].rank(pct=True)
    raw_data["Power R"] = round(rank_series * 100)

    def get_raw_vision(df_data):
        return (
            (
                ((df_data["obp_vr"] * 0.67) + (df_data["obp_vl"] * 0.33))
                - ((df_data["avg_vr"] * 0.67) + (df_data["avg_vl"] * 0.33))
            )
            * 5
        ) - (
            ((df_data["strikeout_vl"] * 0.33) + (df_data["strikeout_vr"] * 0.67)) / 208
        )

    raw_series = raw_data.apply(get_raw_vision, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Vision"] = round(rank_series * 100)

    def get_raw_rating(df_data):
        return (
            ((df_data["Reaction"] + df_data["Arm"] + df_data["Fielding"]) * 2)
            + (df_data["Speed"] + df_data["Steal"])
            + (
                (
                    ((df_data["Contact R"] + df_data["Power R"]) * 0.67)
                    + ((df_data["Contact L"] + df_data["Power L"]) * 0.33)
                    + df_data["Vision"]
                )
                * 6
            )
        )

    raw_series = raw_data.apply(get_raw_rating, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Rating"] = round(rank_series * 100)

    output = raw_data[
        [
            "player_id",
            "player_name",
            "Rating",
            "Contact R",
            "Contact L",
            "Power R",
            "Power L",
            "Vision",
            "Speed",
            "Steal",
            "Reaction",
            "Arm",
            "Fielding",
            "hand",
            "cardset_name",
        ]
    ]

    csv_file = pd.DataFrame(output).to_csv(index=False)
    with open(BASIC_FILE, "w") as file:
        file.write(csv_file)

    return Response(content=csv_file, media_type="text/csv")


@router.get("/{ratings_id}")
async def get_one_rating(ratings_id: int, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to pull card ratings."
        )

    this_rating = BattingCardRatings.get_or_none(BattingCardRatings.id == ratings_id)
    if this_rating is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"BattingCardRating id {ratings_id} not found"
        )

    r_data = model_to_dict(this_rating)
    db.close()
    return r_data


@router.get("/player/{player_id}")
async def get_player_ratings(
    player_id: int,
    variant: list = Query(default=None),
    short_output: bool = False,
    token: str = Depends(oauth2_scheme),
):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to pull card ratings."
        )

    all_cards = (
        BattingCard.select()
        .where(BattingCard.player_id == player_id)
        .order_by(BattingCard.variant)
    )
    if variant is not None:
        all_cards = all_cards.where(BattingCard.variant << variant)

    all_ratings = BattingCardRatings.select().where(
        BattingCardRatings.battingcard << all_cards
    )

    return_val = {
        "count": all_ratings.count(),
        "ratings": [model_to_dict(x, recurse=not short_output) for x in all_ratings],
    }
    db.close()
    return return_val


@router.put("", include_in_schema=PRIVATE_IN_SCHEMA)
async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to post card ratings."
        )

    new_ratings = []
@@ -536,44 +712,50 @@ async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme))
    for x in ratings.ratings:
        try:
            BattingCardRatings.get(
                (BattingCardRatings.battingcard_id == x.battingcard_id)
                & (BattingCardRatings.vs_hand == x.vs_hand)
            )
            updates += (
                BattingCardRatings.update(x.dict())
                .where(
                    (BattingCardRatings.battingcard_id == x.battingcard_id)
                    & (BattingCardRatings.vs_hand == x.vs_hand)
                )
                .execute()
            )
        except BattingCardRatings.DoesNotExist:
            new_ratings.append(x.dict())

    with db.atomic():
        # Use PostgreSQL-compatible upsert helper
        upsert_batting_card_ratings(new_ratings, batch_size=30)

    db.close()
    return f"Updated ratings: {updates}; new ratings: {len(new_ratings)}"


@router.delete("/{ratings_id}", include_in_schema=PRIVATE_IN_SCHEMA)
async def delete_rating(ratings_id: int, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to post card ratings."
        )

    this_rating = BattingCardRatings.get_or_none(BattingCardRatings.id == ratings_id)
    if this_rating is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"BattingCardRating id {ratings_id} not found"
        )

    count = this_rating.delete_instance()
    db.close()

    if count == 1:
        return f"Rating {this_rating} has been deleted"
    else:
        raise HTTPException(
            status_code=500, detail=f"Rating {this_rating} could not be deleted"
        )
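The put_ratings change above is the pattern repeated across these routers: the chunked on_conflict_replace() loop becomes a single helper call. A minimal round-trip sketch of what that buys on PostgreSQL, using hypothetical, abbreviated payloads:

    first = [{"battingcard": 417, "vs_hand": "R", "homerun": 4.0}]
    second = [{"battingcard": 417, "vs_hand": "R", "homerun": 5.0}]  # same key
    with db.atomic():
        upsert_batting_card_ratings(first)   # inserts the row
        upsert_batting_card_ratings(second)  # updates the same row in place
    # Lands as INSERT ... ON CONFLICT (battingcard, vs_hand) DO UPDATE, so the
    # row keeps its primary key; SQLite's ON CONFLICT REPLACE would instead
    # delete and re-insert it, minting a new rowid.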
@@ -6,18 +6,16 @@ import logging
import pydantic

from ..db_engine import db, BattingCard, model_to_dict, fn, chunked, Player, MlbPlayer
from ..db_helpers import upsert_batting_cards
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

logging.basicConfig(
    filename=LOG_DATA["filename"],
    format=LOG_DATA["format"],
    level=LOG_DATA["log_level"],
)

router = APIRouter(prefix="/api/v2/battingcards", tags=["battingcards"])


class BattingCardModel(pydantic.BaseModel):
@@ -27,22 +25,26 @@ class BattingCardModel(pydantic.BaseModel):
    steal_high: int = 20
    steal_auto: bool = False
    steal_jump: float = 0
    bunting: str = "C"
    hit_and_run: str = "C"
    running: int = 10
    offense_col: int = None
    hand: Literal["R", "L", "S"] = "R"


class BattingCardList(pydantic.BaseModel):
    cards: List[BattingCardModel]


@router.get("")
async def get_batting_cards(
    player_id: list = Query(default=None),
    player_name: list = Query(default=None),
    cardset_id: list = Query(default=None),
    short_output: bool = False,
    limit: Optional[int] = None,
    variant: list = Query(default=None),
):
    all_cards = BattingCard.select()
    if player_id is not None:
        all_cards = all_cards.where(BattingCard.player_id << player_id)
@@ -59,102 +61,134 @@ async def get_batting_cards(
    if limit is not None:
        all_cards = all_cards.limit(limit)

    return_val = {
        "count": all_cards.count(),
        "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards],
    }
    db.close()
    return return_val


@router.get("/{card_id}")
async def get_one_card(card_id: int):
    this_card = BattingCard.get_or_none(BattingCard.id == card_id)
    if this_card is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"BattingCard id {card_id} not found"
        )

    r_card = model_to_dict(this_card)
    db.close()
    return r_card


@router.get("/player/{player_id}")
async def get_player_cards(
    player_id: int, variant: list = Query(default=None), short_output: bool = False
):
    all_cards = (
        BattingCard.select()
        .where(BattingCard.player_id == player_id)
        .order_by(BattingCard.variant)
    )
    if variant is not None:
        all_cards = all_cards.where(BattingCard.variant << variant)

    return_val = {
        "count": all_cards.count(),
        "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards],
    }
    db.close()
    return return_val


@router.put("")
async def put_cards(cards: BattingCardList, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401,
            detail="You are not authorized to post batting cards. This event has been logged.",
        )

    new_cards = []
    updates = 0
    logging.info(f"here!")

    for x in cards.cards:
        try:
            old = BattingCard.get(
                (BattingCard.player_id == x.player_id)
                & (BattingCard.variant == x.variant)
            )

            if x.offense_col is None:
                x.offense_col = old.offense_col
            updates += (
                BattingCard.update(x.dict())
                .where(
                    (BattingCard.player_id == x.player_id)
                    & (BattingCard.variant == x.variant)
                )
                .execute()
            )
        except BattingCard.DoesNotExist:
            if x.offense_col is None:
                this_player = Player.get_or_none(Player.player_id == x.player_id)
                mlb_player = MlbPlayer.get_or_none(
                    MlbPlayer.key_bbref == this_player.bbref_id
                )
                if mlb_player is not None:
                    logging.info(
                        f"setting offense_col to {mlb_player.offense_col} for {this_player.p_name}"
                    )
                    x.offense_col = mlb_player.offense_col
                else:
                    logging.info(
                        f"randomly setting offense_col for {this_player.p_name}"
                    )
                    x.offense_col = random.randint(1, 3)
            logging.debug(f"x.dict(): {x.dict()}")
            new_cards.append(x.dict())

    with db.atomic():
        # Use PostgreSQL-compatible upsert helper
        upsert_batting_cards(new_cards, batch_size=30)

    db.close()
    return f"Updated cards: {updates}; new cards: {len(new_cards)}"


@router.patch("/{card_id}")
async def patch_card(
    card_id: int,
    steal_low: Optional[int] = None,
    steal_high: Optional[int] = None,
    steal_auto: Optional[bool] = None,
    steal_jump: Optional[float] = None,
    bunting: Optional[str] = None,
    hit_and_run: Optional[str] = None,
    running: Optional[int] = None,
    offense_col: Optional[int] = None,
    hand: Literal["R", "L", "S"] = None,
    token: str = Depends(oauth2_scheme),
):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401,
            detail="You are not authorized to patch batting cards. This event has been logged.",
        )

    this_card = BattingCard.get_or_none(BattingCard.id == card_id)
    if this_card is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"BattingCard id {card_id} not found"
        )

    if steal_low is not None:
        this_card.steal_low = steal_low
@@ -183,45 +217,49 @@ async def patch_card(
         db.close()
         raise HTTPException(
             status_code=418,
-            detail='Well slap my ass and call me a teapot; I could not save that card'
+            detail="Well slap my ass and call me a teapot; I could not save that card",
         )


-@router.delete('/{card_id}')
+@router.delete("/{card_id}")
 async def delete_card(card_id: int, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to delete batting cards. This event has been logged.'
+            detail="You are not authorized to delete batting cards. This event has been logged.",
         )

     this_card = BattingCard.get_or_none(BattingCard.id == card_id)
     if this_card is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'BattingCard id {card_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"BattingCard id {card_id} not found"
+        )

     count = this_card.delete_instance()
     db.close()

     if count == 1:
-        return f'Card {this_card} has been deleted'
+        return f"Card {this_card} has been deleted"
     else:
-        raise HTTPException(status_code=500, detail=f'Card {this_card} could not be deleted')
+        raise HTTPException(
+            status_code=500, detail=f"Card {this_card} could not be deleted"
+        )


-@router.delete('')
+@router.delete("")
 async def delete_all_cards(token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to delete batting cards. This event has been logged.'
+            detail="You are not authorized to delete batting cards. This event has been logged.",
         )

     d_query = BattingCard.delete()
     d_query.execute()

-    return f'Deleted {d_query.count()} batting cards'
+    return f"Deleted {d_query.count()} batting cards"
@@ -5,24 +5,22 @@ import pydantic
 from pydantic import root_validator

 from ..db_engine import db, CardPosition, model_to_dict, chunked, Player, fn
+from ..db_helpers import upsert_card_positions
 from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

 logging.basicConfig(
-    filename=LOG_DATA['filename'],
-    format=LOG_DATA['format'],
-    level=LOG_DATA['log_level']
+    filename=LOG_DATA["filename"],
+    format=LOG_DATA["format"],
+    level=LOG_DATA["log_level"],
 )

-router = APIRouter(
-    prefix='/api/v2/cardpositions',
-    tags=['cardpositions']
-)
+router = APIRouter(prefix="/api/v2/cardpositions", tags=["cardpositions"])


 class CardPositionModel(pydantic.BaseModel):
     player_id: int
     variant: int = 0
-    position: Literal['P', 'C', '1B', '2B', '3B', 'SS', 'LF', 'CF', 'RF', 'DH']
+    position: Literal["P", "C", "1B", "2B", "3B", "SS", "LF", "CF", "RF", "DH"]
     innings: int = 1
     range: int = 5
     error: int = 0
@@ -32,10 +30,12 @@ class CardPositionModel(pydantic.BaseModel):

     @root_validator(skip_on_failure=True)
     def position_validator(cls, values):
-        if values['position'] in ['C', 'LF', 'CF', 'RF'] and values['arm'] is None:
+        if values["position"] in ["C", "LF", "CF", "RF"] and values["arm"] is None:
-            raise ValueError(f'{values["position"]} must have an arm rating')
+            raise ValueError(f"{values['position']} must have an arm rating")
-        if values['position'] == 'C' and (values['pb'] is None or values['overthrow'] is None):
-            raise ValueError('Catchers must have a pb and overthrow rating')
+        if values["position"] == "C" and (
+            values["pb"] is None or values["overthrow"] is None
+        ):
+            raise ValueError("Catchers must have a pb and overthrow rating")
         return values

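The restructured validator behaves exactly as before; for instance, a catcher entry that omits its pb/overthrow ratings is rejected at parse time. A quick illustration (this assumes arm, pb, and overthrow are Optional fields on the model, as the None checks imply):

    from pydantic import ValidationError

    try:
        CardPositionModel(player_id=1, position="C", arm=3)  # no pb / overthrow
    except ValidationError as exc:
        print(exc)  # -> "Catchers must have a pb and overthrow rating"
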
@@ -43,14 +43,24 @@ class PositionList(pydantic.BaseModel):
     positions: List[CardPositionModel]


-@router.get('')
+@router.get("")
 async def get_card_positions(
-    player_id: list = Query(default=None), position: list = Query(default=None), min_innings: Optional[int] = 1,
-    r: list = Query(default=None), e: list = Query(default=None), arm: list = Query(default=None),
-    pb: list = Query(default=None), overthrow: list = Query(default=None), cardset_id: list = Query(default=None),
-    short_output: Optional[bool] = False, sort: Optional[str] = 'innings-desc'):
-    all_pos = CardPosition.select().where(CardPosition.innings >= min_innings).order_by(
-        CardPosition.player, CardPosition.position, CardPosition.variant
+    player_id: list = Query(default=None),
+    position: list = Query(default=None),
+    min_innings: Optional[int] = 1,
+    r: list = Query(default=None),
+    e: list = Query(default=None),
+    arm: list = Query(default=None),
+    pb: list = Query(default=None),
+    overthrow: list = Query(default=None),
+    cardset_id: list = Query(default=None),
+    short_output: Optional[bool] = False,
+    sort: Optional[str] = "innings-desc",
+):
+    all_pos = (
+        CardPosition.select()
+        .where(CardPosition.innings >= min_innings)
+        .order_by(CardPosition.player, CardPosition.position, CardPosition.variant)
     )

     if player_id is not None:
@@ -72,42 +82,45 @@ async def get_card_positions(
         all_players = Player.select().where(Player.cardset_id << cardset_id)
         all_pos = all_pos.where(CardPosition.player << all_players)

-    if sort == 'innings-desc':
+    if sort == "innings-desc":
         all_pos = all_pos.order_by(CardPosition.innings.desc())
-    elif sort == 'innings-asc':
+    elif sort == "innings-asc":
         all_pos = all_pos.order_by(CardPosition.innings)
-    elif sort == 'range-desc':
+    elif sort == "range-desc":
         all_pos = all_pos.order_by(CardPosition.range.desc())
-    elif sort == 'range-asc':
+    elif sort == "range-asc":
         all_pos = all_pos.order_by(CardPosition.range)

-    return_val = {'count': all_pos.count(), 'positions': [
-        model_to_dict(x, recurse=not short_output) for x in all_pos
-    ]}
+    return_val = {
+        "count": all_pos.count(),
+        "positions": [model_to_dict(x, recurse=not short_output) for x in all_pos],
+    }
     db.close()
     return return_val


-@router.get('/{position_id}')
+@router.get("/{position_id}")
 async def get_one_position(position_id: int):
     this_pos = CardPosition.get_or_none(CardPosition.id == position_id)
     if this_pos is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'CardPosition id {position_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"CardPosition id {position_id} not found"
+        )

     r_data = model_to_dict(this_pos)
     db.close()
     return r_data


-@router.put('')
+@router.put("")
 async def put_positions(positions: PositionList, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to post card positions. This event has been logged.'
+            detail="You are not authorized to post card positions. This event has been logged.",
         )

     new_cards = []
@@ -116,43 +129,53 @@ async def put_positions(positions: PositionList, token: str = Depends(oauth2_sch
     for x in positions.positions:
         try:
             CardPosition.get(
-                (CardPosition.player_id == x.player_id) & (CardPosition.variant == x.variant) &
-                (CardPosition.position == x.position)
+                (CardPosition.player_id == x.player_id)
+                & (CardPosition.variant == x.variant)
+                & (CardPosition.position == x.position)
             )
-            updates += CardPosition.update(x.dict()).where(
-                (CardPosition.player_id == x.player_id) & (CardPosition.variant == x.variant) &
-                (CardPosition.position == x.position)
-            ).execute()
+            updates += (
+                CardPosition.update(x.dict())
+                .where(
+                    (CardPosition.player_id == x.player_id)
+                    & (CardPosition.variant == x.variant)
+                    & (CardPosition.position == x.position)
+                )
+                .execute()
+            )
         except CardPosition.DoesNotExist:
             new_cards.append(x.dict())

     with db.atomic():
-        for batch in chunked(new_cards, 30):
-            CardPosition.insert_many(batch).on_conflict_replace().execute()
+        # Use PostgreSQL-compatible upsert helper
+        upsert_card_positions(new_cards, batch_size=30)

     db.close()
-    return f'Updated cards: {updates}; new cards: {len(new_cards)}'
+    return f"Updated cards: {updates}; new cards: {len(new_cards)}"


-@router.delete('/{position_id}')
+@router.delete("/{position_id}")
 async def delete_position(position_id: int, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to delete card positions. This event has been logged.'
+            detail="You are not authorized to delete card positions. This event has been logged.",
         )

     this_pos = CardPosition.get_or_none(CardPosition.id == position_id)
     if this_pos is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'CardPosition id {position_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"CardPosition id {position_id} not found"
+        )

     count = this_pos.delete_instance()
     db.close()

     if count == 1:
-        return f'Card Position {this_pos} has been deleted'
+        return f"Card Position {this_pos} has been deleted"
     else:
-        raise HTTPException(status_code=500, detail=f'Card Position {this_pos} could not be deleted')
+        raise HTTPException(
+            status_code=500, detail=f"Card Position {this_pos} could not be deleted"
+        )
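For card positions the conflict target falls straight out of the lookup key the router itself uses. A sketch of the call the helper presumably makes, assuming a unique index on (player, variant, position) backs it (the preserve list is an illustrative subset):

    # Sketch: the cross-database upsert pattern with CardPosition's natural key.
    CardPosition.insert_many(batch).on_conflict(
        conflict_target=[
            CardPosition.player, CardPosition.variant, CardPosition.position
        ],
        preserve=[CardPosition.innings, CardPosition.range, CardPosition.error],
    ).execute()

Unlike SQLite's INSERT OR REPLACE, PostgreSQL's ON CONFLICT DO UPDATE leaves the conflicting row's id untouched, which is exactly what an ID-preserving migration needs.
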
@@ -5,19 +5,28 @@ import logging
 import pandas as pd
 import pydantic

-from ..db_engine import db, Decision, StratGame, Player, model_to_dict, chunked, fn, Team, Card, StratPlay
+from ..db_engine import (
+    db,
+    Decision,
+    StratGame,
+    Player,
+    model_to_dict,
+    chunked,
+    fn,
+    Team,
+    Card,
+    StratPlay,
+)
+from ..db_helpers import upsert_decisions
 from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

 logging.basicConfig(
-    filename=LOG_DATA['filename'],
-    format=LOG_DATA['format'],
-    level=LOG_DATA['log_level']
+    filename=LOG_DATA["filename"],
+    format=LOG_DATA["format"],
+    level=LOG_DATA["log_level"],
 )

-router = APIRouter(
-    prefix='/api/v2/decisions',
-    tags=['decisions']
-)
+router = APIRouter(prefix="/api/v2/decisions", tags=["decisions"])


 class DecisionModel(pydantic.BaseModel):
@@ -42,14 +51,26 @@ class DecisionList(pydantic.BaseModel):
     decisions: List[DecisionModel]


-@router.get('')
+@router.get("")
 async def get_decisions(
-    season: list = Query(default=None), week: list = Query(default=None), team_id: list = Query(default=None),
-    win: Optional[int] = None, loss: Optional[int] = None, hold: Optional[int] = None, save: Optional[int] = None,
-    b_save: Optional[int] = None, irunners: list = Query(default=None), irunners_scored: list = Query(default=None),
-    game_type: list = Query(default=None),
-    game_id: list = Query(default=None), player_id: list = Query(default=None), csv: Optional[bool] = False,
-    limit: Optional[int] = 100, page_num: Optional[int] = 1, short_output: Optional[bool] = False):
+    season: list = Query(default=None),
+    week: list = Query(default=None),
+    team_id: list = Query(default=None),
+    win: Optional[int] = None,
+    loss: Optional[int] = None,
+    hold: Optional[int] = None,
+    save: Optional[int] = None,
+    b_save: Optional[int] = None,
+    irunners: list = Query(default=None),
+    irunners_scored: list = Query(default=None),
+    game_type: list = Query(default=None),
+    game_id: list = Query(default=None),
+    player_id: list = Query(default=None),
+    csv: Optional[bool] = False,
+    limit: Optional[int] = 100,
+    page_num: Optional[int] = 1,
+    short_output: Optional[bool] = False,
+):
     all_dec = Decision.select().order_by(-Decision.season, -Decision.week, -Decision.id)

     if season is not None:
@@ -88,40 +109,50 @@ async def get_decisions(
     all_dec = all_dec.paginate(page_num, limit)

     return_dec = {
-        'count': all_dec.count(),
-        'decisions': [model_to_dict(x, recurse=not short_output) for x in all_dec]
+        "count": all_dec.count(),
+        "decisions": [model_to_dict(x, recurse=not short_output) for x in all_dec],
     }
     db.close()

     if csv:
-        return_vals = return_dec['decisions']
+        return_vals = return_dec["decisions"]
         if len(return_vals) == 0:
-            return Response(content=pd.DataFrame().to_csv(index=False), media_type='text/csv')
+            return Response(
+                content=pd.DataFrame().to_csv(index=False), media_type="text/csv"
+            )

         for x in return_vals:
-            x['game_id'] = x['game']['id']
-            x['game_type'] = x['game']['game_type']
-            x['player_id'] = x['pitcher']['player_id']
-            x['player_name'] = x['pitcher']['p_name']
-            x['player_cardset'] = x['pitcher']['cardset']['name']
-            x['team_id'] = x['pitcher_team']['id']
-            x['team_abbrev'] = x['pitcher_team']['abbrev']
-            del x['pitcher'], x['pitcher_team'], x['game']
+            x["game_id"] = x["game"]["id"]
+            x["game_type"] = x["game"]["game_type"]
+            x["player_id"] = x["pitcher"]["player_id"]
+            x["player_name"] = x["pitcher"]["p_name"]
+            x["player_cardset"] = x["pitcher"]["cardset"]["name"]
+            x["team_id"] = x["pitcher_team"]["id"]
+            x["team_abbrev"] = x["pitcher_team"]["abbrev"]
+            del x["pitcher"], x["pitcher_team"], x["game"]

         output = pd.DataFrame(return_vals)
-        first = ['player_id', 'player_name', 'player_cardset', 'team_id', 'team_abbrev']
-        exclude = first + ['lob_all', 'lob_all_rate', 'lob_2outs', 'rbi%']
+        first = ["player_id", "player_name", "player_cardset", "team_id", "team_abbrev"]
+        exclude = first + ["lob_all", "lob_all_rate", "lob_2outs", "rbi%"]
         output = output[first + [col for col in output.columns if col not in exclude]]

         db.close()
-        return Response(content=pd.DataFrame(output).to_csv(index=False), media_type='text/csv')
+        return Response(
+            content=pd.DataFrame(output).to_csv(index=False), media_type="text/csv"
+        )

     return return_dec


-@router.get('/rest')
-async def get_decisions_for_rest(team_id: int, season: int = None, limit: int = 80, native_rest: bool = False):
-    all_dec = Decision.select().order_by(-Decision.season, -Decision.week, -Decision.id).paginate(1, limit)
+@router.get("/rest")
+async def get_decisions_for_rest(
+    team_id: int, season: int = None, limit: int = 80, native_rest: bool = False
+):
+    all_dec = (
+        Decision.select()
+        .order_by(-Decision.season, -Decision.week, -Decision.id)
+        .paginate(1, limit)
+    )

     if season is not None:
         all_dec = all_dec.where(Decision.season == season)
@@ -131,41 +162,61 @@ async def get_decisions_for_rest(team_id: int, season: int = None, limit: int =
     return_dec = []
     for x in all_dec:
         this_val = []
-        this_card = Card.get_or_none(Card.player_id == x.pitcher.player_id, Card.team_id == x.pitcher_team.id)
+        this_card = Card.get_or_none(
+            Card.player_id == x.pitcher.player_id, Card.team_id == x.pitcher_team.id
+        )
         this_val.append(x.game.id)
         this_val.append(x.pitcher.player_id)
         this_val.append(this_card.id if this_card is not None else -1)
         this_val.append(1 if x.is_start else 0)
         if not native_rest:
             this_line = StratPlay.select(
-                StratPlay.pitcher, StratPlay.game, fn.SUM(StratPlay.outs).alias('sum_outs')
+                StratPlay.pitcher,
+                StratPlay.game,
+                fn.SUM(StratPlay.outs).alias("sum_outs"),
             ).where((StratPlay.game == x.game) & (StratPlay.pitcher == x.pitcher))
-            logging.info(f'this_line: {this_line[0]}')
+            logging.info(f"this_line: {this_line[0]}")
             if this_line[0].sum_outs is None:
                 this_val.append(0.0)
             else:
-                this_val.append(float(this_line[0].sum_outs // 3) + (float(this_line[0].sum_outs % 3) * .1))
+                this_val.append(
+                    float(this_line[0].sum_outs // 3)
+                    + (float(this_line[0].sum_outs % 3) * 0.1)
+                )

         return_dec.append(this_val)

     db.close()
-    return Response(content=pd.DataFrame(return_dec).to_csv(index=False, header=False), media_type='text/csv')
+    return Response(
+        content=pd.DataFrame(return_dec).to_csv(index=False, header=False),
+        media_type="text/csv",
+    )


-@router.patch('/{decision_id}')
+@router.patch("/{decision_id}")
 async def patch_decision(
-    decision_id: int, win: Optional[int] = None, loss: Optional[int] = None, hold: Optional[int] = None,
-    save: Optional[int] = None, b_save: Optional[int] = None, irunners: Optional[int] = None,
-    irunners_scored: Optional[int] = None, rest_ip: Optional[int] = None, rest_required: Optional[int] = None,
-    token: str = Depends(oauth2_scheme)):
+    decision_id: int,
+    win: Optional[int] = None,
+    loss: Optional[int] = None,
+    hold: Optional[int] = None,
+    save: Optional[int] = None,
+    b_save: Optional[int] = None,
+    irunners: Optional[int] = None,
+    irunners_scored: Optional[int] = None,
+    rest_ip: Optional[int] = None,
+    rest_required: Optional[int] = None,
+    token: str = Depends(oauth2_scheme),
+):
     if not valid_token(token):
-        logging.warning(f'patch_decision - Bad Token: {token}')
+        logging.warning(f"patch_decision - Bad Token: {token}")
-        raise HTTPException(status_code=401, detail='Unauthorized')
+        raise HTTPException(status_code=401, detail="Unauthorized")

     this_dec = Decision.get_or_none(Decision.id == decision_id)
     if this_dec is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'Decision ID {decision_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"Decision ID {decision_id} not found"
+        )

     if win is not None:
         this_dec.win = win
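The rest endpoint packs innings pitched into baseball's conventional decimal notation: whole innings before the point, leftover outs in the tenths place. A quick worked example of the expression above:

    # outs // 3 whole innings; outs % 3 leftover outs in the tenths place.
    for outs in (0, 3, 23):
        ip = float(outs // 3) + (outs % 3) * 0.1
        print(outs, ip)   # 0 -> 0.0, 3 -> 1.0, 23 -> 7.2 (7 innings, 2 outs)
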
@@ -192,72 +243,84 @@ async def patch_decision(
         return d_result
     else:
         db.close()
-        raise HTTPException(status_code=500, detail=f'Unable to patch decision {decision_id}')
+        raise HTTPException(
+            status_code=500, detail=f"Unable to patch decision {decision_id}"
+        )


-@router.post('')
+@router.post("")
 async def post_decisions(dec_list: DecisionList, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'post_decisions - Bad Token: {token}')
+        logging.warning(f"post_decisions - Bad Token: {token}")
-        raise HTTPException(status_code=401, detail='Unauthorized')
+        raise HTTPException(status_code=401, detail="Unauthorized")

     new_dec = []
     for x in dec_list.decisions:
         if StratGame.get_or_none(StratGame.id == x.game_id) is None:
-            raise HTTPException(status_code=404, detail=f'Game ID {x.game_id} not found')
+            raise HTTPException(
+                status_code=404, detail=f"Game ID {x.game_id} not found"
+            )
         if Player.get_or_none(Player.player_id == x.pitcher_id) is None:
-            raise HTTPException(status_code=404, detail=f'Player ID {x.pitcher_id} not found')
+            raise HTTPException(
+                status_code=404, detail=f"Player ID {x.pitcher_id} not found"
+            )
         if Team.get_or_none(Team.id == x.pitcher_team_id) is None:
-            raise HTTPException(status_code=404, detail=f'Team ID {x.pitcher_team_id} not found')
+            raise HTTPException(
+                status_code=404, detail=f"Team ID {x.pitcher_team_id} not found"
+            )

         new_dec.append(x.dict())

     with db.atomic():
-        for batch in chunked(new_dec, 10):
-            Decision.insert_many(batch).on_conflict_replace().execute()
+        # Use PostgreSQL-compatible upsert helper
+        upsert_decisions(new_dec, batch_size=10)
     db.close()

-    return f'Inserted {len(new_dec)} decisions'
+    return f"Inserted {len(new_dec)} decisions"


-@router.delete('/{decision_id}')
+@router.delete("/{decision_id}")
 async def delete_decision(decision_id: int, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'delete_decision - Bad Token: {token}')
+        logging.warning(f"delete_decision - Bad Token: {token}")
-        raise HTTPException(status_code=401, detail='Unauthorized')
+        raise HTTPException(status_code=401, detail="Unauthorized")

     this_dec = Decision.get_or_none(Decision.id == decision_id)
     if this_dec is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'Decision ID {decision_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"Decision ID {decision_id} not found"
+        )

     count = this_dec.delete_instance()
     db.close()

     if count == 1:
-        return f'Decision {decision_id} has been deleted'
+        return f"Decision {decision_id} has been deleted"
     else:
-        raise HTTPException(status_code=500, detail=f'Decision {decision_id} could not be deleted')
+        raise HTTPException(
+            status_code=500, detail=f"Decision {decision_id} could not be deleted"
+        )


-@router.delete('/game/{game_id}')
+@router.delete("/game/{game_id}")
 async def delete_decisions_game(game_id: int, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'delete_decisions_game - Bad Token: {token}')
+        logging.warning(f"delete_decisions_game - Bad Token: {token}")
-        raise HTTPException(status_code=401, detail='Unauthorized')
+        raise HTTPException(status_code=401, detail="Unauthorized")

     this_game = StratGame.get_or_none(StratGame.id == game_id)
     if not this_game:
         db.close()
-        raise HTTPException(status_code=404, detail=f'Game ID {game_id} not found')
+        raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found")

     count = Decision.delete().where(Decision.game == this_game).execute()
     db.close()

     if count > 0:
-        return f'Deleted {count} decisions matching Game ID {game_id}'
+        return f"Deleted {count} decisions matching Game ID {game_id}"
     else:
-        raise HTTPException(status_code=500, detail=f'No decisions matching Game ID {game_id} were deleted')
+        raise HTTPException(
+            status_code=500,
+            detail=f"No decisions matching Game ID {game_id} were deleted",
+        )
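upsert_decisions presumably follows the same pattern; the natural conflict target here would be the decision's game/pitcher pair, though the unique index itself isn't visible in this hunk. A sketch, not the committed implementation:

    # Assumes one decision row per (game, pitcher); preserve list is illustrative.
    Decision.insert_many(batch).on_conflict(
        conflict_target=[Decision.game, Decision.pitcher],
        preserve=[Decision.win, Decision.loss, Decision.hold, Decision.save],
    ).execute()
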
@@ -4,18 +4,16 @@ import logging
 import pydantic

 from ..db_engine import db, GauntletReward, model_to_dict, chunked, DatabaseError
+from ..db_helpers import upsert_gauntlet_rewards
 from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

 logging.basicConfig(
-    filename=LOG_DATA['filename'],
-    format=LOG_DATA['format'],
-    level=LOG_DATA['log_level']
+    filename=LOG_DATA["filename"],
+    format=LOG_DATA["format"],
+    level=LOG_DATA["log_level"],
 )

-router = APIRouter(
-    prefix='/api/v2/gauntletrewards',
-    tags=['gauntletrewards']
-)
+router = APIRouter(prefix="/api/v2/gauntletrewards", tags=["gauntletrewards"])


 class GauntletRewardModel(pydantic.BaseModel):
@@ -30,10 +28,14 @@ class GauntletRewardList(pydantic.BaseModel):
     rewards: List[GauntletRewardModel]


-@router.get('')
+@router.get("")
 async def v1_gauntletreward_get(
-    name: Optional[str] = None, gauntlet_id: Optional[int] = None, reward_id: list = Query(default=None),
-    win_num: Optional[int] = None, loss_max: Optional[int] = None):
+    name: Optional[str] = None,
+    gauntlet_id: Optional[int] = None,
+    reward_id: list = Query(default=None),
+    win_num: Optional[int] = None,
+    loss_max: Optional[int] = None,
+):
     all_rewards = GauntletReward.select()

     if name is not None:
@@ -49,44 +51,52 @@ async def v1_gauntletreward_get(

     all_rewards = all_rewards.order_by(-GauntletReward.loss_max, GauntletReward.win_num)

-    return_val = {'count': all_rewards.count(), 'rewards': []}
+    return_val = {"count": all_rewards.count(), "rewards": []}
     for x in all_rewards:
-        return_val['rewards'].append(model_to_dict(x))
+        return_val["rewards"].append(model_to_dict(x))

     db.close()
     return return_val


-@router.get('/{gauntletreward_id}')
+@router.get("/{gauntletreward_id}")
 async def v1_gauntletreward_get_one(gauntletreward_id):
     try:
         this_reward = GauntletReward.get_by_id(gauntletreward_id)
     except Exception:
         db.close()
-        raise HTTPException(status_code=404, detail=f'No gauntlet reward found with id {gauntletreward_id}')
+        raise HTTPException(
+            status_code=404,
+            detail=f"No gauntlet reward found with id {gauntletreward_id}",
+        )

     return_val = model_to_dict(this_reward)
     db.close()
     return return_val


-@router.patch('/{gauntletreward_id}')
+@router.patch("/{gauntletreward_id}")
 async def v1_gauntletreward_patch(
-    gauntletreward_id, name: Optional[str] = None, gauntlet_id: Optional[int] = None,
-    reward_id: Optional[int] = None, win_num: Optional[int] = None, loss_max: Optional[int] = None,
-    token: str = Depends(oauth2_scheme)):
+    gauntletreward_id,
+    name: Optional[str] = None,
+    gauntlet_id: Optional[int] = None,
+    reward_id: Optional[int] = None,
+    win_num: Optional[int] = None,
+    loss_max: Optional[int] = None,
+    token: str = Depends(oauth2_scheme),
+):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to patch gauntlet rewards. This event has been logged.'
+            detail="You are not authorized to patch gauntlet rewards. This event has been logged.",
         )

     this_reward = GauntletReward.get_or_none(GauntletReward.id == gauntletreward_id)
     if this_reward is None:
         db.close()
-        raise KeyError(f'Gauntlet Reward ID {gauntletreward_id} not found')
+        raise KeyError(f"Gauntlet Reward ID {gauntletreward_id} not found")

     if gauntlet_id is not None:
         this_reward.gauntlet_id = gauntlet_id
@@ -105,17 +115,19 @@ async def v1_gauntletreward_patch(
         return r_curr
     else:
         db.close()
-        raise DatabaseError(f'Unable to patch gauntlet reward {gauntletreward_id}')
+        raise DatabaseError(f"Unable to patch gauntlet reward {gauntletreward_id}")


-@router.post('')
-async def v1_gauntletreward_post(gauntletreward: GauntletRewardList, token: str = Depends(oauth2_scheme)):
+@router.post("")
+async def v1_gauntletreward_post(
+    gauntletreward: GauntletRewardList, token: str = Depends(oauth2_scheme)
+):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to post gauntlets. This event has been logged.'
+            detail="You are not authorized to post gauntlets. This event has been logged.",
         )

     all_rewards = []
@@ -123,17 +135,16 @@ async def v1_gauntletreward_post(gauntletreward: GauntletRewardList, token: str
         all_rewards.append(x.dict())

     with db.atomic():
-        for batch in chunked(all_rewards, 15):
-            GauntletReward.insert_many(batch).on_conflict_replace().execute()
+        # Use PostgreSQL-compatible upsert helper
+        upsert_gauntlet_rewards(all_rewards, batch_size=15)
     db.close()

-    return f'Inserted {len(all_rewards)} gauntlet rewards'
+    return f"Inserted {len(all_rewards)} gauntlet rewards"


-@router.delete('/{gauntletreward_id}')
+@router.delete("/{gauntletreward_id}")
 async def v1_gauntletreward_delete(gauntletreward_id):
     if GauntletReward.delete_by_id(gauntletreward_id) == 1:
-        return f'Deleted gauntlet reward ID {gauntletreward_id}'
+        return f"Deleted gauntlet reward ID {gauntletreward_id}"

-    raise DatabaseError(f'Unable to delete gauntlet run {gauntletreward_id}')
+    raise DatabaseError(f"Unable to delete gauntlet run {gauntletreward_id}")
@@ -7,19 +7,27 @@ import logging
 import pydantic
 from pandas import DataFrame

-from ..db_engine import db, MlbPlayer, Player, BattingCard, PitchingCard, model_to_dict, fn, chunked, query_to_csv
+from ..db_engine import (
+    db,
+    MlbPlayer,
+    Player,
+    BattingCard,
+    PitchingCard,
+    model_to_dict,
+    fn,
+    chunked,
+    query_to_csv,
+)
+from ..db_helpers import upsert_mlb_players
 from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

 logging.basicConfig(
-    filename=LOG_DATA['filename'],
-    format=LOG_DATA['format'],
-    level=LOG_DATA['log_level']
+    filename=LOG_DATA["filename"],
+    format=LOG_DATA["format"],
+    level=LOG_DATA["log_level"],
 )

-router = APIRouter(
-    prefix='/api/v2/mlbplayers',
-    tags=['mlbplayers']
-)
+router = APIRouter(prefix="/api/v2/mlbplayers", tags=["mlbplayers"])


 class PlayerModel(pydantic.BaseModel):
@@ -37,35 +45,50 @@ class PlayerList(pydantic.BaseModel):


 def update_card_urls(mlbplayer: MlbPlayer):
-    logging.info(f'Updating cards for mlbplayer: {mlbplayer.first_name} {mlbplayer.last_name} ({mlbplayer.key_bbref})')
+    logging.info(
+        f"Updating cards for mlbplayer: {mlbplayer.first_name} {mlbplayer.last_name} ({mlbplayer.key_bbref})"
+    )
     now = datetime.datetime.now()
-    c1_update = Player.update({
-        Player.image: Player.image.name.split('?d=')[0] + f'?d={now.year}-{now.month}-{now.day}'
-    }).where(Player.mlbplayer_id == mlbplayer.id)
+    c1_update = Player.update(
+        {
+            Player.image: Player.image.name.split("?d=")[0]
+            + f"?d={now.year}-{now.month}-{now.day}"
+        }
+    ).where(Player.mlbplayer_id == mlbplayer.id)
     count = c1_update.execute()
-    logging.info(f'Updated {count} image1s')
+    logging.info(f"Updated {count} image1s")

-    c2_update = Player.update({
-        Player.image2: Player.image2.name.split('?d=')[0] + f'{now.year}-{now.month}-{now.day}'
-    }).where((Player.mlbplayer_id == mlbplayer.id) & (Player.image2.is_null(False)))
+    c2_update = Player.update(
+        {
+            Player.image2: Player.image2.name.split("?d=")[0]
+            + f"{now.year}-{now.month}-{now.day}"
+        }
+    ).where((Player.mlbplayer_id == mlbplayer.id) & (Player.image2.is_null(False)))
     count2 = c2_update.execute()
-    logging.info(f'Updated {count2} image2s')
+    logging.info(f"Updated {count2} image2s")

     return count + count2


-@router.get('')
+@router.get("")
 async def get_players(
-    full_name: list = Query(default=None), first_name: list = Query(default=None),
-    last_name: list = Query(default=None), key_fangraphs: list = Query(default=None),
-    key_bbref: list = Query(default=None), key_retro: list = Query(default=None),
-    key_mlbam: list = Query(default=None), offense_col: list = Query(default=None), csv: Optional[bool] = False):
+    full_name: list = Query(default=None),
+    first_name: list = Query(default=None),
+    last_name: list = Query(default=None),
+    key_fangraphs: list = Query(default=None),
+    key_bbref: list = Query(default=None),
+    key_retro: list = Query(default=None),
+    key_mlbam: list = Query(default=None),
+    offense_col: list = Query(default=None),
+    csv: Optional[bool] = False,
+):
     all_players = MlbPlayer.select()

     if full_name is not None:
         name_list = [x.lower() for x in full_name]
         all_players = all_players.where(
-            fn.lower(MlbPlayer.first_name) + ' ' + fn.lower(MlbPlayer.last_name) << name_list
+            fn.lower(MlbPlayer.first_name) + " " + fn.lower(MlbPlayer.last_name)
+            << name_list
         )
     if first_name is not None:
         name_list = [x.lower() for x in first_name]
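update_card_urls is pure cache-busting: it rewrites each stored image URL with a date-stamped query string so clients re-fetch regenerated card art. A quick illustration with a hypothetical URL; note that in both the old and reformatted code the image2 branch appends the date without the "?d=" marker it just stripped, which looks like a pre-existing bug that this formatting pass deliberately leaves in place:

    # Hypothetical URL, for illustration only.
    url = "https://cdn.example.com/cards/123.png?d=2025-9-1"
    base = url.split("?d=")[0]          # "https://cdn.example.com/cards/123.png"
    image1 = base + "?d=2026-1-25"      # fresh stamp -> clients re-download
    image2 = base + "2026-1-25"         # image2 branch: no "?d=" separator,
                                        # so the path itself is mangled
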
@@ -89,44 +112,56 @@ async def get_players(
     if csv:
         return_val = query_to_csv(all_players)
         db.close()
-        return Response(content=return_val, media_type='text/csv')
+        return Response(content=return_val, media_type="text/csv")

-    return_val = {'count': all_players.count(), 'players': [
-        model_to_dict(x) for x in all_players
-    ]}
+    return_val = {
+        "count": all_players.count(),
+        "players": [model_to_dict(x) for x in all_players],
+    }
     db.close()
     return return_val


-@router.get('/{player_id}')
+@router.get("/{player_id}")
 async def get_one_player(player_id: int):
     this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id)
     if this_player is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"MlbPlayer id {player_id} not found"
+        )

     r_data = model_to_dict(this_player)
     db.close()
     return r_data


-@router.patch('/{player_id}')
+@router.patch("/{player_id}")
 async def patch_player(
-    player_id: int, first_name: Optional[str] = None, last_name: Optional[str] = None,
-    key_fangraphs: Optional[str] = None, key_bbref: Optional[str] = None, key_retro: Optional[str] = None,
-    key_mlbam: Optional[str] = None, offense_col: Optional[str] = None, token: str = Depends(oauth2_scheme)):
+    player_id: int,
+    first_name: Optional[str] = None,
+    last_name: Optional[str] = None,
+    key_fangraphs: Optional[str] = None,
+    key_bbref: Optional[str] = None,
+    key_retro: Optional[str] = None,
+    key_mlbam: Optional[str] = None,
+    offense_col: Optional[str] = None,
+    token: str = Depends(oauth2_scheme),
+):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to patch mlb players. This event has been logged.'
+            detail="You are not authorized to patch mlb players. This event has been logged.",
         )

     this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id)
     if this_player is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"MlbPlayer id {player_id} not found"
+        )

     if first_name is not None:
         this_player.first_name = first_name
@@ -151,65 +186,69 @@ async def patch_player(
         db.close()
         raise HTTPException(
             status_code=418,
-            detail='Well slap my ass and call me a teapot; I could not save that player'
+            detail="Well slap my ass and call me a teapot; I could not save that player",
         )


-@router.post('')
+@router.post("")
 async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to post mlb players. This event has been logged.'
+            detail="You are not authorized to post mlb players. This event has been logged.",
         )

     new_players = []
     for x in players.players:
         dupes = MlbPlayer.select().where(
-            (MlbPlayer.key_fangraphs == x.key_fangraphs) | (MlbPlayer.key_mlbam == x.key_mlbam) |
-            (MlbPlayer.key_retro == x.key_retro) | (MlbPlayer.key_bbref == x.key_bbref)
+            (MlbPlayer.key_fangraphs == x.key_fangraphs)
+            | (MlbPlayer.key_mlbam == x.key_mlbam)
+            | (MlbPlayer.key_retro == x.key_retro)
+            | (MlbPlayer.key_bbref == x.key_bbref)
         )
         if dupes.count() > 0:
             db.close()
             raise HTTPException(
                 status_code=400,
-                detail=f'{x.first_name} {x.last_name} has a key already in the database'
+                detail=f"{x.first_name} {x.last_name} has a key already in the database",
             )

         new_players.append(x.dict())

     with db.atomic():
-        for batch in chunked(new_players, 15):
-            MlbPlayer.insert_many(batch).on_conflict_replace().execute()
+        # Use PostgreSQL-compatible upsert helper
+        # Note: Duplicate check is already done above, so this is effectively just insert
+        upsert_mlb_players(new_players, batch_size=15)
     db.close()

-    return f'Inserted {len(new_players)} new MLB players'
+    return f"Inserted {len(new_players)} new MLB players"


-@router.post('/one')
+@router.post("/one")
 async def post_one_player(player: PlayerModel, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to post mlb players. This event has been logged.'
+            detail="You are not authorized to post mlb players. This event has been logged.",
         )

     dupes = MlbPlayer.select().where(
-        (MlbPlayer.key_fangraphs == player.key_fangraphs) | (MlbPlayer.key_mlbam == player.key_mlbam) |
-        (MlbPlayer.key_bbref == player.key_bbref)
+        (MlbPlayer.key_fangraphs == player.key_fangraphs)
+        | (MlbPlayer.key_mlbam == player.key_mlbam)
+        | (MlbPlayer.key_bbref == player.key_bbref)
     )
     if dupes.count() > 0:
-        logging.info(f'POST /mlbplayers/one - dupes found:')
+        logging.info(f"POST /mlbplayers/one - dupes found:")
         for x in dupes:
-            logging.info(f'{x}')
+            logging.info(f"{x}")
         db.close()
         raise HTTPException(
             status_code=400,
-            detail=f'{player.first_name} {player.last_name} has a key already in the database'
+            detail=f"{player.first_name} {player.last_name} has a key already in the database",
         )

     new_player = MlbPlayer(**player.dict())
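Because this route 400s on any duplicate external key before it ever writes, the MlbPlayer upsert never reaches its conflict branch; as the inline note says, the helper can degenerate to a plain batched insert on both engines. A sketch of that minimal form (helper name from the diff; body assumed):

    from peewee import chunked

    def upsert_mlb_players(rows, batch_size=15):
        # Dupes were rejected by the caller, so no conflict handling is needed.
        for batch in chunked(rows, batch_size):
            MlbPlayer.insert_many(batch).execute()
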
@@ -221,43 +260,51 @@ async def post_one_player(player: PlayerModel, token: str = Depends(oauth2_schem
     else:
         raise HTTPException(
             status_code=418,
-            detail='Well slap my ass and call me a teapot; I could not save that player'
+            detail="Well slap my ass and call me a teapot; I could not save that player",
         )


-@router.delete('/{player_id}')
+@router.delete("/{player_id}")
 async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to delete mlb players. This event has been logged.'
+            detail="You are not authorized to delete mlb players. This event has been logged.",
         )

     this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id)
     if this_player is None:
         db.close()
-        raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found')
+        raise HTTPException(
+            status_code=404, detail=f"MlbPlayer id {player_id} not found"
+        )

     count = this_player.delete_instance()
     db.close()

     if count == 1:
-        raise HTTPException(status_code=200, detail=f'Player {player_id} has been deleted')
+        raise HTTPException(
+            status_code=200, detail=f"Player {player_id} has been deleted"
+        )
     else:
-        raise HTTPException(status_code=500, detail=f'Player {player_id} was not deleted')
+        raise HTTPException(
+            status_code=500, detail=f"Player {player_id} was not deleted"
+        )


-@router.post('/update-cols')
-@router.post('/update-cols/{mlbplayer_id}')
-async def update_columns(mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme)):
+@router.post("/update-cols")
+@router.post("/update-cols/{mlbplayer_id}")
+async def update_columns(
+    mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme)
+):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to update mlb players. This event has been logged.'
+            detail="You are not authorized to update mlb players. This event has been logged.",
         )

     p_query = MlbPlayer.select()
@@ -267,27 +314,34 @@ async def update_columns(mlbplayer_id: Optional[int] = None, token: str = Depend
     total_count = 0
     for x in p_query:
         all_players = Player.select().where(Player.mlbplayer == x)
-        bc_update = BattingCard.update({
-            BattingCard.offense_col: x.offense_col,
-        }).where((BattingCard.player << all_players) & (BattingCard.offense_col != x.offense_col))
+        bc_update = BattingCard.update(
+            {
+                BattingCard.offense_col: x.offense_col,
+            }
+        ).where(
+            (BattingCard.player << all_players)
+            & (BattingCard.offense_col != x.offense_col)
+        )
         count = bc_update.execute()
         total_count += count
-        logging.info(f'Updated {count} batting cards for {x.first_name} {x.last_name}')
+        logging.info(f"Updated {count} batting cards for {x.first_name} {x.last_name}")
         update_card_urls(x)

     db.close()
-    return f'Updated {total_count} batting cards'
+    return f"Updated {total_count} batting cards"


-@router.post('/update-names')
-@router.post('/update-names/{mlbplayer_id}')
-async def update_names(mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme)):
+@router.post("/update-names")
+@router.post("/update-names/{mlbplayer_id}")
+async def update_names(
+    mlbplayer_id: Optional[int] = None, token: str = Depends(oauth2_scheme)
+):
     if not valid_token(token):
-        logging.warning(f'Bad Token: {token}')
+        logging.warning(f"Bad Token: {token}")
         db.close()
         raise HTTPException(
             status_code=401,
-            detail='You are not authorized to update mlb players. This event has been logged.'
+            detail="You are not authorized to update mlb players. This event has been logged.",
         )

     p_query = MlbPlayer.select()
@@ -296,16 +350,18 @@ async def update_names(mlbplayer_id: Optional[int] = None, token: str = Depends(

     total_count = 0
     for x in p_query:
-        p_update = Player.update({
-            Player.p_name: f'{x.first_name} {x.last_name}'
-        }).where((Player.mlbplayer == x) & (Player.p_name != f'{x.first_name} {x.last_name}'))
+        p_update = Player.update(
+            {Player.p_name: f"{x.first_name} {x.last_name}"}
+        ).where(
+            (Player.mlbplayer == x) & (Player.p_name != f"{x.first_name} {x.last_name}")
+        )
         count = p_update.execute()
         total_count += count
-        logging.info(f'Update {count} player records for {x.first_name} {x.last_name}')
+        logging.info(f"Update {count} player records for {x.first_name} {x.last_name}")
         update_card_urls(x)

     db.close()
-    return f'Updated {total_count} names'
+    return f"Updated {total_count} names"


 # @router.post('/link-players')
@@ -8,27 +8,34 @@ import pandas as pd
 import pydantic
 from pydantic import validator, root_validator

-from ..db_engine import db, PitchingCardRatings, model_to_dict, chunked, PitchingCard, Player, query_to_csv, Team, \
-    CardPosition
+from ..db_engine import (
+    db,
+    PitchingCardRatings,
+    model_to_dict,
+    chunked,
+    PitchingCard,
+    Player,
+    query_to_csv,
+    Team,
+    CardPosition,
+)
+from ..db_helpers import upsert_pitching_card_ratings
 from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

 logging.basicConfig(
-    filename=LOG_DATA['filename'],
-    format=LOG_DATA['format'],
-    level=LOG_DATA['log_level']
+    filename=LOG_DATA["filename"],
+    format=LOG_DATA["format"],
+    level=LOG_DATA["log_level"],
 )

-router = APIRouter(
-    prefix='/api/v2/pitchingcardratings',
-    tags=['pitchingcardratings']
-)
-RATINGS_FILE = 'storage/pitching-ratings.csv'
-BASIC_FILE = 'storage/pitching-basic.csv'
+router = APIRouter(prefix="/api/v2/pitchingcardratings", tags=["pitchingcardratings"])
+RATINGS_FILE = "storage/pitching-ratings.csv"
+BASIC_FILE = "storage/pitching-basic.csv"


 class PitchingCardRatingsModel(pydantic.BaseModel):
     pitchingcard_id: int
-    vs_hand: Literal['R', 'L', 'vR', 'vL']
+    vs_hand: Literal["R", "L", "vR", "vL"]
     homerun: float = 0.0
     bp_homerun: float = 0.0
     triple: float = 0.0
@ -62,30 +69,69 @@ class PitchingCardRatingsModel(pydantic.BaseModel):
|
|||||||
|
|
||||||
@validator("avg", always=True)
|
@validator("avg", always=True)
|
||||||
def avg_validator(cls, v, values, **kwargs):
|
def avg_validator(cls, v, values, **kwargs):
|
||||||
return (values['homerun'] + values['bp_homerun'] / 2 + values['triple'] + values['double_three'] +
|
return (
|
||||||
values['double_two'] + values['double_cf'] + values['single_two'] + values['single_one'] +
|
values["homerun"]
|
||||||
values['single_center'] + values['bp_single'] / 2) / 108
|
+ values["bp_homerun"] / 2
|
||||||
|
+ values["triple"]
|
||||||
|
+ values["double_three"]
|
||||||
|
+ values["double_two"]
|
||||||
|
+ values["double_cf"]
|
||||||
|
+ values["single_two"]
|
||||||
|
+ values["single_one"]
|
||||||
|
+ values["single_center"]
|
||||||
|
+ values["bp_single"] / 2
|
||||||
|
) / 108
|
||||||
|
|
||||||
@validator("obp", always=True)
|
@validator("obp", always=True)
|
||||||
def obp_validator(cls, v, values, **kwargs):
|
def obp_validator(cls, v, values, **kwargs):
|
||||||
return ((values['hbp'] + values['walk']) / 108) + values['avg']
|
return ((values["hbp"] + values["walk"]) / 108) + values["avg"]
|
||||||
|
|
||||||
@validator("slg", always=True)
|
@validator("slg", always=True)
|
||||||
def slg_validator(cls, v, values, **kwargs):
|
def slg_validator(cls, v, values, **kwargs):
|
||||||
return (values['homerun'] * 4 + values['bp_homerun'] * 2 + values['triple'] * 3 + values['double_three'] * 2 +
|
return (
|
||||||
values['double_two'] * 2 + values['double_cf'] * 2 + values['single_two'] + values['single_one'] +
|
values["homerun"] * 4
|
||||||
values['single_center'] + values['bp_single'] / 2) / 108
|
+ values["bp_homerun"] * 2
|
||||||
|
+ values["triple"] * 3
|
||||||
|
+ values["double_three"] * 2
|
||||||
|
+ values["double_two"] * 2
|
||||||
|
+ values["double_cf"] * 2
|
||||||
|
+ values["single_two"]
|
||||||
|
+ values["single_one"]
|
||||||
|
+ values["single_center"]
|
||||||
|
+ values["bp_single"] / 2
|
||||||
|
) / 108
|
||||||
|
|
||||||
@root_validator(skip_on_failure=True)
|
@root_validator(skip_on_failure=True)
|
||||||
def validate_chance_total(cls, values):
|
def validate_chance_total(cls, values):
|
||||||
total_chances = (
|
total_chances = (
|
||||||
values['homerun'] + values['bp_homerun'] + values['triple'] + values['double_three'] +
|
values["homerun"]
|
||||||
values['double_two'] + values['double_cf'] + values['single_two'] + values['single_one'] +
|
+ values["bp_homerun"]
|
||||||
values['single_center'] + values['bp_single'] + values['hbp'] + values['walk'] +
|
+ values["triple"]
|
||||||
values['strikeout'] + values['flyout_lf_b'] + values['flyout_cf_b'] + values['flyout_rf_b'] +
|
+ values["double_three"]
|
||||||
values['groundout_a'] + values['groundout_b'] + values['xcheck_p'] + values['xcheck_c'] +
|
+ values["double_two"]
|
||||||
values['xcheck_1b'] + values['xcheck_2b'] + values['xcheck_3b'] + values['xcheck_ss'] +
|
+ values["double_cf"]
|
||||||
values['xcheck_lf'] + values['xcheck_cf'] + values['xcheck_rf'])
|
+ values["single_two"]
|
||||||
|
+ values["single_one"]
|
||||||
|
+ values["single_center"]
|
||||||
|
+ values["bp_single"]
|
||||||
|
+ values["hbp"]
|
||||||
|
+ values["walk"]
|
||||||
|
+ values["strikeout"]
|
||||||
|
+ values["flyout_lf_b"]
|
||||||
|
+ values["flyout_cf_b"]
|
||||||
|
+ values["flyout_rf_b"]
|
||||||
|
+ values["groundout_a"]
|
||||||
|
+ values["groundout_b"]
|
||||||
|
+ values["xcheck_p"]
|
||||||
|
+ values["xcheck_c"]
|
||||||
|
+ values["xcheck_1b"]
|
||||||
|
+ values["xcheck_2b"]
|
||||||
|
+ values["xcheck_3b"]
|
||||||
|
+ values["xcheck_ss"]
|
||||||
|
+ values["xcheck_lf"]
|
||||||
|
+ values["xcheck_cf"]
|
||||||
|
+ values["xcheck_rf"]
|
||||||
|
)
|
||||||
|
|
||||||
if round(total_chances) != 108:
|
if round(total_chances) != 108:
|
||||||
raise ValueError("Must have exactly 108 chances on the card")
|
raise ValueError("Must have exactly 108 chances on the card")
|
||||||
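Every validator above divides by 108, and the root validator rejects any card whose chances do not total 108 — presumably the Strat-O-Matic budget of three card columns times 36 two-dice combinations. A quick sanity check of that accounting (a sketch using the field names from the model above; the uniform 4.0 split is illustrative only):

chance_fields = [
    "homerun", "bp_homerun", "triple", "double_three", "double_two",
    "double_cf", "single_two", "single_one", "single_center", "bp_single",
    "hbp", "walk", "strikeout", "flyout_lf_b", "flyout_cf_b", "flyout_rf_b",
    "groundout_a", "groundout_b", "xcheck_p", "xcheck_c", "xcheck_1b",
    "xcheck_2b", "xcheck_3b", "xcheck_ss", "xcheck_lf", "xcheck_cf", "xcheck_rf",
]
ratings = dict.fromkeys(chance_fields, 4.0)  # 27 outcome buckets x 4.0 chances
assert round(sum(ratings.values())) == 108   # passes validate_chance_total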
@ -96,39 +142,51 @@ class RatingsList(pydantic.BaseModel):
    ratings: List[PitchingCardRatingsModel]


@router.get("")
async def get_card_ratings(
    pitchingcard_id: list = Query(default=None),
    vs_hand: Literal["R", "L", "vR", "vL"] = None,
    short_output: bool = False,
    csv: bool = False,
    cardset_id: list = Query(default=None),
    token: str = Depends(oauth2_scheme),
):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to pull card ratings."
        )

    all_ratings = PitchingCardRatings.select()

    if pitchingcard_id is not None:
        all_ratings = all_ratings.where(
            PitchingCardRatings.pitchingcard_id << pitchingcard_id
        )
    if vs_hand is not None:
        all_ratings = all_ratings.where(PitchingCardRatings.vs_hand == vs_hand[-1])
    if cardset_id is not None:
        set_players = Player.select(Player.player_id).where(
            Player.cardset_id << cardset_id
        )
        set_cards = PitchingCard.select(PitchingCard.id).where(
            PitchingCard.player << set_players
        )
        all_ratings = all_ratings.where(PitchingCardRatings.pitchingcard << set_cards)

    if csv:
        return_val = query_to_csv(all_ratings)
        db.close()
        return Response(content=return_val, media_type="text/csv")

    else:
        return_val = {
            "count": all_ratings.count(),
            "ratings": [
                model_to_dict(x, recurse=not short_output) for x in all_ratings
            ],
        }
        db.close()
        return return_val

@ -136,282 +194,358 @@ async def get_card_ratings(
def get_scouting_dfs(cardset_id: list = None):
    all_ratings = PitchingCardRatings.select()
    if cardset_id is not None:
        set_players = Player.select(Player.player_id).where(
            Player.cardset_id << cardset_id
        )
        set_cards = PitchingCard.select(PitchingCard.id).where(
            PitchingCard.player << set_players
        )
        all_ratings = all_ratings.where(PitchingCardRatings.pitchingcard << set_cards)

    vl_query = all_ratings.where(PitchingCardRatings.vs_hand == "L")
    vr_query = all_ratings.where(PitchingCardRatings.vs_hand == "R")

    vl_vals = [model_to_dict(x) for x in vl_query]
    for x in vl_vals:
        x.update(x["pitchingcard"])
        x["player_id"] = x["pitchingcard"]["player"]["player_id"]
        x["player_name"] = x["pitchingcard"]["player"]["p_name"]
        x["rarity"] = x["pitchingcard"]["player"]["rarity"]["name"]
        x["cardset_id"] = x["pitchingcard"]["player"]["cardset"]["id"]
        x["cardset_name"] = x["pitchingcard"]["player"]["cardset"]["name"]
        x["starter_rating"] = x["pitchingcard"]["starter_rating"]
        x["relief_rating"] = x["pitchingcard"]["relief_rating"]
        x["closer_rating"] = x["pitchingcard"]["closer_rating"]
        del x["pitchingcard"], x["player"]

    vr_vals = [model_to_dict(x) for x in vr_query]
    for x in vr_vals:
        x["player_id"] = x["pitchingcard"]["player"]["player_id"]
        del x["pitchingcard"]

    vl = pd.DataFrame(vl_vals)
    vr = pd.DataFrame(vr_vals)

    pit_df = pd.merge(vl, vr, on="player_id", suffixes=("_vl", "_vr")).set_index(
        "player_id", drop=False
    )
    logging.debug(f"pit_df: {pit_df}")

    positions = CardPosition.select().where(CardPosition.position == "P")
    if cardset_id is not None:
        set_players = Player.select(Player.player_id).where(
            Player.cardset_id << cardset_id
        )
        positions = positions.where(CardPosition.player << set_players)

    series_list = [
        pd.Series(
            dict([(x.player.player_id, x.range) for x in positions]), name=f"Range P"
        ),
        pd.Series(
            dict([(x.player.player_id, x.error) for x in positions]), name=f"Error P"
        ),
    ]
    db.close()
    logging.debug(f"series_list: {series_list}")

    return pit_df.join(series_list)


@router.get("/scouting")
async def get_card_scouting(team_id: int, ts: str):
    this_team = Team.get_or_none(Team.id == team_id)
    logging.debug(f"Team: {this_team} / has_guide: {this_team.has_guide}")
    if this_team is None or ts != this_team.team_hash() or this_team.has_guide != 1:
        logging.warning(f"Team_id {team_id} attempted to pull ratings")
        db.close()
        return (
            "Your team does not have the ratings guide enabled. If you have purchased a copy ping Cal to "
            "make sure it is enabled on your team. If you are interested you can pick it up here (thank you!): "
            "https://ko-fi.com/manticorum/shop"
        )

    if os.path.isfile(RATINGS_FILE):
        return FileResponse(
            path=RATINGS_FILE,
            media_type="text/csv",
            # headers=headers
        )

    raise HTTPException(
        status_code=400, detail="Go pester Cal - the scouting file is missing"
    )


@router.post("/calculate/scouting")
async def post_calc_scouting(token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to calculate card ratings."
        )

    logging.warning(f"Re-calculating pitching ratings\n\n")

    output = get_scouting_dfs()
    first = ["player_id", "player_name", "cardset_name", "rarity", "hand", "variant"]
    exclude = first + ["id_vl", "id_vr", "vs_hand_vl", "vs_hand_vr"]
    output = output[first + [col for col in output.columns if col not in exclude]]

    csv_file = pd.DataFrame(output).to_csv(index=False)
    with open(RATINGS_FILE, "w") as file:
        file.write(csv_file)

    return Response(content=csv_file, media_type="text/csv")


@router.get("/basic")
async def get_basic_scouting():
    if os.path.isfile(BASIC_FILE):
        return FileResponse(
            path=BASIC_FILE,
            media_type="text/csv",
            # headers=headers
        )

    raise HTTPException(
        status_code=400, detail="Go pester Cal - the scouting file is missing"
    )


@router.post("/calculate/basic")
async def post_calc_basic(token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to calculate basic ratings."
        )

    logging.warning(f"Re-calculating basic pitching ratings\n\n")

    raw_data = get_scouting_dfs()
    logging.debug(f"output: {raw_data}")

    def get_raw_leftcontrol(df_data):
        return ((1 - (df_data["obp_vl"] - df_data["avg_vl"])) * 100) + (
            1 - (df_data["wild_pitch"] / 20)
        )

    raw_series = raw_data.apply(get_raw_leftcontrol, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Control L"] = round(rank_series * 100)

    def get_raw_rightcontrol(df_data):
        return ((1 - (df_data["obp_vr"] - df_data["avg_vr"])) * 100) + (
            1 - (df_data["wild_pitch"] / 20)
        )

    raw_series = raw_data.apply(get_raw_rightcontrol, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Control R"] = round(rank_series * 100)

    def get_raw_leftstuff(df_data):
        return 10 - (
            df_data["slg_vl"]
            + df_data["slg_vl"]
            + ((df_data["homerun_vl"] + df_data["bp_homerun_vl"]) / 108)
        )

    raw_series = raw_data.apply(get_raw_leftstuff, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Stuff L"] = round(rank_series * 100)

    def get_raw_rightstuff(df_data):
        return 10 - (
            df_data["slg_vr"]
            + df_data["slg_vr"]
            + ((df_data["homerun_vr"] + df_data["bp_homerun_vr"]) / 108)
        )

    raw_series = raw_data.apply(get_raw_rightstuff, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Stuff R"] = round(rank_series * 100)

    def get_raw_fielding(df_data):
        return ((6 - df_data["Range P"]) * 10) + (50 - df_data["Error P"])

    raw_series = raw_data.apply(get_raw_fielding, axis=1)
    rank_series = raw_series.rank(pct=True)
    logging.debug(f"max fld: {raw_series.max()} / min fld: {raw_series.min()}")
    raw_data["Fielding"] = round(rank_series * 100)

    def get_raw_stamina(df_data):
        spow = df_data["starter_rating"] if pd.isna(df_data["starter_rating"]) else -1
        rpow = df_data["relief_rating"] if pd.isna(df_data["relief_rating"]) else -1
        this_pow = spow if spow > rpow else rpow

        return (
            (
                (this_pow * (df_data["obp_vr"] * (2 / 3)))
                + (this_pow * (df_data["obp_vl"] / 3))
            )
            * 4.5
        ) + this_pow

    raw_series = raw_data.apply(get_raw_stamina, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Stamina"] = round(rank_series * 100)

    def get_raw_hit(df_data):
        return 1 - (df_data["avg_vr"] * (2 / 3)) + (df_data["avg_vl"] / 3)

    raw_series = raw_data.apply(get_raw_hit, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["H/9"] = round(rank_series * 100)

    def get_raw_k(df_data):
        return ((df_data["strikeout_vr"] / 108) * (2 / 3)) + (
            (df_data["strikeout_vl"] / 108) / 3
        )

    raw_series = raw_data.apply(get_raw_k, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["K/9"] = round(rank_series * 100)

    def get_raw_bb(df_data):
        return ((df_data["walk_vr"] / 108) * (2 / 3)) + ((df_data["walk_vl"] / 108) / 3)

    raw_series = raw_data.apply(get_raw_bb, axis=1)
    rank_series = raw_series.rank(pct=True, ascending=False)
    raw_data["BB/9"] = round(rank_series * 100)

    def get_raw_hr(df_data):
        return 1 - (
            (((df_data["homerun_vr"] + df_data["bp_homerun_vr"]) / 108) * (2 / 3))
            + (((df_data["homerun_vl"] + df_data["bp_homerun_vl"]) / 108) / 3)
        )

    raw_series = raw_data.apply(get_raw_hr, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["HR/9"] = round(rank_series * 100)

    def get_raw_rating(df_data):
        spow = df_data["starter_rating"] if pd.isna(df_data["starter_rating"]) else -1
        rpow = df_data["relief_rating"] if pd.isna(df_data["relief_rating"]) else -1

        if spow > rpow and spow >= 4:
            return (
                (
                    (
                        df_data["H/9"]
                        + df_data["K/9"]
                        + df_data["BB/9"]
                        + df_data["HR/9"]
                    )
                    * 5
                )
                + (df_data["Fielding"])
                + (df_data["Stamina"] * 5)
                + (((df_data["Stuff L"] / 3) + (df_data["Stuff R"] * (2 / 3))) * 4)
                + (((df_data["Control L"] / 3) + (df_data["Control R"] * (2 / 3))) * 2)
            )
        else:
            return (
                (
                    (
                        df_data["H/9"]
                        + df_data["K/9"]
                        + df_data["BB/9"]
                        + df_data["HR/9"]
                    )
                    * 5
                )
                + (df_data["Fielding"])
                + (df_data["Stamina"] * 5)
                + (((df_data["Stuff L"] / 3) + (df_data["Stuff R"] * (2 / 3))) * 4)
                + (((df_data["Control L"] / 3) + (df_data["Control R"] * (2 / 3))) * 2)
            )

    raw_series = raw_data.apply(get_raw_rating, axis=1)
    rank_series = raw_series.rank(pct=True)
    raw_data["Rating"] = round(rank_series * 100)
    output = raw_data[
        [
            "player_id",
            "player_name",
            "Rating",
            "Control R",
            "Control L",
            "Stuff R",
            "Stuff L",
            "Stamina",
            "Fielding",
            "H/9",
            "K/9",
            "BB/9",
            "HR/9",
            "hand",
            "cardset_name",
        ]
    ]

    csv_file = pd.DataFrame(output).to_csv(index=False)
    with open(BASIC_FILE, "w") as file:
        file.write(csv_file)

    return Response(content=csv_file, media_type="text/csv")

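Each basic rating computed in post_calc_basic follows the same three steps: a row-wise raw metric, a percentile rank, and a 0-100 scale. If this ever gets refactored, the repetition collapses to one helper along these lines (a sketch only; add_percentile_rating is a hypothetical name, not part of this commit):

import pandas as pd


def add_percentile_rating(df: pd.DataFrame, name: str, raw_fn, ascending: bool = True):
    """Apply raw_fn row-wise, rank as a percentile, store a 0-100 rating column."""
    raw_series = df.apply(raw_fn, axis=1)
    rank_series = raw_series.rank(pct=True, ascending=ascending)
    df[name] = round(rank_series * 100)


# BB/9 ranks low-walk pitchers highest, so it would flip the sort:
# add_percentile_rating(raw_data, "BB/9", get_raw_bb, ascending=False)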
@router.get("/{ratings_id}")
async def get_one_rating(ratings_id: int, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to pull card ratings."
        )

    this_rating = PitchingCardRatings.get_or_none(PitchingCardRatings.id == ratings_id)
    if this_rating is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"PitchingCardRating id {ratings_id} not found"
        )

    r_data = model_to_dict(this_rating)
    db.close()
    return r_data


@router.get("/player/{player_id}")
async def get_player_ratings(
    player_id: int, variant: list = Query(default=None), short_output: bool = False
):
    all_cards = (
        PitchingCard.select()
        .where(PitchingCard.player_id == player_id)
        .order_by(PitchingCard.variant)
    )
    if variant is not None:
        all_cards = all_cards.where(PitchingCard.variant << variant)

    all_ratings = PitchingCardRatings.select().where(
        PitchingCardRatings.pitchingcard << all_cards
    )

    return_val = {
        "count": all_ratings.count(),
        "ratings": [model_to_dict(x, recurse=not short_output) for x in all_ratings],
    }
    db.close()
    return return_val


@router.put("")
async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to post card ratings."
        )

    new_ratings = []

@ -419,43 +553,50 @@ async def put_ratings(ratings: RatingsList, token: str = Depends(oauth2_scheme))
    for x in ratings.ratings:
        try:
            PitchingCardRatings.get(
                (PitchingCardRatings.pitchingcard_id == x.pitchingcard_id)
                & (PitchingCardRatings.vs_hand == x.vs_hand)
            )
            updates += (
                PitchingCardRatings.update(x.dict())
                .where(
                    (PitchingCardRatings.pitchingcard_id == x.pitchingcard_id)
                    & (PitchingCardRatings.vs_hand == x.vs_hand)
                )
                .execute()
            )
        except PitchingCardRatings.DoesNotExist:
            new_ratings.append(x.dict())

    with db.atomic():
        # Use PostgreSQL-compatible upsert helper
        upsert_pitching_card_ratings(new_ratings, batch_size=30)

    db.close()
    return f"Updated ratings: {updates}; new ratings: {len(new_ratings)}"

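The db_helpers.py diff itself is suppressed below as too large, so as orientation only: a helper like upsert_pitching_card_ratings plausibly wraps peewee's ON CONFLICT support along these lines (the module path, signature, and preserved column list are assumptions, not the committed code):

# Hypothetical sketch of a db_helpers.py upsert; assumes a unique constraint
# exists on (pitchingcard_id, vs_hand) so PostgreSQL has a conflict target.
from peewee import chunked

from .db_engine import PitchingCardRatings


def upsert_pitching_card_ratings(rows, batch_size=30):
    """Batch upsert: SQLite's on_conflict_replace() deletes and re-inserts the
    row, while PostgreSQL needs INSERT ... ON CONFLICT ... DO UPDATE instead."""
    for batch in chunked(rows, batch_size):
        (
            PitchingCardRatings.insert_many(batch)
            .on_conflict(
                conflict_target=[
                    PitchingCardRatings.pitchingcard_id,
                    PitchingCardRatings.vs_hand,
                ],
                # preserve= keeps the incoming (EXCLUDED) value on conflict;
                # the real helper presumably lists every rating column here.
                preserve=[
                    PitchingCardRatings.homerun,
                    PitchingCardRatings.bp_homerun,
                    PitchingCardRatings.triple,
                ],
            )
            .execute()
        )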
@router.delete("/{ratings_id}")
async def delete_rating(ratings_id: int, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401, detail="You are not authorized to delete card ratings."
        )

    this_rating = PitchingCardRatings.get_or_none(PitchingCardRatings.id == ratings_id)
    if this_rating is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"PitchingCardRating id {ratings_id} not found"
        )

    count = this_rating.delete_instance()
    db.close()

    if count == 1:
        return f"Rating {this_rating} has been deleted"
    else:
        raise HTTPException(
            status_code=500, detail=f"Rating {this_rating} could not be deleted"
        )

@ -6,18 +6,16 @@ import logging
import pydantic

from ..db_engine import db, PitchingCard, model_to_dict, chunked, Player, fn, MlbPlayer
from ..db_helpers import upsert_pitching_cards
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

logging.basicConfig(
    filename=LOG_DATA["filename"],
    format=LOG_DATA["format"],
    level=LOG_DATA["log_level"],
)

router = APIRouter(prefix="/api/v2/pitchingcards", tags=["pitchingcards"])


class PitchingCardModel(pydantic.BaseModel):

@ -31,17 +29,21 @@ class PitchingCardModel(pydantic.BaseModel):
    closer_rating: int = None
    batting: str = "#1WR-C"
    offense_col: int = None
    hand: Literal["R", "L", "S"] = "R"


class PitchingCardList(pydantic.BaseModel):
    cards: List[PitchingCardModel]


@router.get("")
async def get_pitching_cards(
    player_id: list = Query(default=None),
    player_name: list = Query(default=None),
    cardset_id: list = Query(default=None),
    short_output: bool = False,
    limit: Optional[int] = None,
):
    all_cards = PitchingCard.select()
    if player_id is not None:
        all_cards = all_cards.where(PitchingCard.player_id << player_id)

@ -56,46 +58,56 @@ async def get_pitching_cards(
    if limit is not None:
        all_cards = all_cards.limit(limit)

    return_val = {
        "count": all_cards.count(),
        "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards],
    }
    db.close()
    return return_val


@router.get("/{card_id}")
async def get_one_card(card_id: int):
    this_card = PitchingCard.get_or_none(PitchingCard.id == card_id)
    if this_card is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"PitchingCard id {card_id} not found"
        )

    r_card = model_to_dict(this_card)
    db.close()
    return r_card


@router.get("/player/{player_id}")
async def get_player_cards(
    player_id: int, variant: list = Query(default=None), short_output: bool = False
):
    all_cards = (
        PitchingCard.select()
        .where(PitchingCard.player_id == player_id)
        .order_by(PitchingCard.variant)
    )
    if variant is not None:
        all_cards = all_cards.where(PitchingCard.variant << variant)

    return_val = {
        "count": all_cards.count(),
        "cards": [model_to_dict(x, recurse=not short_output) for x in all_cards],
    }
    db.close()
    return return_val


@router.put("")
async def put_cards(cards: PitchingCardList, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401,
            detail="You are not authorized to post pitching cards. This event has been logged.",
        )

    new_cards = []

@ -104,52 +116,73 @@ async def put_cards(cards: PitchingCardList, token: str = Depends(oauth2_scheme)
    for x in cards.cards:
        try:
            old = PitchingCard.get(
                (PitchingCard.player_id == x.player_id)
                & (PitchingCard.variant == x.variant)
            )

            if x.offense_col is None:
                x.offense_col = old.offense_col
            updates += (
                PitchingCard.update(x.dict())
                .where(
                    (PitchingCard.player_id == x.player_id)
                    & (PitchingCard.variant == x.variant)
                )
                .execute()
            )
        except PitchingCard.DoesNotExist:
            if x.offense_col is None:
                this_player = Player.get_or_none(Player.player_id == x.player_id)
                mlb_player = MlbPlayer.get_or_none(
                    MlbPlayer.key_bbref == this_player.bbref_id
                )
                if mlb_player is not None:
                    logging.info(
                        f"setting offense_col to {mlb_player.offense_col} for {this_player.p_name}"
                    )
                    x.offense_col = mlb_player.offense_col
                else:
                    logging.info(
                        f"randomly setting offense_col for {this_player.p_name}"
                    )
                    x.offense_col = random.randint(1, 3)
            logging.debug(f"x.dict(): {x.dict()}")
            new_cards.append(x.dict())

    with db.atomic():
        # Use PostgreSQL-compatible upsert helper
        upsert_pitching_cards(new_cards, batch_size=30)

    db.close()
    return f"Updated cards: {updates}; new cards: {len(new_cards)}"


@router.patch("/{card_id}")
async def patch_card(
    card_id: int,
    balk: Optional[int] = None,
    wild_pitch: Optional[int] = None,
    hold: Optional[int] = None,
    starter_rating: Optional[int] = None,
    relief_rating: Optional[int] = None,
    closer_rating: Optional[int] = None,
    batting: Optional[int] = None,
    token: str = Depends(oauth2_scheme),
):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401,
            detail="You are not authorized to patch pitching cards. This event has been logged.",
        )

    this_card = PitchingCard.get_or_none(PitchingCard.id == card_id)
    if this_card is None:
        db.close()
        raise HTTPException(
            status_code=404, detail=f"PitchingCard id {card_id} not found"
        )

    if balk is not None:
        this_card.balk = balk

@ -174,45 +207,47 @@ async def patch_card(
    db.close()
    raise HTTPException(
        status_code=418,
        detail="Well slap my ass and call me a teapot; I could not save that card",
    )


@router.delete("/{card_id}")
async def delete_card(card_id: int, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401,
            detail="You are not authorized to delete pitching cards. This event has been logged.",
        )

    this_card = PitchingCard.get_or_none(PitchingCard.id == card_id)
    if this_card is None:
        db.close()
        raise HTTPException(status_code=404, detail=f"Pitching id {card_id} not found")

    count = this_card.delete_instance()
    db.close()

    if count == 1:
        return f"Card {this_card} has been deleted"
    else:
        raise HTTPException(
            status_code=500, detail=f"Card {this_card} could not be deleted"
        )


@router.delete("")
async def delete_all_cards(token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f"Bad Token: {token}")
        db.close()
        raise HTTPException(
            status_code=401,
            detail="You are not authorized to delete pitching cards. This event has been logged.",
        )

    d_query = PitchingCard.delete()
    deleted = d_query.execute()

    return f"Deleted {deleted} pitching cards"

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
92 scripts/audit_results.json Normal file
@ -0,0 +1,92 @@
{
  "generated_at": "2026-01-25T23:04:23.049271",
  "summary": {
    "total_tables": 29,
    "total_records": 658963,
    "total_issues": 3,
    "critical_issues": 0,
    "high_issues": 3,
    "medium_issues": 0
  },
  "table_counts": {
    "award": 0,
    "battingcard": 6036,
    "battingcardratings": 12072,
    "battingstat": 50228,
    "card": 61746,
    "cardposition": 18654,
    "cardset": 29,
    "current": 1,
    "decision": 29694,
    "event": 9,
    "gamerewards": 10,
    "gauntletreward": 45,
    "gauntletrun": 458,
    "mlbplayer": 4781,
    "notification": 14362,
    "pack": 20595,
    "packtype": 9,
    "paperdex": 46121,
    "pitchingcard": 6887,
    "pitchingcardratings": 13774,
    "pitchingstat": 13135,
    "player": 12964,
    "rarity": 6,
    "result": 2235,
    "reward": 8043,
    "roster": 23,
    "stratgame": 4208,
    "stratplay": 332737,
    "team": 101
  },
  "issues": [
    {
      "type": "ORPHANED_FK",
      "severity": "HIGH",
      "child_table": "battingstat",
      "child_field": "card_id",
      "parent_table": "card",
      "parent_field": "id",
      "description": "Batting stats referencing non-existent cards",
      "orphan_count": 1953,
      "sample_orphan_ids": [
        1419,
        1419,
        1419,
        1419,
        1433
      ]
    },
    {
      "type": "ORPHANED_FK",
      "severity": "HIGH",
      "child_table": "pitchingstat",
      "child_field": "card_id",
      "parent_table": "card",
      "parent_field": "id",
      "description": "Pitching stats referencing non-existent cards",
      "orphan_count": 437,
      "sample_orphan_ids": [
        1412,
        1660,
        2045,
        2046,
        2061
      ]
    },
    {
      "type": "VARCHAR_TOO_LONG",
      "severity": "HIGH",
      "table": "team",
      "field": "abbrev",
      "description": "Team abbreviation",
      "max_found": 13,
      "expected_max": 10,
      "sample_values": [
        "Gauntlet-KC...",
        "Gauntlet-NCB...",
        "Gauntlet-SLV..."
      ]
    }
  ]
}
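Since this report is checked in as JSON, a few lines of Python summarize it faster than scrolling (a sketch; it assumes only the schema visible above):

import json

with open("scripts/audit_results.json") as fh:
    audit = json.load(fh)

summary = audit["summary"]
print(f"{summary['total_records']:,} records across {summary['total_tables']} tables")
for issue in audit["issues"]:
    if issue["type"] == "ORPHANED_FK":
        print(
            f"[{issue['severity']}] {issue['child_table']}.{issue['child_field']} "
            f"-> {issue['parent_table']}.{issue['parent_field']}: "
            f"{issue['orphan_count']} orphans"
        )
    else:
        print(f"[{issue['severity']}] {issue.get('table')}.{issue.get('field')}: {issue['description']}")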
564 scripts/audit_sqlite.py Executable file
@ -0,0 +1,564 @@
#!/usr/bin/env python3
"""
Paper Dynasty SQLite Data Integrity Audit

Pre-migration script to identify potential issues before migrating to PostgreSQL.
Based on issues discovered during Major Domo migration (August 2025).

Checks for:
1. NULL values in fields that will be NOT NULL in PostgreSQL
2. Orphaned foreign key records
3. VARCHAR field max lengths (PostgreSQL is stricter)
4. Record counts for baseline comparison
5. Primary key gaps or duplicates

Usage:
    python scripts/audit_sqlite.py
    python scripts/audit_sqlite.py --fix  # Apply safe fixes
"""

import argparse
import json
import sqlite3
import sys
from datetime import datetime
from pathlib import Path


def connect_db(db_path: str) -> sqlite3.Connection:
    """Connect to SQLite database."""
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn


def get_table_record_counts(conn: sqlite3.Connection) -> dict:
    """Get record counts for all tables."""
    counts = {}
    cursor = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
    )
    for row in cursor:
        table_name = row["name"]
        count_cursor = conn.execute(f"SELECT COUNT(*) FROM {table_name}")
        counts[table_name] = count_cursor.fetchone()[0]
    return counts

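These counts are the baseline for check 4 in the docstring; after migration the same per-table counts can be taken from PostgreSQL and diffed. A minimal comparison sketch (compare_counts is an assumed helper, not part of the committed script):

def compare_counts(sqlite_counts: dict, postgres_counts: dict) -> list:
    """Return (table, sqlite_count, postgres_count) tuples that do not match."""
    mismatches = []
    for table, expected in sorted(sqlite_counts.items()):
        actual = postgres_counts.get(table, 0)
        if actual != expected:
            mismatches.append((table, expected, actual))
    return mismatches


# compare_counts(get_table_record_counts(conn), pg_counts) should come back
# empty before the migration is declared done.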
|
def check_null_values(conn: sqlite3.Connection) -> list:
|
||||||
|
"""
|
||||||
|
Check for NULL values in fields that should not be null.
|
||||||
|
|
||||||
|
These are the fields that Major Domo found issues with.
|
||||||
|
"""
|
||||||
|
issues = []
|
||||||
|
|
||||||
|
# Fields to check - based on Major Domo experience
|
||||||
|
null_checks = [
|
||||||
|
# (table, field, description)
|
||||||
|
("team", "abbrev", "Team abbreviation"),
|
||||||
|
("team", "sname", "Team short name"),
|
||||||
|
("team", "lname", "Team long name"),
|
||||||
|
("player", "p_name", "Player name"),
|
||||||
|
("player", "image", "Player image URL"),
|
||||||
|
("card", "player_id", "Card player reference"),
|
||||||
|
("stratplay", "game_id", "Play game reference"),
|
||||||
|
("stratplay", "pitcher_id", "Play pitcher reference"),
|
||||||
|
("decision", "game_id", "Decision game reference"),
|
||||||
|
("decision", "pitcher_id", "Decision pitcher reference"),
|
||||||
|
]
|
||||||
|
|
||||||
|
for table, field, description in null_checks:
|
||||||
|
try:
|
||||||
|
cursor = conn.execute(f"SELECT COUNT(*) FROM {table} WHERE {field} IS NULL")
|
||||||
|
null_count = cursor.fetchone()[0]
|
||||||
|
if null_count > 0:
|
||||||
|
cursor = conn.execute(f"SELECT COUNT(*) FROM {table}")
|
||||||
|
total_count = cursor.fetchone()[0]
|
||||||
|
issues.append(
|
||||||
|
{
|
||||||
|
"type": "NULL_VALUE",
|
||||||
|
"severity": "HIGH"
|
||||||
|
if null_count > total_count * 0.1
|
||||||
|
else "MEDIUM",
|
||||||
|
"table": table,
|
||||||
|
"field": field,
|
||||||
|
"description": description,
|
||||||
|
"null_count": null_count,
|
||||||
|
"total_count": total_count,
|
||||||
|
"percentage": round(null_count / total_count * 100, 2)
|
||||||
|
if total_count > 0
|
||||||
|
else 0,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
# Table or column doesn't exist
|
||||||
|
pass
|
||||||
|
|
||||||
|
return issues
|
||||||
|
|
||||||
|
|
||||||
|
def check_orphaned_foreign_keys(conn: sqlite3.Connection) -> list:
    """
    Check for orphaned foreign key records.

    These will fail with foreign key constraint violations in PostgreSQL.
    """
    issues = []

    # Foreign key relationships to check
    fk_checks = [
        # (child_table, child_field, parent_table, parent_field, description)
        ("card", "player_id", "player", "player_id", "Cards referencing non-existent players"),
        ("card", "team_id", "team", "id", "Cards referencing non-existent teams"),
        ("stratplay", "game_id", "stratgame", "id", "Plays referencing non-existent games"),
        ("stratplay", "batter_id", "player", "player_id", "Plays referencing non-existent batters"),
        ("stratplay", "pitcher_id", "player", "player_id", "Plays referencing non-existent pitchers"),
        ("decision", "game_id", "stratgame", "id", "Decisions referencing non-existent games"),
        ("decision", "pitcher_id", "player", "player_id", "Decisions referencing non-existent pitchers"),
        ("battingstat", "card_id", "card", "id", "Batting stats referencing non-existent cards"),
        ("pitchingstat", "card_id", "card", "id", "Pitching stats referencing non-existent cards"),
        ("battingcard", "player_id", "player", "player_id", "Batting cards referencing non-existent players"),
        ("pitchingcard", "player_id", "player", "player_id", "Pitching cards referencing non-existent players"),
        ("cardposition", "player_id", "player", "player_id", "Card positions referencing non-existent players"),
        ("paperdex", "player_id", "player", "player_id", "Paperdex entries referencing non-existent players"),
        ("paperdex", "team_id", "team", "id", "Paperdex entries referencing non-existent teams"),
        ("gauntletrun", "team_id", "team", "id", "Gauntlet runs referencing non-existent teams"),
    ]

    for child_table, child_field, parent_table, parent_field, description in fk_checks:
        try:
            # Use explicit column names to avoid ambiguity
            query = f"""
                SELECT COUNT(*)
                FROM {child_table} c
                LEFT JOIN {parent_table} p ON c.{child_field} = p.{parent_field}
                WHERE c.{child_field} IS NOT NULL AND p.{parent_field} IS NULL
            """
            cursor = conn.execute(query)
            orphan_count = cursor.fetchone()[0]

            if orphan_count > 0:
                # Get sample orphaned IDs
                sample_query = f"""
                    SELECT c.{child_field}
                    FROM {child_table} c
                    LEFT JOIN {parent_table} p ON c.{child_field} = p.{parent_field}
                    WHERE c.{child_field} IS NOT NULL AND p.{parent_field} IS NULL
                    LIMIT 5
                """
                sample_cursor = conn.execute(sample_query)
                sample_ids = [row[0] for row in sample_cursor.fetchall()]

                issues.append(
                    {
                        "type": "ORPHANED_FK",
                        "severity": "HIGH",
                        "child_table": child_table,
                        "child_field": child_field,
                        "parent_table": parent_table,
                        "parent_field": parent_field,
                        "description": description,
                        "orphan_count": orphan_count,
                        "sample_orphan_ids": sample_ids,
                    }
                )
        except sqlite3.OperationalError as e:
            # Table or column doesn't exist
            print(
                f"Warning: Could not check {child_table}.{child_field} -> {parent_table}.{parent_field}: {e}"
            )

    return issues


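# Illustrative note on check_orphaned_foreign_keys() above: the first
# fk_check expands to roughly this SQL (whitespace approximate):
#
#   SELECT COUNT(*)
#   FROM card c
#   LEFT JOIN player p ON c.player_id = p.player_id
#   WHERE c.player_id IS NOT NULL AND p.player_id IS NULL
#
# i.e. count child rows whose referenced parent row does not exist.

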
def check_varchar_lengths(conn: sqlite3.Connection) -> list:
    """
    Check max lengths of string fields.

    PostgreSQL enforces VARCHAR(n) limits; SQLite ignores declared lengths.
    """
    issues = []

    # Fields to check with expected max lengths
    varchar_checks = [
        # (table, field, expected_max_length, description)
        ("player", "p_name", 255, "Player name"),
        ("player", "image", 1000, "Player image URL"),
        ("player", "image2", 1000, "Player image2 URL"),
        ("player", "headshot", 500, "Player headshot URL"),
        ("player", "vanity_card", 500, "Player vanity card"),
        ("player", "strat_code", 100, "Strat code"),
        ("player", "bbref_id", 50, "Baseball Reference ID"),
        ("player", "description", 1000, "Player description"),
        ("team", "abbrev", 10, "Team abbreviation"),
        ("team", "sname", 100, "Team short name"),
        ("team", "lname", 255, "Team long name"),
        ("notification", "title", 255, "Notification title"),
        ("notification", "message", 2000, "Notification message"),
    ]

    for table, field, expected_max, description in varchar_checks:
        try:
            cursor = conn.execute(f"SELECT MAX(LENGTH({field})) FROM {table}")
            max_length = cursor.fetchone()[0]

            if max_length and max_length > expected_max:
                # Get sample of long values
                sample_cursor = conn.execute(
                    f"SELECT {field} FROM {table} WHERE LENGTH({field}) > {expected_max} LIMIT 3"
                )
                samples = [
                    row[0][:100] + "..." if row[0] else None
                    for row in sample_cursor.fetchall()
                ]

                issues.append(
                    {
                        "type": "VARCHAR_TOO_LONG",
                        "severity": "HIGH",
                        "table": table,
                        "field": field,
                        "description": description,
                        "max_found": max_length,
                        "expected_max": expected_max,
                        "sample_values": samples,
                    }
                )
            elif max_length:
                # Info: report actual max for reference
                pass
        except sqlite3.OperationalError:
            pass

    return issues


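# Illustrative note on check_varchar_lengths() above: SQLite stores a
# 40-character value in a column declared VARCHAR(10) without complaint,
# while PostgreSQL rejects the same insert with "value too long for type
# character varying(10)" - catching these now avoids mid-migration failures.

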
def check_duplicate_primary_keys(conn: sqlite3.Connection) -> list:
    """
    Check for duplicate primary keys (shouldn't happen but good to verify).
    """
    issues = []

    pk_checks = [
        ("player", "player_id"),
        ("team", "id"),
        ("card", "id"),
        ("stratgame", "id"),
        ("stratplay", "id"),
    ]

    for table, pk_field in pk_checks:
        try:
            cursor = conn.execute(f"""
                SELECT {pk_field}, COUNT(*) as cnt
                FROM {table}
                GROUP BY {pk_field}
                HAVING COUNT(*) > 1
            """)
            duplicates = cursor.fetchall()

            if duplicates:
                issues.append(
                    {
                        "type": "DUPLICATE_PK",
                        "severity": "CRITICAL",
                        "table": table,
                        "pk_field": pk_field,
                        "duplicate_ids": [row[0] for row in duplicates[:10]],
                        "duplicate_count": len(duplicates),
                    }
                )
        except sqlite3.OperationalError:
            pass

    return issues


def check_unique_constraints(conn: sqlite3.Connection) -> list:
    """
    Check that composite unique constraints would be satisfied.
    These are the indexes that on_conflict_replace() depends on.
    """
    issues = []

    unique_checks = [
        # (table, fields, description)
        ("battingcard", ["player_id", "variant"], "Batting card unique constraint"),
        ("pitchingcard", ["player_id", "variant"], "Pitching card unique constraint"),
        ("cardposition", ["player_id", "variant", "position"], "Card position unique constraint"),
        ("battingcardratings", ["battingcard_id", "vs_hand"], "Batting card ratings unique constraint"),
        ("pitchingcardratings", ["pitchingcard_id", "vs_hand"], "Pitching card ratings unique constraint"),
    ]

    for table, fields, description in unique_checks:
        try:
            fields_str = ", ".join(fields)
            cursor = conn.execute(f"""
                SELECT {fields_str}, COUNT(*) as cnt
                FROM {table}
                GROUP BY {fields_str}
                HAVING COUNT(*) > 1
            """)
            duplicates = cursor.fetchall()

            if duplicates:
                issues.append(
                    {
                        "type": "DUPLICATE_UNIQUE",
                        "severity": "HIGH",
                        "table": table,
                        "fields": fields,
                        "description": description,
                        "duplicate_count": len(duplicates),
                        "sample_duplicates": [
                            dict(zip(fields + ["count"], row)) for row in duplicates[:5]
                        ],
                    }
                )
        except sqlite3.OperationalError as e:
            print(f"Warning: Could not check unique constraint on {table}: {e}")

    return issues


def generate_report(counts: dict, issues: list, output_path: str = None) -> str:
    """Generate audit report."""
    report = {
        "generated_at": datetime.now().isoformat(),
        "summary": {
            "total_tables": len(counts),
            "total_records": sum(counts.values()),
            "total_issues": len(issues),
            "critical_issues": len(
                [i for i in issues if i.get("severity") == "CRITICAL"]
            ),
            "high_issues": len([i for i in issues if i.get("severity") == "HIGH"]),
            "medium_issues": len([i for i in issues if i.get("severity") == "MEDIUM"]),
        },
        "table_counts": counts,
        "issues": issues,
    }

    if output_path:
        with open(output_path, "w") as f:
            json.dump(report, f, indent=2)

    return json.dumps(report, indent=2)


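# Illustrative report skeleton (values hypothetical; shape matches
# generate_report() above):
#
#   {
#     "generated_at": "2026-01-25T12:00:00",
#     "summary": {"total_tables": 29, "total_records": 658963,
#                 "total_issues": 3, "critical_issues": 0, ...},
#     "table_counts": {"stratplay": 332737, ...},
#     "issues": [{"type": "ORPHANED_FK", "severity": "HIGH", ...}]
#   }

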
def main():
    parser = argparse.ArgumentParser(
        description="Audit SQLite database before PostgreSQL migration"
    )
    parser.add_argument(
        "--db-path",
        type=str,
        default="storage/pd_master.db",
        help="Path to SQLite database",
    )
    parser.add_argument("--output", type=str, help="Output JSON file for report")
    parser.add_argument(
        "--fix", action="store_true", help="Apply safe fixes (not implemented)"
    )
    args = parser.parse_args()

    print("=" * 60)
    print("Paper Dynasty SQLite Data Integrity Audit")
    print("=" * 60)

    if not Path(args.db_path).exists():
        print(f"ERROR: Database not found: {args.db_path}")
        sys.exit(1)

    conn = connect_db(args.db_path)

    # Run checks
    print("\n1. Getting table record counts...")
    counts = get_table_record_counts(conn)
    print(f"   Found {len(counts)} tables with {sum(counts.values()):,} total records")

    print("\n2. Checking for NULL values...")
    null_issues = check_null_values(conn)
    print(f"   Found {len(null_issues)} NULL value issues")

    print("\n3. Checking for orphaned foreign keys...")
    fk_issues = check_orphaned_foreign_keys(conn)
    print(f"   Found {len(fk_issues)} orphaned FK issues")

    print("\n4. Checking VARCHAR lengths...")
    varchar_issues = check_varchar_lengths(conn)
    print(f"   Found {len(varchar_issues)} VARCHAR length issues")

    print("\n5. Checking for duplicate primary keys...")
    pk_issues = check_duplicate_primary_keys(conn)
    print(f"   Found {len(pk_issues)} duplicate PK issues")

    print("\n6. Checking unique constraints...")
    unique_issues = check_unique_constraints(conn)
    print(f"   Found {len(unique_issues)} unique constraint issues")

    # Combine all issues
    all_issues = null_issues + fk_issues + varchar_issues + pk_issues + unique_issues

    # Generate report
    print("\n" + "=" * 60)
    print("AUDIT RESULTS")
    print("=" * 60)

    if args.output:
        report = generate_report(counts, all_issues, args.output)
        print(f"Full report saved to: {args.output}")
    else:
        report = generate_report(counts, all_issues)

    # Print summary
    print(f"\nTotal Issues: {len(all_issues)}")
    critical = [i for i in all_issues if i.get("severity") == "CRITICAL"]
    high = [i for i in all_issues if i.get("severity") == "HIGH"]
    medium = [i for i in all_issues if i.get("severity") == "MEDIUM"]

    if critical:
        print(f"\n  CRITICAL ({len(critical)}):")
        for issue in critical:
            print(
                f"  - {issue['type']}: {issue.get('description', issue.get('table', 'Unknown'))}"
            )

    if high:
        print(f"\n  HIGH ({len(high)}):")
        for issue in high:
            desc = issue.get(
                "description",
                f"{issue.get('table', 'Unknown')}.{issue.get('field', 'Unknown')}",
            )
            print(f"  - {issue['type']}: {desc}")

    if medium:
        print(f"\n  MEDIUM ({len(medium)}):")
        for issue in medium:
            desc = issue.get(
                "description",
                f"{issue.get('table', 'Unknown')}.{issue.get('field', 'Unknown')}",
            )
            print(f"  - {issue['type']}: {desc}")

    # Table counts
    print("\n" + "-" * 60)
    print("TABLE RECORD COUNTS (for baseline comparison)")
    print("-" * 60)
    for table, count in sorted(counts.items()):
        print(f"  {table:30} {count:>10,}")

    conn.close()

    # Exit code based on issues
    if critical:
        print("\nCRITICAL ISSUES FOUND - Migration may fail!")
        sys.exit(2)
    elif high:
        print("\nHIGH PRIORITY ISSUES FOUND - Review before migration")
        sys.exit(1)
    else:
        print("\nNo critical issues found - Ready for migration")
        sys.exit(0)


if __name__ == "__main__":
    main()


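# Illustrative CI usage (sketch; script path assumed): the exit code encodes
# audit severity, so a wrapper can gate the migration on it:
#
#   python scripts/audit_sqlite.py --output audit_report.json
#   # exit 0: clean, 1: HIGH issues to review, 2: CRITICAL - do not migrate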
510
scripts/migrate_to_postgres.py
Executable file
@@ -0,0 +1,510 @@
#!/usr/bin/env python3
"""
Paper Dynasty SQLite to PostgreSQL Migration Script

CRITICAL: This script preserves primary key IDs exactly as they exist in SQLite.
Failing to preserve IDs will cause all foreign key references to break.

Usage:
    # Dry run (validate only, no changes)
    python scripts/migrate_to_postgres.py --dry-run

    # Full migration
    python scripts/migrate_to_postgres.py

    # Migrate specific table only
    python scripts/migrate_to_postgres.py --table player

Environment Variables Required:
    POSTGRES_HOST, POSTGRES_DB, POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_PORT

Based on lessons learned from the Major Domo PostgreSQL migration (August 2025).
"""

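# Illustrative setup (sketch - values below are placeholders, not real
# credentials; actual values live in .secrets/pd_admin_credentials.txt):
#
#   export POSTGRES_HOST=localhost POSTGRES_PORT=5432 \
#          POSTGRES_DB=pd_master POSTGRES_USER=pd_admin \
#          POSTGRES_PASSWORD=********
#   python scripts/migrate_to_postgres.py --dry-run
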
import argparse
import logging
import os
import sqlite3
import sys
from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple

import psycopg2
from psycopg2.extras import execute_values

# Configure logging (FileHandler does not create the log directory itself)
os.makedirs("logs", exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler(
            f"logs/migration_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"
        ),
    ],
)
logger = logging.getLogger(__name__)

# Migration order - tables with no FK dependencies first, then dependent tables
# This ensures parent records exist before children are inserted
MIGRATION_ORDER = [
    # Tier 1: No foreign key dependencies
    "current",
    "rarity",
    "event",
    "packtype",
    "notification",
    # Tier 2: Simple FK dependencies (single level)
    "cardset",  # -> event
    "mlbplayer",  # no FKs
    "gamerewards",  # -> packtype, player (but player not created yet, so nullable)
    # Tier 3: Core entity tables
    "team",  # -> event
    "player",  # -> cardset, rarity, mlbplayer
    # Tier 4: Dependent on core entities
    "pack",  # -> team, packtype, cardset
    "card",  # -> player, team, pack
    "roster",  # -> team, card (x26)
    "result",  # -> team (x2)
    "stratgame",  # -> team (x2)
    # Tier 5: Statistics and game data
    "battingstat",  # -> card, team, result
    "pitchingstat",  # -> card, team, result
    "stratplay",  # -> stratgame, player (many), team (many)
    "decision",  # -> stratgame, player, team
    # Tier 6: Card detail tables
    "battingcard",  # -> player
    "battingcardratings",  # -> battingcard
    "pitchingcard",  # -> player
    "pitchingcardratings",  # -> pitchingcard
    "cardposition",  # -> player
    # Tier 7: Other dependent tables
    "award",  # -> card, team
    "paperdex",  # -> team, player
    "reward",  # -> team
    "gauntletreward",  # -> event, gamerewards
    "gauntletrun",  # -> team, event
]

# Tables with explicit primary keys (not auto-increment)
EXPLICIT_PK_TABLES = {
    "player": "player_id",  # Uses player_id as explicit PK
}

# All other tables use 'id' as auto-increment PK


def get_sqlite_connection(db_path: str) -> sqlite3.Connection:
    """Connect to SQLite database."""
    if not os.path.exists(db_path):
        raise FileNotFoundError(f"SQLite database not found: {db_path}")

    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn


def get_postgres_connection() -> psycopg2.extensions.connection:
    """Connect to PostgreSQL database using environment variables."""
    required_vars = [
        "POSTGRES_HOST",
        "POSTGRES_DB",
        "POSTGRES_USER",
        "POSTGRES_PASSWORD",
    ]
    missing = [v for v in required_vars if not os.environ.get(v)]
    if missing:
        raise EnvironmentError(f"Missing required environment variables: {missing}")

    return psycopg2.connect(
        host=os.environ["POSTGRES_HOST"],
        database=os.environ["POSTGRES_DB"],
        user=os.environ["POSTGRES_USER"],
        password=os.environ["POSTGRES_PASSWORD"],
        port=int(os.environ.get("POSTGRES_PORT", "5432")),
    )


def get_table_columns(sqlite_conn: sqlite3.Connection, table_name: str) -> List[str]:
    """Get column names for a table from SQLite."""
    cursor = sqlite_conn.execute(f"PRAGMA table_info({table_name})")
    return [row["name"] for row in cursor.fetchall()]


def get_primary_key_column(table_name: str) -> str:
    """Get the primary key column name for a table."""
    return EXPLICIT_PK_TABLES.get(table_name, "id")


def get_sequence_name(table_name: str, pk_column: str) -> str:
    """Get the PostgreSQL sequence name for a table's primary key."""
    return f"{table_name}_{pk_column}_seq"


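# Illustrative note on the helpers above (values hypothetical):
#   get_primary_key_column("player") -> "player_id"  (explicit PK)
#   get_primary_key_column("card")   -> "id"         (default)
#   get_sequence_name("card", "id")  -> "card_id_seq"
# The sequence name follows PostgreSQL's default naming for
# SERIAL/IDENTITY columns: <table>_<column>_seq.

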
def get_record_count(conn, table_name: str, is_sqlite: bool = True) -> int:
    """Get record count for a table."""
    if is_sqlite:
        cursor = conn.execute(f"SELECT COUNT(*) FROM {table_name}")
        return cursor.fetchone()[0]
    else:
        cursor = conn.cursor()
        cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
        return cursor.fetchone()[0]


def migrate_table(
    sqlite_conn: sqlite3.Connection,
    pg_conn: psycopg2.extensions.connection,
    table_name: str,
    batch_size: int = 500,
    dry_run: bool = False,
) -> Dict[str, Any]:
    """
    Migrate a single table from SQLite to PostgreSQL.

    CRITICAL: Preserves primary key IDs exactly.

    Returns:
        Dict with migration statistics
    """
    stats = {
        "table": table_name,
        "sqlite_count": 0,
        "postgres_count": 0,
        "inserted": 0,
        "skipped": 0,
        "errors": [],
        "success": False,
    }

    try:
        # Get column info
        columns = get_table_columns(sqlite_conn, table_name)
        pk_column = get_primary_key_column(table_name)

        # Count source records
        stats["sqlite_count"] = get_record_count(
            sqlite_conn, table_name, is_sqlite=True
        )
        logger.info(f"Table {table_name}: {stats['sqlite_count']} records to migrate")

        if stats["sqlite_count"] == 0:
            logger.info(f"Table {table_name}: No records to migrate")
            stats["success"] = True
            return stats

        if dry_run:
            logger.info(
                f"[DRY RUN] Would migrate {stats['sqlite_count']} records from {table_name}"
            )
            stats["success"] = True
            return stats

        # Read all records from SQLite
        cursor = sqlite_conn.execute(f"SELECT * FROM {table_name}")
        rows = cursor.fetchall()

        # Prepare PostgreSQL insert
        pg_cursor = pg_conn.cursor()

        # Build column list string
        columns_str = ", ".join(columns)
        placeholders = ", ".join(["%s"] * len(columns))

        # Process in batches
        for i in range(0, len(rows), batch_size):
            batch = rows[i : i + batch_size]
            batch_values = []

            for row in batch:
                # Convert sqlite3.Row to tuple, preserving all values including ID
                values = tuple(row[col] for col in columns)
                batch_values.append(values)

            try:
                # Savepoint lets us discard a failed batch without rolling back
                # batches already inserted in this (uncommitted) transaction
                pg_cursor.execute("SAVEPOINT batch_insert")
                # Use execute_values for efficient batch insert
                insert_sql = f"INSERT INTO {table_name} ({columns_str}) VALUES %s"
                execute_values(pg_cursor, insert_sql, batch_values)
                pg_cursor.execute("RELEASE SAVEPOINT batch_insert")
                stats["inserted"] += len(batch)

            except psycopg2.errors.ForeignKeyViolation as e:
                # Foreign key error - fall back to individual inserts
                logger.warning(
                    f"FK violation in batch, falling back to individual inserts: {e}"
                )
                # Roll back only this batch, keeping earlier batches intact
                pg_cursor.execute("ROLLBACK TO SAVEPOINT batch_insert")

                for values in batch_values:
                    try:
                        pg_cursor.execute("SAVEPOINT row_insert")
                        pg_cursor.execute(
                            f"INSERT INTO {table_name} ({columns_str}) VALUES ({placeholders})",
                            values,
                        )
                        pg_cursor.execute("RELEASE SAVEPOINT row_insert")
                        stats["inserted"] += 1
                    except psycopg2.errors.ForeignKeyViolation as e:
                        pg_cursor.execute("ROLLBACK TO SAVEPOINT row_insert")
                        stats["skipped"] += 1
                        # Extract ID for logging
                        pk_idx = columns.index(pk_column) if pk_column in columns else 0
                        record_id = values[pk_idx]
                        stats["errors"].append(
                            {
                                "id": record_id,
                                "error": "ForeignKeyViolation",
                                "message": str(e),
                            }
                        )
                        logger.warning(
                            f"Skipped orphaned record {table_name}.{pk_column}={record_id}"
                        )
                    except Exception as e:
                        pg_cursor.execute("ROLLBACK TO SAVEPOINT row_insert")
                        stats["skipped"] += 1
                        pk_idx = columns.index(pk_column) if pk_column in columns else 0
                        record_id = values[pk_idx]
                        stats["errors"].append(
                            {
                                "id": record_id,
                                "error": type(e).__name__,
                                "message": str(e),
                            }
                        )
                        logger.error(
                            f"Error inserting {table_name}.{pk_column}={record_id}: {e}"
                        )

            logger.info(
                f"Table {table_name}: Processed {min(i + batch_size, len(rows))}/{len(rows)} records"
            )

        # Commit the transaction
        pg_conn.commit()

        # CRITICAL: Reset the PostgreSQL sequence so the next id is MAX(id) + 1
        # Without this, new inserts will fail with duplicate key errors
        sequence_name = get_sequence_name(table_name, pk_column)
        try:
            pg_cursor.execute(f"""
                SELECT setval('{sequence_name}', COALESCE((SELECT MAX({pk_column}) FROM {table_name}), 1), true)
            """)
            pg_conn.commit()
            logger.info(f"Table {table_name}: Reset sequence {sequence_name}")
        except psycopg2.errors.UndefinedTable as e:
            # Sequence might not exist for explicit PK tables
            logger.warning(f"Could not reset sequence {sequence_name}: {e}")
            pg_conn.rollback()

        # Verify counts
        stats["postgres_count"] = get_record_count(pg_conn, table_name, is_sqlite=False)

        if stats["postgres_count"] == stats["sqlite_count"]:
            logger.info(
                f"Table {table_name}: SUCCESS - {stats['postgres_count']} records migrated"
            )
            stats["success"] = True
        elif stats["postgres_count"] == stats["inserted"]:
            logger.warning(
                f"Table {table_name}: PARTIAL - {stats['inserted']} inserted, "
                f"{stats['skipped']} skipped (orphaned FK records)"
            )
            stats["success"] = True  # Partial success is acceptable for orphaned records
        else:
            logger.error(
                f"Table {table_name}: MISMATCH - SQLite: {stats['sqlite_count']}, "
                f"PostgreSQL: {stats['postgres_count']}"
            )
            stats["success"] = False

    except Exception as e:
        logger.error(f"Table {table_name}: FAILED - {e}")
        stats["errors"].append({"error": type(e).__name__, "message": str(e)})
        stats["success"] = False
        pg_conn.rollback()

    return stats


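# Illustrative notes on migrate_table() above (values hypothetical):
# - execute_values() expands the "VALUES %s" placeholder into multi-row
#   INSERT statements, e.g.
#     INSERT INTO card (id, player_id, ...) VALUES (1, 10, ...), (2, 11, ...)
#   which is far fewer round trips than inserting row by row.
# - setval(..., true) leaves the sequence positioned at MAX(id), so the next
#   nextval() returns MAX(id) + 1 (e.g. MAX(id)=48210 -> next id 48211).

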
def verify_id_preservation(
    sqlite_conn: sqlite3.Connection,
    pg_conn: psycopg2.extensions.connection,
    sample_tables: Optional[List[str]] = None,
) -> bool:
    """
    Verify that primary key IDs were preserved correctly.

    This is a CRITICAL check - if IDs don't match, the migration has failed.
    """
    if sample_tables is None:
        sample_tables = ["player", "team", "card", "stratgame"]

    all_match = True

    for table_name in sample_tables:
        pk_column = get_primary_key_column(table_name)

        # Get first and last 5 IDs from SQLite
        sqlite_cursor = sqlite_conn.execute(
            f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} LIMIT 5"
        )
        sqlite_first = [row[0] for row in sqlite_cursor.fetchall()]

        sqlite_cursor = sqlite_conn.execute(
            f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} DESC LIMIT 5"
        )
        sqlite_last = [row[0] for row in sqlite_cursor.fetchall()]

        # Get same IDs from PostgreSQL
        pg_cursor = pg_conn.cursor()
        pg_cursor.execute(
            f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} LIMIT 5"
        )
        pg_first = [row[0] for row in pg_cursor.fetchall()]

        pg_cursor.execute(
            f"SELECT {pk_column} FROM {table_name} ORDER BY {pk_column} DESC LIMIT 5"
        )
        pg_last = [row[0] for row in pg_cursor.fetchall()]

        if sqlite_first == pg_first and sqlite_last == pg_last:
            logger.info(f"ID Verification {table_name}: PASS - IDs match")
        else:
            logger.error(
                f"ID Verification {table_name}: FAIL - "
                f"SQLite first: {sqlite_first}, PG first: {pg_first}, "
                f"SQLite last: {sqlite_last}, PG last: {pg_last}"
            )
            all_match = False

    return all_match


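# Illustrative note on verify_id_preservation() above: this spot-checks only
# the first and last 5 IDs per sampled table. A stricter (slower) check could
# compare aggregates over the full ID set on both sides, e.g. (sketch):
#
#   SELECT COUNT(*), MIN(player_id), MAX(player_id), SUM(player_id) FROM player

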
def main():
    parser = argparse.ArgumentParser(
        description="Migrate Paper Dynasty from SQLite to PostgreSQL"
    )
    parser.add_argument(
        "--dry-run", action="store_true", help="Validate without making changes"
    )
    parser.add_argument("--table", type=str, help="Migrate only this table")
    parser.add_argument(
        "--sqlite-path",
        type=str,
        default="storage/pd_master.db",
        help="Path to SQLite database",
    )
    parser.add_argument(
        "--batch-size", type=int, default=500, help="Batch size for inserts"
    )
    parser.add_argument(
        "--skip-verification", action="store_true", help="Skip ID verification"
    )
    args = parser.parse_args()

    logger.info("=" * 60)
    logger.info("Paper Dynasty SQLite to PostgreSQL Migration")
    logger.info("=" * 60)

    if args.dry_run:
        logger.info("DRY RUN MODE - No changes will be made")

    # Connect to databases
    try:
        sqlite_conn = get_sqlite_connection(args.sqlite_path)
        logger.info(f"Connected to SQLite: {args.sqlite_path}")
    except FileNotFoundError as e:
        logger.error(str(e))
        sys.exit(1)

    try:
        pg_conn = get_postgres_connection()
        logger.info(
            f"Connected to PostgreSQL: {os.environ['POSTGRES_HOST']}/{os.environ['POSTGRES_DB']}"
        )
    except EnvironmentError as e:
        logger.error(str(e))
        sys.exit(1)
    except psycopg2.Error as e:
        logger.error(f"PostgreSQL connection failed: {e}")
        sys.exit(1)

    # Determine tables to migrate
    tables_to_migrate = [args.table] if args.table else MIGRATION_ORDER

    # Validate tables exist
    available_tables = set()
    cursor = sqlite_conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
    for row in cursor:
        available_tables.add(row[0])

    # Filter rather than remove-while-iterating (which skips elements and
    # would mutate the shared MIGRATION_ORDER list)
    for table in tables_to_migrate:
        if table not in available_tables:
            logger.warning(f"Table {table} not found in SQLite, skipping")
    tables_to_migrate = [t for t in tables_to_migrate if t in available_tables]

    # Migration summary
    results = []
    start_time = datetime.now()

    logger.info(f"Migrating {len(tables_to_migrate)} tables...")
    logger.info("-" * 60)

    for table_name in tables_to_migrate:
        stats = migrate_table(
            sqlite_conn,
            pg_conn,
            table_name,
            batch_size=args.batch_size,
            dry_run=args.dry_run,
        )
        results.append(stats)
        logger.info("-" * 60)

    # Summary
    elapsed = datetime.now() - start_time
    successful = sum(1 for r in results if r["success"])
    total_records = sum(r["inserted"] for r in results)
    total_skipped = sum(r["skipped"] for r in results)

    logger.info("=" * 60)
    logger.info("MIGRATION SUMMARY")
    logger.info("=" * 60)
    logger.info(f"Tables: {successful}/{len(results)} successful")
    logger.info(f"Records: {total_records} inserted, {total_skipped} skipped")
    logger.info(f"Duration: {elapsed}")

    # Failed tables
    failed = [r for r in results if not r["success"]]
    if failed:
        logger.error("FAILED TABLES:")
        for r in failed:
            logger.error(f"  - {r['table']}: {r['errors']}")

    # ID Verification (CRITICAL)
    if not args.dry_run and not args.skip_verification:
        logger.info("-" * 60)
        logger.info("VERIFYING ID PRESERVATION...")
        if verify_id_preservation(sqlite_conn, pg_conn):
            logger.info("ID VERIFICATION: PASS - All IDs preserved correctly")
        else:
            logger.error("ID VERIFICATION: FAIL - IDs do not match!")
            logger.error(
                "THIS IS A CRITICAL FAILURE - Foreign key references may be broken"
            )
            sys.exit(1)

    # Close connections
    sqlite_conn.close()
    pg_conn.close()

    if all(r["success"] for r in results):
        logger.info("MIGRATION COMPLETE - SUCCESS")
        sys.exit(0)
    else:
        logger.warning("MIGRATION COMPLETE - PARTIAL SUCCESS (some tables failed)")
        sys.exit(1)


if __name__ == "__main__":
    main()