paper-dynasty-card-creation/upload_lefty_cards_to_s3.py
Cal Corum 0a17745389 Run black and ruff across entire codebase
Standardize formatting with black and apply ruff auto-fixes.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-08 14:24:33 -05:00

166 lines
4.5 KiB
Python

"""
Fetch updated card images for the 20 fixed left-handed players,
upload to AWS S3, and update player image URLs
"""
import asyncio
import boto3
import aiohttp
from db_calls import db_get, db_patch, DB_URL
# AWS Configuration
AWS_BUCKET_NAME = "paper-dynasty"  # target S3 bucket for card images
AWS_REGION = "us-east-1"
# Public virtual-hosted-style base URL for objects in the bucket.
S3_BASE_URL = f"https://{AWS_BUCKET_NAME}.s3.{AWS_REGION}.amazonaws.com"
CARDSET_ID = 27  # cardset these players belong to — embedded in each S3 key

# Initialize S3 client (credentials resolved via boto3's default chain).
s3_client = boto3.client("s3", region_name=AWS_REGION)

# List of player IDs that were fixed (the 20 left-handed players this
# script re-uploads cards for; see module docstring).
FIXED_PLAYER_IDS = [
    13015,
    13017,
    13020,
    13030,
    13032,
    13034,
    13037,
    13045,
    13047,
    13048,
    13053,
    13058,
    13062,
    13068,
    13070,
    13071,
    13077,
    13082,
    13084,
    13090,
]
async def fetch_card_image(session, card_url: str, timeout: int = 6) -> bytes:
    """Fetch a card image over HTTP and return its raw bytes.

    Args:
        session: aiohttp ClientSession (or compatible) used for the request.
        card_url: Full URL of the card image endpoint.
        timeout: Request timeout in seconds.

    Returns:
        The response body as bytes.

    Raises:
        ValueError: On a non-200 response ("Status <code>: <body>") or on any
            request failure ("Failed to fetch card: ...", original exception
            chained).
    """
    try:
        async with session.get(card_url, timeout=timeout) as r:
            if r.status == 200:
                return await r.read()
            error_text = await r.text()
            raise ValueError(f"Status {r.status}: {error_text}")
    except ValueError:
        # Already our own non-200 error — re-raise as-is instead of letting the
        # blanket handler below double-wrap it ("Failed to fetch card: Status ...").
        raise
    except Exception as e:
        # Network errors, timeouts, etc.: normalize to ValueError, chaining the
        # original so the root cause survives in tracebacks.
        raise ValueError(f"Failed to fetch card: {str(e)}") from e
def upload_to_s3(
    image_bytes: bytes, s3_key: str, content_type: str = "image/png"
) -> str:
    """Upload image bytes to S3 and return the object's public URL.

    Args:
        image_bytes: Raw image payload to store.
        s3_key: Object key within AWS_BUCKET_NAME (must not contain query
            parameters — cache-busting is appended by the caller).
        content_type: MIME type stored as the object's Content-Type.

    Returns:
        Public https URL of the uploaded object (no query string).

    Raises:
        ValueError: If the S3 put_object call fails (original error chained).
    """
    try:
        s3_client.put_object(
            Bucket=AWS_BUCKET_NAME,
            Key=s3_key,
            Body=image_bytes,
            ContentType=content_type,
            CacheControl="public, max-age=31536000",  # 1 year cache
        )
    except Exception as e:
        # Chain the boto3 error so the root cause survives in tracebacks.
        raise ValueError(f"Failed to upload to S3: {str(e)}") from e
    # URL construction cannot raise; keep it outside the try body.
    return f"{S3_BASE_URL}/{s3_key}"
async def process_player(session, player_id: int, release_date: str):
    """Fetch one player's batting card, push it to S3, and repoint the record.

    Returns a result dict: on success {"success": True, "player_id", "name",
    "s3_url"}; on failure {"success": False, "player_id", "error"}.
    """
    try:
        print(f"\nProcessing player {player_id}...")

        # Look up the player so the log shows a readable name.
        record = await db_get("players", object_id=player_id)
        name = record.get("p_name", "Unknown")
        print(f" Name: {name}")

        # The card is rendered by the API; release_date doubles as a cache-buster.
        card_api_url = f"{DB_URL}/v2/players/{player_id}/battingcard?d={release_date}"

        print(" Fetching card image...")
        image_bytes = await fetch_card_image(session, card_api_url)
        print(f" ✅ Fetched {len(image_bytes)} bytes")

        # S3 keys must stay free of query parameters.
        s3_key = f"cards/cardset-{CARDSET_ID:03d}/player-{player_id}/battingcard.png"

        print(" Uploading to S3...")
        base_url = upload_to_s3(image_bytes, s3_key)
        s3_url = f"{base_url}?d={release_date}"  # cache-busting query parameter
        print(" ✅ Uploaded to S3")

        print(" Updating player image URL...")
        await db_patch("players", object_id=player_id, params=[("image", s3_url)])
        print(" ✅ Updated player record")

        return {
            "success": True,
            "player_id": player_id,
            "name": name,
            "s3_url": s3_url,
        }
    except Exception as e:
        print(f" ❌ Error: {str(e)}")
        return {"success": False, "player_id": player_id, "error": str(e)}
async def main():
    """Process every fixed player sequentially and print a summary report."""
    # Use timestamp to bust cache completely — each run gets a unique suffix.
    import time

    release_date = f"2025-11-25-{int(time.time())}"

    divider = "=" * 60
    print(divider)
    print("Uploading cards to S3 for 20 left-handed players")
    print(f"Release date: {release_date}")
    print(divider)

    successes = []
    errors = []
    # One shared session for all requests; players handled one at a time.
    async with aiohttp.ClientSession() as session:
        for pid in FIXED_PLAYER_IDS:
            outcome = await process_player(session, pid, release_date)
            (successes if outcome["success"] else errors).append(outcome)

    print("\n" + divider)
    print("SUMMARY")
    print(divider)
    print(f"Successes: {len(successes)}")
    print(f"Errors: {len(errors)}")
    print(f"Total: {len(FIXED_PLAYER_IDS)}")

    if errors:
        print("\nErrors:")
        for failed in errors:
            print(f" Player {failed['player_id']}: {failed.get('error', 'Unknown error')}")

    if successes:
        print(f"\nFirst S3 URL: {successes[0]['s3_url']}")
    print(divider)


if __name__ == "__main__":
    asyncio.run(main())