Compare commits

..

No commits in common. "main" and "dev" have entirely different histories.
main ... dev

12 changed files with 110 additions and 564 deletions

View File

@ -44,10 +44,6 @@ else:
pragmas={"journal_mode": "wal", "cache_size": -1 * 64000, "synchronous": 0},
)
# Refractor stat accumulation starts at this season — stats from earlier seasons
# are excluded from evaluation queries. Override via REFRACTOR_START_SEASON env var.
REFRACTOR_START_SEASON = int(os.environ.get("REFRACTOR_START_SEASON", "11"))
# 2025, 2005
ranked_cardsets = [24, 25, 26, 27, 28, 29]
LIVE_CARDSET_ID = 27

View File

@ -79,8 +79,8 @@ async def get_cards(
all_cards = all_cards.where(Card.pack == this_pack)
if value is not None:
all_cards = all_cards.where(Card.value == value)
if variant is not None:
all_cards = all_cards.where(Card.variant == variant)
# if variant is not None:
# all_cards = all_cards.where(Card.variant == variant)
if min_value is not None:
all_cards = all_cards.where(Card.value >= min_value)
if max_value is not None:
@ -114,8 +114,8 @@ async def get_cards(
if csv:
data_list = [
["id", "player", "cardset", "rarity", "team", "pack", "value", "variant"]
]
["id", "player", "cardset", "rarity", "team", "pack", "value"]
] # , 'variant']]
for line in all_cards:
data_list.append(
[
@ -125,8 +125,7 @@ async def get_cards(
line.player.rarity,
line.team.abbrev,
line.pack,
line.value,
line.variant,
line.value, # line.variant
]
)
return_val = DataFrame(data_list).to_csv(header=False, index=False)

View File

@ -10,7 +10,10 @@ from ..db_engine import db, Cardset, model_to_dict, Pack, Team, PackType, DoesNo
from ..dependencies import oauth2_scheme, valid_token
router = APIRouter(prefix="/api/v2/packs", tags=["packs"])
router = APIRouter(
prefix='/api/v2/packs',
tags=['packs']
)
class PackPydantic(pydantic.BaseModel):
@ -25,58 +28,46 @@ class PackModel(pydantic.BaseModel):
packs: List[PackPydantic]
@router.get("")
@router.get('')
async def get_packs(
team_id: Optional[int] = None,
pack_type_id: Optional[int] = None,
opened: Optional[bool] = None,
limit: Optional[int] = None,
new_to_old: Optional[bool] = None,
pack_team_id: Optional[int] = None,
pack_cardset_id: Optional[int] = None,
exact_match: Optional[bool] = False,
csv: Optional[bool] = None,
):
team_id: Optional[int] = None, pack_type_id: Optional[int] = None, opened: Optional[bool] = None,
limit: Optional[int] = None, new_to_old: Optional[bool] = None, pack_team_id: Optional[int] = None,
pack_cardset_id: Optional[int] = None, exact_match: Optional[bool] = False, csv: Optional[bool] = None):
all_packs = Pack.select()
if all_packs.count() == 0:
raise HTTPException(status_code=404, detail=f'There are no packs to filter')
if team_id is not None:
try:
this_team = Team.get_by_id(team_id)
except DoesNotExist:
raise HTTPException(
status_code=404, detail=f"No team found with id {team_id}"
)
raise HTTPException(status_code=404, detail=f'No team found with id {team_id}')
all_packs = all_packs.where(Pack.team == this_team)
if pack_type_id is not None:
try:
this_pack_type = PackType.get_by_id(pack_type_id)
except DoesNotExist:
raise HTTPException(
status_code=404, detail=f"No pack type found with id {pack_type_id}"
)
raise HTTPException(status_code=404, detail=f'No pack type found with id {pack_type_id}')
all_packs = all_packs.where(Pack.pack_type == this_pack_type)
if pack_team_id is not None:
try:
this_pack_team = Team.get_by_id(pack_team_id)
except DoesNotExist:
raise HTTPException(
status_code=404, detail=f"No team found with id {pack_team_id}"
)
raise HTTPException(status_code=404, detail=f'No team found with id {pack_team_id}')
all_packs = all_packs.where(Pack.pack_team == this_pack_team)
elif exact_match:
all_packs = all_packs.where(Pack.pack_team == None) # noqa: E711
all_packs = all_packs.where(Pack.pack_team == None)
if pack_cardset_id is not None:
try:
this_pack_cardset = Cardset.get_by_id(pack_cardset_id)
except DoesNotExist:
raise HTTPException(
status_code=404, detail=f"No cardset found with id {pack_cardset_id}"
)
raise HTTPException(status_code=404, detail=f'No cardset found with id {pack_cardset_id}')
all_packs = all_packs.where(Pack.pack_cardset == this_pack_cardset)
elif exact_match:
all_packs = all_packs.where(Pack.pack_cardset == None) # noqa: E711
all_packs = all_packs.where(Pack.pack_cardset == None)
if opened is not None:
all_packs = all_packs.where(Pack.open_time.is_null(not opened))
@ -87,62 +78,60 @@ async def get_packs(
else:
all_packs = all_packs.order_by(Pack.id)
# if all_packs.count() == 0:
# db.close()
# raise HTTPException(status_code=404, detail=f'No packs found')
if csv:
data_list = [["id", "team", "pack_type", "open_time"]]
data_list = [['id', 'team', 'pack_type', 'open_time']]
for line in all_packs:
data_list.append(
[
line.id,
line.team.abbrev,
line.pack_type.name,
line.open_time, # Already datetime in PostgreSQL
line.id, line.team.abbrev, line.pack_type.name,
line.open_time # Already datetime in PostgreSQL
]
)
return_val = DataFrame(data_list).to_csv(header=False, index=False)
return Response(content=return_val, media_type="text/csv")
return Response(content=return_val, media_type='text/csv')
else:
return_val = {"count": all_packs.count(), "packs": []}
return_val = {'count': all_packs.count(), 'packs': []}
for x in all_packs:
return_val["packs"].append(model_to_dict(x))
return_val['packs'].append(model_to_dict(x))
return return_val
@router.get("/{pack_id}")
@router.get('/{pack_id}')
async def get_one_pack(pack_id: int, csv: Optional[bool] = False):
try:
this_pack = Pack.get_by_id(pack_id)
except DoesNotExist:
raise HTTPException(status_code=404, detail=f"No pack found with id {pack_id}")
raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}')
if csv:
data_list = [
["id", "team", "pack_type", "open_time"],
[
this_pack.id,
this_pack.team.abbrev,
this_pack.pack_type.name,
this_pack.open_time,
], # Already datetime in PostgreSQL
['id', 'team', 'pack_type', 'open_time'],
[this_pack.id, this_pack.team.abbrev, this_pack.pack_type.name,
this_pack.open_time] # Already datetime in PostgreSQL
]
return_val = DataFrame(data_list).to_csv(header=False, index=False)
return Response(content=return_val, media_type="text/csv")
return Response(content=return_val, media_type='text/csv')
else:
return_val = model_to_dict(this_pack)
return return_val
@router.post("")
@router.post('')
async def post_pack(packs: PackModel, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning("Bad Token: [REDACTED]")
logging.warning('Bad Token: [REDACTED]')
raise HTTPException(
status_code=401,
detail="You are not authorized to post packs. This event has been logged.",
detail='You are not authorized to post packs. This event has been logged.'
)
new_packs = []
@ -152,27 +141,23 @@ async def post_pack(packs: PackModel, token: str = Depends(oauth2_scheme)):
pack_type_id=x.pack_type_id,
pack_team_id=x.pack_team_id,
pack_cardset_id=x.pack_cardset_id,
open_time=datetime.fromtimestamp(x.open_time / 1000)
if x.open_time
else None,
open_time=datetime.fromtimestamp(x.open_time / 1000) if x.open_time else None
)
new_packs.append(this_player)
with db.atomic():
Pack.bulk_create(new_packs, batch_size=15)
raise HTTPException(
status_code=200, detail=f"{len(new_packs)} packs have been added"
)
raise HTTPException(status_code=200, detail=f'{len(new_packs)} packs have been added')
@router.post("/one")
@router.post('/one')
async def post_one_pack(pack: PackPydantic, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning("Bad Token: [REDACTED]")
logging.warning('Bad Token: [REDACTED]')
raise HTTPException(
status_code=401,
detail="You are not authorized to post packs. This event has been logged.",
detail='You are not authorized to post packs. This event has been logged.'
)
this_pack = Pack(
@ -180,9 +165,7 @@ async def post_one_pack(pack: PackPydantic, token: str = Depends(oauth2_scheme))
pack_type_id=pack.pack_type_id,
pack_team_id=pack.pack_team_id,
pack_cardset_id=pack.pack_cardset_id,
open_time=datetime.fromtimestamp(pack.open_time / 1000)
if pack.open_time
else None,
open_time=datetime.fromtimestamp(pack.open_time / 1000) if pack.open_time else None
)
saved = this_pack.save()
@ -192,30 +175,24 @@ async def post_one_pack(pack: PackPydantic, token: str = Depends(oauth2_scheme))
else:
raise HTTPException(
status_code=418,
detail="Well slap my ass and call me a teapot; I could not save that cardset",
detail='Well slap my ass and call me a teapot; I could not save that cardset'
)
@router.patch("/{pack_id}")
@router.patch('/{pack_id}')
async def patch_pack(
pack_id,
team_id: Optional[int] = None,
pack_type_id: Optional[int] = None,
open_time: Optional[int] = None,
pack_team_id: Optional[int] = None,
pack_cardset_id: Optional[int] = None,
token: str = Depends(oauth2_scheme),
):
pack_id, team_id: Optional[int] = None, pack_type_id: Optional[int] = None, open_time: Optional[int] = None,
pack_team_id: Optional[int] = None, pack_cardset_id: Optional[int] = None, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning("Bad Token: [REDACTED]")
logging.warning('Bad Token: [REDACTED]')
raise HTTPException(
status_code=401,
detail="You are not authorized to patch packs. This event has been logged.",
detail='You are not authorized to patch packs. This event has been logged.'
)
try:
this_pack = Pack.get_by_id(pack_id)
except DoesNotExist:
raise HTTPException(status_code=404, detail=f"No pack found with id {pack_id}")
raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}')
if team_id is not None:
this_pack.team_id = team_id
@ -243,26 +220,26 @@ async def patch_pack(
else:
raise HTTPException(
status_code=418,
detail="Well slap my ass and call me a teapot; I could not save that rarity",
detail='Well slap my ass and call me a teapot; I could not save that rarity'
)
@router.delete("/{pack_id}")
@router.delete('/{pack_id}')
async def delete_pack(pack_id, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning("Bad Token: [REDACTED]")
logging.warning('Bad Token: [REDACTED]')
raise HTTPException(
status_code=401,
detail="You are not authorized to delete packs. This event has been logged.",
detail='You are not authorized to delete packs. This event has been logged.'
)
try:
this_pack = Pack.get_by_id(pack_id)
except DoesNotExist:
raise HTTPException(status_code=404, detail=f"No packs found with id {pack_id}")
raise HTTPException(status_code=404, detail=f'No packs found with id {pack_id}')
count = this_pack.delete_instance()
if count == 1:
raise HTTPException(status_code=200, detail=f"Pack {pack_id} has been deleted")
raise HTTPException(status_code=200, detail=f'Pack {pack_id} has been deleted')
else:
raise HTTPException(status_code=500, detail=f"Pack {pack_id} was not deleted")
raise HTTPException(status_code=500, detail=f'Pack {pack_id} was not deleted')

View File

@ -40,7 +40,7 @@ from ..db_engine import (
)
from ..db_helpers import upsert_players
from ..dependencies import oauth2_scheme, valid_token
from ..services.card_storage import backfill_variant_image_url, upload_variant_apng
from ..services.card_storage import backfill_variant_image_url
from ..services.refractor_boost import compute_variant_hash
from ..services.apng_generator import apng_cache_path, generate_animated_card
@ -143,6 +143,14 @@ def normalize_franchise(franchise: str) -> str:
return FRANCHISE_NORMALIZE.get(titled, titled)
TIER_DIAMOND_COLORS = {
1: "linear-gradient(135deg, #40b040 0%, #1a6b1a 50%, #145214 100%)",
2: "linear-gradient(135deg, #50a0e8 0%, #2070b0 50%, #185488 100%)",
3: "linear-gradient(135deg, #e85050 0%, #a82020 50%, #7e1818 100%)",
4: "linear-gradient(135deg, #a060d0 0%, #6b2d8e 50%, #50226a 100%)",
}
def resolve_refractor_tier(player_id: int, variant: int) -> int:
"""Determine the refractor tier (0-4) from a player's variant hash.
@ -740,7 +748,6 @@ async def get_one_player(player_id: int, csv: Optional[bool] = False):
@router.get("/{player_id}/{card_type}card/{d}/{variant}/animated")
async def get_animated_card(
request: Request,
background_tasks: BackgroundTasks,
player_id: int,
card_type: Literal["batting", "pitching"],
variant: int,
@ -771,7 +778,7 @@ async def get_animated_card(
headers = {"Cache-Control": "public, max-age=86400"}
if os.path.isfile(cache_path) and tier is None:
return FileResponse(path=cache_path, media_type="image/apng", headers=headers)
return FileResponse(path=cache_path, media_type="image/png", headers=headers)
all_pos = (
CardPosition.select()
@ -810,6 +817,7 @@ async def get_animated_card(
else:
card_data["cardset_name"] = this_player.description
card_data["refractor_tier"] = refractor_tier
card_data["filled_bg"] = TIER_DIAMOND_COLORS.get(refractor_tier, "")
card_data["request"] = request
html_response = templates.TemplateResponse("player_card.html", card_data)
@ -846,6 +854,7 @@ async def get_animated_card(
else:
card_data["cardset_name"] = this_player.description
card_data["refractor_tier"] = refractor_tier
card_data["filled_bg"] = TIER_DIAMOND_COLORS.get(refractor_tier, "")
card_data["request"] = request
html_response = templates.TemplateResponse("player_card.html", card_data)
@ -861,17 +870,7 @@ async def get_animated_card(
finally:
await page.close()
if tier is None:
background_tasks.add_task(
upload_variant_apng,
player_id=player_id,
variant=variant,
card_type=card_type,
cardset_id=this_player.cardset.id,
apng_path=cache_path,
)
return FileResponse(path=cache_path, media_type="image/apng", headers=headers)
return FileResponse(path=cache_path, media_type="image/png", headers=headers)
@router.get("/{player_id}/{card_type}card")
@ -955,6 +954,9 @@ async def get_batter_card(
card_data["refractor_tier"] = (
tier if tier is not None else resolve_refractor_tier(player_id, variant)
)
card_data["filled_bg"] = TIER_DIAMOND_COLORS.get(
card_data["refractor_tier"], ""
)
card_data["request"] = request
html_response = templates.TemplateResponse("player_card.html", card_data)
@ -995,6 +997,9 @@ async def get_batter_card(
card_data["refractor_tier"] = (
tier if tier is not None else resolve_refractor_tier(player_id, variant)
)
card_data["filled_bg"] = TIER_DIAMOND_COLORS.get(
card_data["refractor_tier"], ""
)
card_data["request"] = request
html_response = templates.TemplateResponse("player_card.html", card_data)

View File

@ -1,5 +1,4 @@
import os
from datetime import date
from fastapi import APIRouter, Depends, HTTPException, Query
import logging
@ -24,12 +23,8 @@ _NEXT_THRESHOLD_ATTR = {
4: None,
}
# Sentinel used by _build_card_state_response to distinguish "caller did not
# pass image_url" (do the DB lookup) from "caller passed None" (use None).
_UNSET = object()
def _build_card_state_response(state, player_name=None, image_url=_UNSET) -> dict:
def _build_card_state_response(state, player_name=None) -> dict:
"""Serialise a RefractorCardState into the standard API response shape.
Produces a flat dict with player_id and team_id as plain integers,
@ -72,27 +67,22 @@ def _build_card_state_response(state, player_name=None, image_url=_UNSET) -> dic
if player_name is not None:
result["player_name"] = player_name
# Resolve image_url from the variant card row.
# When image_url is pre-fetched by the caller (batch list path), it is
# passed directly and the per-row DB query is skipped entirely.
if image_url is _UNSET:
image_url = None
if state.variant and state.variant > 0:
card_type = (
state.track.card_type
if hasattr(state, "track") and state.track
else None
)
if card_type:
CardModel = BattingCard if card_type == "batter" else PitchingCard
try:
variant_card = CardModel.get(
(CardModel.player_id == state.player_id)
& (CardModel.variant == state.variant)
)
image_url = variant_card.image_url
except CardModel.DoesNotExist:
pass
# Resolve image_url from the variant card row
image_url = None
if state.variant and state.variant > 0:
card_type = (
state.track.card_type if hasattr(state, "track") and state.track else None
)
if card_type:
CardModel = BattingCard if card_type == "batter" else PitchingCard
try:
variant_card = CardModel.get(
(CardModel.player_id == state.player_id)
& (CardModel.variant == state.variant)
)
image_url = variant_card.image_url
except CardModel.DoesNotExist:
pass
result["image_url"] = image_url
return result
@ -240,43 +230,14 @@ async def list_card_states(
query = query.where(RefractorCardState.last_evaluated_at.is_null(False))
total = query.count() or 0
states_page = list(query.offset(offset).limit(limit))
# Pre-fetch image_urls in at most 2 bulk queries (one per card table) so
# that _build_card_state_response never issues a per-row CardModel.get().
batter_pids: set[int] = set()
pitcher_pids: set[int] = set()
for state in states_page:
if state.variant and state.variant > 0:
card_type = state.track.card_type if state.track else None
if card_type == "batter":
batter_pids.add(state.player_id)
elif card_type in ("sp", "rp"):
pitcher_pids.add(state.player_id)
image_url_map: dict[tuple[int, int], str | None] = {}
if batter_pids:
for card in BattingCard.select().where(BattingCard.player_id.in_(batter_pids)):
image_url_map[(card.player_id, card.variant)] = card.image_url
if pitcher_pids:
for card in PitchingCard.select().where(
PitchingCard.player_id.in_(pitcher_pids)
):
image_url_map[(card.player_id, card.variant)] = card.image_url
items = []
for state in states_page:
for state in query.offset(offset).limit(limit):
player_name = None
try:
player_name = state.player.p_name
except Exception:
pass
img_url = image_url_map.get((state.player_id, state.variant))
items.append(
_build_card_state_response(
state, player_name=player_name, image_url=img_url
)
)
items.append(_build_card_state_response(state, player_name=player_name))
return {"count": total, "items": items}
@ -477,15 +438,9 @@ async def evaluate_game(game_id: int, token: str = Depends(oauth2_scheme)):
# Non-breaking addition: include boost info when available.
if boost_result:
variant_num = boost_result.get("variant_created")
tier_up_entry["variant_created"] = variant_num
if computed_tier >= 3 and variant_num and card_type:
d = date.today().strftime("%Y-%m-%d")
api_base = os.environ.get("API_BASE_URL", "").rstrip("/")
tier_up_entry["animated_url"] = (
f"{api_base}/api/v2/players/{player_id}/{card_type}card"
f"/{d}/{variant_num}/animated"
)
tier_up_entry["variant_created"] = boost_result.get(
"variant_created"
)
tier_ups.append(tier_up_entry)

View File

@ -8,10 +8,7 @@ get_s3_client()
(environment variables or instance profile).
build_s3_key(cardset_id, player_id, variant, card_type)
Construct the S3 object key for a variant card PNG image.
build_apng_s3_key(cardset_id, player_id, variant, card_type)
Construct the S3 object key for a variant animated card APNG.
Construct the S3 object key for a variant card image.
build_s3_url(s3_key, render_date)
Return the full HTTPS S3 URL with a cache-busting date query param.
@ -19,19 +16,11 @@ build_s3_url(s3_key, render_date)
upload_card_to_s3(s3_client, png_bytes, s3_key)
Upload raw PNG bytes to S3 with correct ContentType and CacheControl headers.
upload_apng_to_s3(s3_client, apng_bytes, s3_key)
Upload raw APNG bytes to S3 with correct ContentType and CacheControl headers.
backfill_variant_image_url(player_id, variant, card_type, cardset_id, png_path)
End-to-end: read PNG from disk, upload to S3, update BattingCard or
PitchingCard.image_url in the database. All exceptions are caught and
logged; this function never raises (safe to call as a background task).
upload_variant_apng(player_id, variant, card_type, cardset_id, apng_path)
End-to-end: read APNG from disk and upload to S3. No DB update (no
animated_url column exists yet). All exceptions are caught and logged;
this function never raises (safe to call as a background task).
Design notes
------------
- S3 credentials are resolved from the environment by boto3 at call time;
@ -108,29 +97,6 @@ def build_s3_url(s3_key: str, render_date: date) -> str:
return f"{base_url}/{s3_key}?d={date_str}"
def build_apng_s3_key(
cardset_id: int, player_id: int, variant: int, card_type: str
) -> str:
"""Construct the S3 object key for a variant animated card APNG.
Key format:
cards/cardset-{csid:03d}/player-{pid}/v{variant}/{card_type}card.apng
Args:
cardset_id: Numeric cardset ID (zero-padded to 3 digits).
player_id: Player ID.
variant: Variant number (1-4 = refractor tiers).
card_type: Either "batting" or "pitching".
Returns:
The S3 object key string.
"""
return (
f"cards/cardset-{cardset_id:03d}/player-{player_id}"
f"/v{variant}/{card_type}card.apng"
)
def upload_card_to_s3(s3_client, png_bytes: bytes, s3_key: str) -> None:
"""Upload raw PNG bytes to S3 with the standard card image headers.
@ -230,81 +196,3 @@ def backfill_variant_image_url(
variant,
card_type,
)
def upload_apng_to_s3(s3_client, apng_bytes: bytes, s3_key: str) -> None:
"""Upload raw APNG bytes to S3 with the standard animated card headers.
Sets ContentType=image/apng and CacheControl=public, max-age=86400 (1 day)
matching the animated endpoint's own Cache-Control header.
Args:
s3_client: A boto3 S3 client (from get_s3_client).
apng_bytes: Raw APNG image bytes.
s3_key: S3 object key (from build_apng_s3_key).
Returns:
None
"""
s3_client.put_object(
Bucket=S3_BUCKET,
Key=s3_key,
Body=apng_bytes,
ContentType="image/apng",
CacheControl="public, max-age=86400",
)
def upload_variant_apng(
player_id: int,
variant: int,
card_type: str,
cardset_id: int,
apng_path: str,
) -> None:
"""Read a rendered APNG from disk and upload it to S3.
Intended to be called as a background task after a new animated card is
rendered. No DB update is performed (no animated_url column exists yet).
All exceptions are caught and logged — this function is intended to be
called as a background task and must never propagate exceptions.
Args:
player_id: Player ID used for the S3 key.
variant: Variant number (matches the refractor tier variant).
card_type: "batting" or "pitching" selects the S3 key.
cardset_id: Cardset ID used for the S3 key.
apng_path: Absolute path to the rendered APNG file on disk.
Returns:
None
"""
try:
with open(apng_path, "rb") as f:
apng_bytes = f.read()
s3_key = build_apng_s3_key(
cardset_id=cardset_id,
player_id=player_id,
variant=variant,
card_type=card_type,
)
s3_client = get_s3_client()
upload_apng_to_s3(s3_client, apng_bytes, s3_key)
logger.info(
"upload_variant_apng: uploaded %s animated card player=%s variant=%s key=%s",
card_type,
player_id,
variant,
s3_key,
)
except Exception:
logger.exception(
"upload_variant_apng: failed for player=%s variant=%s card_type=%s",
player_id,
variant,
card_type,
)

View File

@ -148,11 +148,10 @@ def evaluate_card(
strikeouts=sum(r.strikeouts for r in rows),
)
else:
from app.db_engine import ( # noqa: PLC0415
from app.db_engine import (
BattingSeasonStats,
PitchingSeasonStats,
REFRACTOR_START_SEASON,
)
) # noqa: PLC0415
card_type = card_state.track.card_type
if card_type == "batter":
@ -160,7 +159,6 @@ def evaluate_card(
BattingSeasonStats.select().where(
(BattingSeasonStats.player == player_id)
& (BattingSeasonStats.team == team_id)
& (BattingSeasonStats.season >= REFRACTOR_START_SEASON)
)
)
totals = _CareerTotals(
@ -177,7 +175,6 @@ def evaluate_card(
PitchingSeasonStats.select().where(
(PitchingSeasonStats.player == player_id)
& (PitchingSeasonStats.team == team_id)
& (PitchingSeasonStats.season >= REFRACTOR_START_SEASON)
)
)
totals = _CareerTotals(

View File

@ -1,55 +0,0 @@
# Scripts
Operational scripts for the Paper Dynasty Database API.
## deploy.sh
Deploy the API by tagging a commit and triggering CI/CD.
```bash
./scripts/deploy.sh dev # Tag HEAD as 'dev', CI builds :dev image
./scripts/deploy.sh prod # Create CalVer tag + 'latest' + 'production'
./scripts/deploy.sh dev abc1234 # Tag a specific commit
./scripts/deploy.sh dev --sync-templates # Deploy + push changed templates to server
```
**Template drift check** runs automatically on every deploy. Compares local `storage/templates/*.html` against the target server via md5sum and warns if any files differ. Templates are volume-mounted (not baked into the Docker image), so code deploys alone won't update them.
**Cached image report** also runs automatically, showing PNG and APNG counts on the target server.
| Environment | SSH Host | Template Path |
|---|---|---|
| dev | `pd-database` | `/home/cal/container-data/dev-pd-database/storage/templates` |
| prod | `akamai` | `/root/container-data/paper-dynasty/storage/templates` |
## clear-card-cache.sh
Inspect or clear cached rendered card images inside the API container.
```bash
./scripts/clear-card-cache.sh dev # Report cache size (dry run)
./scripts/clear-card-cache.sh dev --apng-only # Delete animated card cache only
./scripts/clear-card-cache.sh dev --all # Delete all cached card images
```
Cached images regenerate on demand when next requested. APNG files (T3/T4 animated cards) are the most likely to go stale after template CSS changes. Both destructive modes prompt for confirmation before deleting.
| Environment | SSH Host | Container | Cache Path |
|---|---|---|---|
| dev | `pd-database` | `dev_pd_database` | `/app/storage/cards/` |
| prod | `akamai` | `pd_api` | `/app/storage/cards/` |
## Migration Scripts
| Script | Purpose |
|---|---|
| `migrate_to_postgres.py` | One-time SQLite to PostgreSQL migration |
| `migrate_missing_data.py` | Backfill missing data after migration |
| `db_migrations.py` (in repo root) | Schema migrations |
## Utility Scripts
| Script | Purpose |
|---|---|
| `wipe_gauntlet_team.py` | Reset a gauntlet team's state |
| `audit_sqlite.py` | Audit legacy SQLite database |

View File

@ -1,89 +0,0 @@
#!/bin/bash
# Clear cached card images from the API container
# Usage: ./scripts/clear-card-cache.sh <dev|prod> [--apng-only|--all]
#
# With no flags: reports cache size only (dry run)
# --apng-only: delete only .apng files (animated cards)
# --all: delete all cached card images (.png + .apng)
set -euo pipefail
# ANSI color codes for terminal output; NC resets to the default color.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'
# Per-environment SSH host and Docker container name.
declare -A DEPLOY_HOST=([dev]="pd-database" [prod]="akamai")
declare -A CONTAINER=([dev]="dev_pd_database" [prod]="pd_api")
# Print usage help and exit with a non-zero status.
usage() {
echo "Usage: $0 <dev|prod> [--apng-only|--all]"
echo ""
echo " No flag Report cache size (dry run)"
echo " --apng-only Delete only .apng files (animated cards)"
echo " --all Delete all cached card images"
exit 1
}
# Require at least the environment argument.
[[ $# -lt 1 ]] && usage
ENV="$1"
# Second argument selects the action; defaults to a read-only report.
ACTION="${2:-report}"
if [[ "$ENV" != "dev" && "$ENV" != "prod" ]]; then
usage
fi
HOST="${DEPLOY_HOST[$ENV]}"
CTR="${CONTAINER[$ENV]}"
# Cache path inside the container (fixed for both environments).
CACHE_PATH="/app/storage/cards"
# Count cached PNG and APNG files via SSH + docker exec and print a summary.
# Exits with an error if the host is unreachable.
report() {
echo -e "${CYAN}Card image cache on ${HOST} (${CTR}):${NC}"
ssh "$HOST" "
png_count=\$(docker exec $CTR find $CACHE_PATH -name '*.png' 2>/dev/null | wc -l)
apng_count=\$(docker exec $CTR find $CACHE_PATH -name '*.apng' 2>/dev/null | wc -l)
echo \" PNG: \${png_count} files\"
echo \" APNG: \${apng_count} files\"
echo \" Total: \$((\${png_count} + \${apng_count})) files\"
" 2>/dev/null || {
echo -e "${RED}Could not reach ${HOST}.${NC}"
exit 1
}
}
# Always show the current cache contents before any (optional) deletion.
report
case "$ACTION" in
report)
echo -e "${GREEN}Dry run — no files deleted. Pass --apng-only or --all to clear.${NC}"
;;
--apng-only)
# Destructive: prompts for confirmation, then deletes only .apng files.
echo -e "${YELLOW}Deleting all .apng files from ${CTR}...${NC}"
read -rp "Proceed? [y/N] " confirm
[[ "$confirm" =~ ^[Yy]$ ]] || {
echo "Aborted."
exit 0
}
deleted=$(ssh "$HOST" "docker exec $CTR find $CACHE_PATH -name '*.apng' -delete -print 2>/dev/null | wc -l")
echo -e "${GREEN}Deleted ${deleted} .apng files.${NC}"
;;
--all)
# Destructive: prompts for confirmation, then deletes both .png and .apng caches.
echo -e "${RED}Deleting ALL cached card images from ${CTR}...${NC}"
read -rp "This will clear PNG and APNG caches. Proceed? [y/N] " confirm
[[ "$confirm" =~ ^[Yy]$ ]] || {
echo "Aborted."
exit 0
}
deleted=$(ssh "$HOST" "docker exec $CTR find $CACHE_PATH -type f \( -name '*.png' -o -name '*.apng' \) -delete -print 2>/dev/null | wc -l")
echo -e "${GREEN}Deleted ${deleted} cached card images.${NC}"
;;
*)
usage
;;
esac

View File

@ -1,71 +1,31 @@
#!/bin/bash
# Deploy Paper Dynasty Database API
# Usage: ./scripts/deploy.sh <dev|prod> [--sync-templates] [commit]
# Usage: ./scripts/deploy.sh <dev|prod> [commit]
#
# Dev: Force-updates the "dev" git tag → CI builds :dev Docker image
# Prod: Creates CalVer tag + force-updates "latest" and "production" git tags
# → CI builds :<calver>, :latest, :production Docker images
#
# Options:
# --sync-templates Upload changed templates to the target server via scp
#
# Templates are volume-mounted (not in the Docker image). The script always
# checks for template drift and warns if local/remote differ. Pass
# --sync-templates to actually push the changed files.
set -euo pipefail
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'
REMOTE="origin"
SYNC_TEMPLATES=false
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TEMPLATE_DIR="$SCRIPT_DIR/../storage/templates"
# Server config
declare -A DEPLOY_HOST=([dev]="pd-database" [prod]="akamai")
declare -A TEMPLATE_PATH=(
[dev]="/home/cal/container-data/dev-pd-database/storage/templates"
[prod]="/root/container-data/paper-dynasty/storage/templates"
)
usage() {
echo "Usage: $0 <dev|prod> [--sync-templates] [commit]"
echo "Usage: $0 <dev|prod> [commit]"
echo ""
echo " dev [commit] Force-update 'dev' tag on HEAD or specified commit"
echo " prod [commit] Create CalVer + 'latest' + 'production' tags on HEAD or specified commit"
echo ""
echo "Options:"
echo " --sync-templates Upload changed templates to the target server"
exit 1
}
[[ $# -lt 1 ]] && usage
ENV="$1"
shift
# Parse optional flags
COMMIT="HEAD"
while [[ $# -gt 0 ]]; do
case "$1" in
--sync-templates)
SYNC_TEMPLATES=true
shift
;;
--*)
echo -e "${RED}Unknown option: $1${NC}"
usage
;;
*)
COMMIT="$1"
shift
;;
esac
done
COMMIT="${2:-HEAD}"
SHA=$(git rev-parse "$COMMIT" 2>/dev/null) || {
echo -e "${RED}Invalid commit: $COMMIT${NC}"
@ -80,79 +40,6 @@ if ! git branch -a --contains "$SHA" 2>/dev/null | grep -qE '(^|\s)(main|remotes
exit 1
fi
# --- Template drift check ---
check_templates() {
local host="${DEPLOY_HOST[$ENV]}"
local remote_path="${TEMPLATE_PATH[$ENV]}"
echo -e "${CYAN}Checking templates against ${host}:${remote_path}...${NC}"
local local_hashes remote_hashes
local_hashes=$(cd "$TEMPLATE_DIR" && md5sum *.html 2>/dev/null | sort -k2)
remote_hashes=$(ssh "$host" "cd '$remote_path' && md5sum *.html 2>/dev/null | sort -k2" 2>/dev/null) || {
echo -e "${YELLOW} Could not reach ${host} — skipping template check.${NC}"
return 0
}
local changed=()
local missing_remote=()
while IFS= read -r line; do
local hash file remote_hash
hash=$(echo "$line" | awk '{print $1}')
file=$(echo "$line" | awk '{print $2}')
remote_hash=$(echo "$remote_hashes" | awk -v f="$file" '$2 == f {print $1}')
if [[ -z "$remote_hash" ]]; then
missing_remote+=("$file")
elif [[ "$hash" != "$remote_hash" ]]; then
changed+=("$file")
fi
done <<<"$local_hashes"
if [[ ${#changed[@]} -eq 0 && ${#missing_remote[@]} -eq 0 ]]; then
echo -e "${GREEN} Templates in sync.${NC}"
return 0
fi
echo -e "${YELLOW} Template drift detected:${NC}"
for f in "${changed[@]+"${changed[@]}"}"; do
[[ -n "$f" ]] && echo -e " ${YELLOW}CHANGED${NC} $f"
done
for f in "${missing_remote[@]+"${missing_remote[@]}"}"; do
[[ -n "$f" ]] && echo -e " ${YELLOW}MISSING${NC} $f (not on server)"
done
if [[ "$SYNC_TEMPLATES" == true ]]; then
echo -e "${CYAN} Syncing templates...${NC}"
for f in "${changed[@]+"${changed[@]}"}" "${missing_remote[@]+"${missing_remote[@]}"}"; do
[[ -n "$f" ]] && scp "$TEMPLATE_DIR/$f" "${host}:${remote_path}/$f"
done
echo -e "${GREEN} Templates synced to ${host}.${NC}"
else
echo -e "${YELLOW} Run with --sync-templates to push changes.${NC}"
fi
}
check_templates
# --- Cached image report ---
declare -A API_CONTAINER=([dev]="dev_pd_database" [prod]="pd_api")
report_cache() {
local host="${DEPLOY_HOST[$ENV]}"
local container="${API_CONTAINER[$ENV]}"
echo -e "${CYAN}Cached card images on ${host} (${container}):${NC}"
ssh "$host" "
png_count=\$(docker exec $container find /app/storage/cards -name '*.png' 2>/dev/null | wc -l)
apng_count=\$(docker exec $container find /app/storage/cards -name '*.apng' 2>/dev/null | wc -l)
echo \" PNG: \${png_count} files\"
echo \" APNG: \${apng_count} files\"
echo \" Total: \$((\${png_count} + \${apng_count})) files\"
" 2>/dev/null || echo -e "${YELLOW} Could not reach ${host} — skipping cache report.${NC}"
}
report_cache
case "$ENV" in
dev)
echo -e "${YELLOW}Deploying to dev...${NC}"

View File

@ -15,7 +15,6 @@
} -%}
{%- set dc = diamond_colors[refractor_tier] -%}
{%- set filled_bg = 'linear-gradient(135deg, ' ~ dc.highlight ~ ' 0%, ' ~ dc.color ~ ' 50%, ' ~ dc.color ~ ' 100%)' -%}
<div class="tier-diamond-backing"></div>
<div class="tier-diamond{% if refractor_tier == 4 %} diamond-glow{% endif %}">
<div class="diamond-quad{% if refractor_tier >= 2 %} filled{% endif %}" {% if refractor_tier >= 2 %}style="background: {{ filled_bg }};"{% endif %}></div>
<div class="diamond-quad{% if refractor_tier >= 1 %} filled{% endif %}" {% if refractor_tier >= 1 %}style="background: {{ filled_bg }};"{% endif %}></div>

View File

@ -6,30 +6,17 @@
</style>
{% if refractor_tier is defined and refractor_tier > 0 %}
<style>
.tier-diamond-backing,
.tier-diamond {
position: absolute;
left: 597px;
top: 78.5px;
transform: translate(-50%, -50%) rotate(45deg);
border-radius: 2px;
pointer-events: none;
}
.tier-diamond-backing {
width: 44px;
height: 44px;
background: rgba(200,210,220,0.9);
z-index: 19;
}
.tier-diamond {
display: grid;
grid-template: 1fr 1fr / 1fr 1fr;
gap: 2px;
z-index: 20;
pointer-events: none;
background: transparent;
background: rgba(0,0,0,0.75);
border-radius: 2px;
box-shadow: 0 0 0 1.5px rgba(0,0,0,0.7), 0 2px 5px rgba(0,0,0,0.5);
}
@ -37,7 +24,7 @@
.diamond-quad {
width: 19px;
height: 19px;
background: rgba(0,0,0,0.55);
background: rgba(0,0,0,0.3);
}
.diamond-quad.filled {