paper-dynasty-database/app/routers_v2/packs.py
Cal Corum 40c512c665 Add PostgreSQL compatibility fixes for query ordering
- Add explicit ORDER BY id to all queries for consistent results across SQLite and PostgreSQL
- PostgreSQL does not guarantee row order without ORDER BY, unlike SQLite
- Skip table creation when DATABASE_TYPE=postgresql (production tables already exist; sketched below)
- Fix datetime handling in notifications (PostgreSQL native datetime vs SQLite timestamp)
- Fix grouped query count() calls that don't work in PostgreSQL (portable pattern sketched below)
- Update .gitignore to include storage/templates/ directory

This completes the PostgreSQL migration compatibility layer while maintaining
backwards compatibility with SQLite for local development.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-03 10:39:14 -06:00
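
Two of the fixes listed above live in other files, so they are sketched here rather
than shown. First, the table-creation guard: a minimal sketch assuming the app reads
the DATABASE_TYPE environment variable named in the commit message, and that `db`
and a MODELS list of peewee models exist in db_engine (both names are illustrative,
not this repo's confirmed API):

    import os

    from app.db_engine import db, MODELS  # hypothetical model registry

    if os.environ.get('DATABASE_TYPE', 'sqlite') != 'postgresql':
        # Local SQLite development: create any missing tables at startup.
        db.create_tables(MODELS, safe=True)
    # Production PostgreSQL: tables already exist, so creation is skipped.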

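Second, the grouped-count fix. Per the commit message, calling .count() on a query
with GROUP BY misbehaves on PostgreSQL; a portable pattern is to count the grouped
result rows themselves. A self-contained sketch using peewee with an in-memory
SQLite database (Item is an illustrative model, not one of this app's):

    from peewee import SqliteDatabase, Model, AutoField, CharField, fn

    db = SqliteDatabase(':memory:')

    class Item(Model):
        id = AutoField()
        name = CharField()

        class Meta:
            database = db

    db.create_tables([Item])
    Item.insert_many([{'name': n} for n in ['a', 'a', 'b']]).execute()

    grouped = Item.select(Item.name, fn.COUNT(Item.id).alias('n')).group_by(Item.name)
    n_groups = len(list(grouped))  # one result row per group on every backend
    print(n_groups)  # -> 2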

from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException, Response
from typing import Optional, List
import logging
import pydantic
from pandas import DataFrame
from ..db_engine import db, Cardset, model_to_dict, Pack, Team, PackType
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA

logging.basicConfig(
    filename=LOG_DATA['filename'],
    format=LOG_DATA['format'],
    level=LOG_DATA['log_level']
)

router = APIRouter(
    prefix='/api/v2/packs',
    tags=['packs']
)


class PackPydantic(pydantic.BaseModel):
    team_id: int
    pack_type_id: int
    pack_team_id: Optional[int] = None
    pack_cardset_id: Optional[int] = None
    open_time: Optional[int] = None


class PackModel(pydantic.BaseModel):
    packs: List[PackPydantic]
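

# Note on open_time (see commit message): clients send epoch milliseconds, but
# the column holds a native datetime on PostgreSQL (vs. a timestamp value on
# SQLite). Every write path below therefore converts once at the boundary with:
#     datetime.fromtimestamp(open_time / 1000) if open_time else None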
@router.get('')
async def get_packs(
        team_id: Optional[int] = None, pack_type_id: Optional[int] = None, opened: Optional[bool] = None,
        limit: Optional[int] = None, new_to_old: Optional[bool] = None, pack_team_id: Optional[int] = None,
        pack_cardset_id: Optional[int] = None, exact_match: Optional[bool] = False, csv: Optional[bool] = None):
    all_packs = Pack.select()
    if all_packs.count() == 0:
        db.close()
        raise HTTPException(status_code=404, detail='There are no packs to filter')
    if team_id is not None:
        try:
            this_team = Team.get_by_id(team_id)
        except Exception:
            db.close()
            raise HTTPException(status_code=404, detail=f'No team found with id {team_id}')
        all_packs = all_packs.where(Pack.team == this_team)
    if pack_type_id is not None:
        try:
            this_pack_type = PackType.get_by_id(pack_type_id)
        except Exception:
            db.close()
            raise HTTPException(status_code=404, detail=f'No pack type found with id {pack_type_id}')
        all_packs = all_packs.where(Pack.pack_type == this_pack_type)
    if pack_team_id is not None:
        try:
            this_pack_team = Team.get_by_id(pack_team_id)
        except Exception:
            db.close()
            raise HTTPException(status_code=404, detail=f'No team found with id {pack_team_id}')
        all_packs = all_packs.where(Pack.pack_team == this_pack_team)
    elif exact_match:
        all_packs = all_packs.where(Pack.pack_team.is_null())
    if pack_cardset_id is not None:
        try:
            this_pack_cardset = Cardset.get_by_id(pack_cardset_id)
        except Exception:
            db.close()
            raise HTTPException(status_code=404, detail=f'No cardset found with id {pack_cardset_id}')
        all_packs = all_packs.where(Pack.pack_cardset == this_pack_cardset)
    elif exact_match:
        all_packs = all_packs.where(Pack.pack_cardset.is_null())
    if opened is not None:
        # opened=True -> open_time NOT NULL; opened=False -> open_time IS NULL
        all_packs = all_packs.where(Pack.open_time.is_null(not opened))
    if limit is not None:
        all_packs = all_packs.limit(limit)
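    # PostgreSQL makes no guarantee about row order without an explicit
    # ORDER BY (SQLite just happens to return rows in rowid order), so the
    # result set is always pinned to Pack.id, per the commit message.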
    if new_to_old:
        all_packs = all_packs.order_by(-Pack.id)
    else:
        all_packs = all_packs.order_by(Pack.id)
    # if all_packs.count() == 0:
    #     db.close()
    #     raise HTTPException(status_code=404, detail=f'No packs found')
    if csv:
        data_list = [['id', 'team', 'pack_type', 'open_time']]
        for line in all_packs:
            data_list.append(
                [
                    line.id, line.team.abbrev, line.pack_type.name,
                    line.open_time  # Already datetime in PostgreSQL
                ]
            )
        return_val = DataFrame(data_list).to_csv(header=False, index=False)
        db.close()
        return Response(content=return_val, media_type='text/csv')
    else:
        return_val = {'count': all_packs.count(), 'packs': []}
        for x in all_packs:
            return_val['packs'].append(model_to_dict(x))
        db.close()
        return return_val


@router.get('/{pack_id}')
async def get_one_pack(pack_id: int, csv: Optional[bool] = False):
    try:
        this_pack = Pack.get_by_id(pack_id)
    except Exception:
        db.close()
        raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}')
    if csv:
        data_list = [
            ['id', 'team', 'pack_type', 'open_time'],
            [this_pack.id, this_pack.team.abbrev, this_pack.pack_type.name,
             this_pack.open_time]  # Already datetime in PostgreSQL
        ]
        return_val = DataFrame(data_list).to_csv(header=False, index=False)
        db.close()
        return Response(content=return_val, media_type='text/csv')
    else:
        return_val = model_to_dict(this_pack)
        db.close()
        return return_val


@router.post('')
async def post_pack(packs: PackModel, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to post packs. This event has been logged.'
        )
    new_packs = []
    for x in packs.packs:
        this_pack = Pack(
            team_id=x.team_id,
            pack_type_id=x.pack_type_id,
            pack_team_id=x.pack_team_id,
            pack_cardset_id=x.pack_cardset_id,
            open_time=datetime.fromtimestamp(x.open_time / 1000) if x.open_time else None
        )
        new_packs.append(this_pack)
    with db.atomic():
        Pack.bulk_create(new_packs, batch_size=15)
    db.close()
    raise HTTPException(status_code=200, detail=f'{len(new_packs)} packs have been added')


@router.post('/one')
async def post_one_pack(pack: PackPydantic, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to post packs. This event has been logged.'
        )
    this_pack = Pack(
        team_id=pack.team_id,
        pack_type_id=pack.pack_type_id,
        pack_team_id=pack.pack_team_id,
        pack_cardset_id=pack.pack_cardset_id,
        open_time=datetime.fromtimestamp(pack.open_time / 1000) if pack.open_time else None
    )
    saved = this_pack.save()
    if saved == 1:
        return_val = model_to_dict(this_pack)
        db.close()
        return return_val
    else:
        db.close()
        raise HTTPException(
            status_code=418,
            detail='Well slap my ass and call me a teapot; I could not save that pack'
        )


@router.patch('/{pack_id}')
async def patch_pack(
        pack_id: int, team_id: Optional[int] = None, pack_type_id: Optional[int] = None,
        open_time: Optional[int] = None, pack_team_id: Optional[int] = None,
        pack_cardset_id: Optional[int] = None, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to patch packs. This event has been logged.'
        )
    try:
        this_pack = Pack.get_by_id(pack_id)
    except Exception:
        db.close()
        raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}')
    if team_id is not None:
        this_pack.team_id = team_id
    if pack_type_id is not None:
        this_pack.pack_type_id = pack_type_id
    # A negative value is the caller's way of clearing an optional field.
    if pack_team_id is not None:
        if pack_team_id < 0:
            this_pack.pack_team_id = None
        else:
            this_pack.pack_team_id = pack_team_id
    if pack_cardset_id is not None:
        if pack_cardset_id < 0:
            this_pack.pack_cardset_id = None
        else:
            this_pack.pack_cardset_id = pack_cardset_id
    if open_time is not None:
        if open_time < 0:
            this_pack.open_time = None
        else:
            this_pack.open_time = datetime.fromtimestamp(open_time / 1000)
    if this_pack.save() == 1:
        return_val = model_to_dict(this_pack)
        db.close()
        return return_val
    else:
        db.close()
        raise HTTPException(
            status_code=418,
            detail='Well slap my ass and call me a teapot; I could not save that pack'
        )


@router.delete('/{pack_id}')
async def delete_pack(pack_id: int, token: str = Depends(oauth2_scheme)):
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to delete packs. This event has been logged.'
        )
    try:
        this_pack = Pack.get_by_id(pack_id)
    except Exception:
        db.close()
        raise HTTPException(status_code=404, detail=f'No pack found with id {pack_id}')
    count = this_pack.delete_instance()
    db.close()
    if count == 1:
        raise HTTPException(status_code=200, detail=f'Pack {pack_id} has been deleted')
    else:
        raise HTTPException(status_code=500, detail=f'Pack {pack_id} was not deleted')
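

# Example usage (illustrative; assumes the service runs at localhost:8000):
#   GET  /api/v2/packs?team_id=3&opened=false&new_to_old=true
#   GET  /api/v2/packs/42?csv=true
#   POST /api/v2/packs/one  (Bearer token required) with JSON body:
#        {"team_id": 3, "pack_type_id": 1, "open_time": 1706900000000}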