Compare commits

..

10 Commits

Author SHA1 Message Date
Cal Corum
6130eb993f fix: use Field(default_factory) for offense_col random default (#24)
Pydantic evaluates bare `random.randint(1, 3)` once at class definition
time, so every PlayerModel instance shared the same value. Replaced with
`pydantic.Field(default_factory=...)` so a new random value is generated
per instance.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-03 21:46:03 +00:00
cal
215c48a53e Merge pull request 'fix: remove dead roster fields from CSV in v1_cards_get_one (#25)' (#35) from ai/paper-dynasty-database-25 into next-release
Reviewed-on: #35
2026-03-03 21:45:14 +00:00
Cal Corum
6a2e8a2d2a fix: remove dead roster fields from CSV in v1_cards_get_one (#25)
Card model has no roster1/2/3 fields. Accessing them would raise
AttributeError at runtime. Removed the non-existent columns from
the CSV header and data row.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-03 21:44:59 +00:00
cal
d031553e03 Merge pull request 'fix: correct inverted TESTING env check and leading space in .env (#23)' (#40) from ai/paper-dynasty-database#23 into next-release
Reviewed-on: #40
2026-03-03 21:44:25 +00:00
Cal Corum
9fc7a9449e fix: correct inverted TESTING env check and leading space in .env (#23)
- Change `== 'False'` to `== 'True'` so TESTING=True routes to dev URL
- Fix leading space on TESTING=TRUE in .env so the var is actually set
- Update .env comment to match corrected logic

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-03 21:43:54 +00:00
cal
7b18962033 Merge pull request 'fix: respect is_ai=False in get_teams filter (#22)' (#41) from ai/paper-dynasty-database#22 into next-release
Reviewed-on: #41
2026-03-03 21:42:00 +00:00
Cal Corum
3e15acbb9d fix: respect is_ai=False in get_teams filter (#22)
`all_teams.where(Team.is_ai)` always filtered for AI teams regardless
of the caller's intent. Match the existing has_guide pattern and use
explicit boolean comparison so False is handled correctly.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-03 21:38:50 +00:00
Cal Corum
2a660e9c19 docs: add next-release branch workflow to CLAUDE.md
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-03 10:56:03 -06:00
cal
4a4ed0ff2b Merge pull request 'fix: remove debug print(req.scope) from get_docs route (#31)' (#32) from ai/paper-dynasty-database#31 into next-release
Reviewed-on: #32
2026-03-03 16:52:38 +00:00
Cal Corum
65ad72c299 fix: remove debug print(req.scope) from get_docs route (#31)
All checks were successful
Build Docker Image / build (pull_request) Successful in 9m50s
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-02 22:36:15 -06:00
6 changed files with 163 additions and 72 deletions

4
.env
View File

@@ -59,9 +59,9 @@ API_TOKEN=Tp3aO3jhYve5NJF1IqOmJTmk
# PRIVATE_IN_SCHEMA=true
# Testing mode
# Set to 'False' to use development database URL (pddev.manticorum.com)
# Set to 'True' to use development database URL (pddev.manticorum.com)
# Leave unset or set to any other value for production
TESTING=TRUE
TESTING=True
# =============================================================================
# EXAMPLE CONFIGURATIONS

View File

@@ -51,6 +51,13 @@ docker build -t paper-dynasty-db . # Build image
- DB connection errors → verify `POSTGRES_HOST` points to correct container name
- **CI/CD**: Gitea Actions on PR to `main` — builds Docker image, auto-generates CalVer version (`YYYY.MM.BUILD`) on merge
### Release Workflow
1. Create feature/fix branches off `next-release` (e.g., `fix/card-pricing`)
2. When done, merge the branch into `next-release` — this is the staging branch where changes accumulate
3. When ready to release, open a PR from `next-release` → `main`
4. CI builds Docker image on PR; CalVer tag is created on merge
5. Deploy the new image to production
## Important
- Docker image installs only Playwright Chromium (not all browsers) to optimize size

View File

@@ -5,33 +5,37 @@ import os
import requests
from fastapi.security import OAuth2PasswordBearer
date = f'{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}'
date = f"{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}"
LOG_DATA = {
'filename': f'logs/database/{date}.log',
'format': '%(asctime)s - database - %(levelname)s - %(message)s',
'log_level': logging.INFO if os.environ.get('LOG_LEVEL') == 'INFO' else 'WARN'
"filename": f"logs/database/{date}.log",
"format": "%(asctime)s - database - %(levelname)s - %(message)s",
"log_level": logging.INFO if os.environ.get("LOG_LEVEL") == "INFO" else "WARN",
}
logging.basicConfig(
filename=LOG_DATA['filename'],
format=LOG_DATA['format'],
level=LOG_DATA['log_level']
filename=LOG_DATA["filename"],
format=LOG_DATA["format"],
level=LOG_DATA["log_level"],
)
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
master_debug = False
DB_URL = 'https://pd.manticorum.com/api/'
DB_URL = "https://pd.manticorum.com/api/"
AUTH_TOKEN = f'{os.environ.get("API_TOKEN")}'
AUTH_HEADER = {'Authorization': f'Bearer {AUTH_TOKEN}'}
AUTH_HEADER = {"Authorization": f"Bearer {AUTH_TOKEN}"}
priv_help = False if not os.environ.get('PRIVATE_IN_SCHEMA') else os.environ.get('PRIVATE_IN_SCHEMA').upper()
priv_help = (
False
if not os.environ.get("PRIVATE_IN_SCHEMA")
else os.environ.get("PRIVATE_IN_SCHEMA").upper()
)
PRIVATE_IN_SCHEMA = True if priv_help else False
if os.environ.get('TESTING') == 'False':
DB_URL = 'https://pddev.manticorum.com/api/'
if os.environ.get("TESTING") == "True":
DB_URL = "https://pddev.manticorum.com/api/"
def valid_token(token):
@@ -42,33 +46,41 @@ def int_timestamp(datetime_obj: datetime) -> int:
return int(datetime.datetime.timestamp(datetime_obj) * 1000)
def mround(x, prec=2, base=.05):
def mround(x, prec=2, base=0.05):
return round(base * round(float(x) / base), prec)
def param_char(other_params):
if other_params:
return '&'
return "&"
else:
return '?'
return "?"
def get_req_url(endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None):
def get_req_url(
endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None
):
req_url = f'{DB_URL}/v{api_ver}/{endpoint}{"/" if object_id is not None else ""}{object_id if object_id is not None else ""}'
if params:
other_params = False
for x in params:
req_url += f'{param_char(other_params)}{x[0]}={x[1]}'
req_url += f"{param_char(other_params)}{x[0]}={x[1]}"
other_params = True
return req_url
async def db_get(endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None, none_okay: bool = True,
timeout: int = 3):
async def db_get(
endpoint: str,
api_ver: int = 2,
object_id: int = None,
params: list = None,
none_okay: bool = True,
timeout: int = 3,
):
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
log_string = f"get:\n{endpoint} id: {object_id} params: {params}"
logging.info(log_string) if master_debug else logging.debug(log_string)
retries = 0
@@ -77,37 +89,51 @@ async def db_get(endpoint: str, api_ver: int = 2, object_id: int = None, params:
resp = requests.get(req_url, timeout=timeout)
break
except requests.ReadTimeout as e:
logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
logging.error(
f"Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}"
)
if retries > 1:
raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
f'hang on a few extra seconds and try again.')
raise ConnectionError(
f"DB: The internet was a bit too slow for me to grab the data I needed. Please "
f"hang on a few extra seconds and try again."
)
timeout += [2, 5][retries]
retries += 1
if resp.status_code == 200:
data = resp.json()
log_string = f'{data}'
log_string = f"{data}"
if master_debug:
logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.info(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
else:
logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.debug(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
return data
elif none_okay:
data = resp.json()
log_string = f'{data}'
log_string = f"{data}"
if master_debug:
logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.info(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
else:
logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.debug(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
return None
else:
logging.warning(resp.text)
raise ValueError(f'DB: {resp.text}')
raise ValueError(f"DB: {resp.text}")
async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3):
async def db_patch(
endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3
):
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
log_string = f'patch:\n{endpoint} {params}'
log_string = f"patch:\n{endpoint} {params}"
logging.info(log_string) if master_debug else logging.debug(log_string)
retries = 0
@@ -116,60 +142,80 @@ async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2
resp = requests.patch(req_url, headers=AUTH_HEADER, timeout=timeout)
break
except requests.Timeout as e:
logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
logging.error(
f"Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}"
)
if retries > 1:
raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
f'hang on a few extra seconds and try again.')
raise ConnectionError(
f"DB: The internet was a bit too slow for me to grab the data I needed. Please "
f"hang on a few extra seconds and try again."
)
timeout += [min(3, timeout), min(5, timeout)][retries]
retries += 1
if resp.status_code == 200:
data = resp.json()
log_string = f'{data}'
log_string = f"{data}"
if master_debug:
logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.info(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
else:
logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.debug(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
return data
else:
logging.warning(resp.text)
raise ValueError(f'DB: {resp.text}')
raise ValueError(f"DB: {resp.text}")
async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
async def db_post(
endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3
):
req_url = get_req_url(endpoint, api_ver=api_ver)
log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
log_string = f"post:\n{endpoint} payload: {payload}\ntype: {type(payload)}"
logging.info(log_string) if master_debug else logging.debug(log_string)
retries = 0
while True:
try:
resp = requests.post(req_url, json=payload, headers=AUTH_HEADER, timeout=timeout)
resp = requests.post(
req_url, json=payload, headers=AUTH_HEADER, timeout=timeout
)
break
except requests.Timeout as e:
logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
logging.error(
f"Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}"
)
if retries > 1:
raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
f'hang on a few extra seconds and try again.')
raise ConnectionError(
f"DB: The internet was a bit too slow for me to grab the data I needed. Please "
f"hang on a few extra seconds and try again."
)
timeout += [min(3, timeout), min(5, timeout)][retries]
retries += 1
if resp.status_code == 200:
data = resp.json()
log_string = f'{data}'
log_string = f"{data}"
if master_debug:
logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.info(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
else:
logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.debug(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
return data
else:
logging.warning(resp.text)
raise ValueError(f'DB: {resp.text}')
raise ValueError(f"DB: {resp.text}")
async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id)
log_string = f'delete:\n{endpoint} {object_id}'
log_string = f"delete:\n{endpoint} {object_id}"
logging.info(log_string) if master_debug else logging.debug(log_string)
retries = 0
@@ -178,21 +224,29 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
resp = requests.delete(req_url, headers=AUTH_HEADER, timeout=timeout)
break
except requests.ReadTimeout as e:
logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
logging.error(
f"Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}"
)
if retries > 1:
raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
f'hang on a few extra seconds and try again.')
raise ConnectionError(
f"DB: The internet was a bit too slow for me to grab the data I needed. Please "
f"hang on a few extra seconds and try again."
)
timeout += [min(3, timeout), min(5, timeout)][retries]
retries += 1
if resp.status_code == 200:
data = resp.json()
log_string = f'{data}'
log_string = f"{data}"
if master_debug:
logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.info(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
else:
logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
logging.debug(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
)
return True
else:
logging.warning(resp.text)
raise ValueError(f'DB: {resp.text}')
raise ValueError(f"DB: {resp.text}")

View File

@@ -6,15 +6,42 @@ from fastapi.openapi.utils import get_openapi
# from fastapi.templating import Jinja2Templates
from .routers_v2 import (
current, awards, teams, rarity, cardsets, players, packtypes, packs, cards, events, results, rewards, decisions,
batstats, pitstats, notifications, paperdex, gamerewards, gauntletrewards, gauntletruns, battingcards,
battingcardratings, pitchingcards, pitchingcardratings, cardpositions, scouting, mlbplayers, stratgame, stratplays)
current,
awards,
teams,
rarity,
cardsets,
players,
packtypes,
packs,
cards,
events,
results,
rewards,
decisions,
batstats,
pitstats,
notifications,
paperdex,
gamerewards,
gauntletrewards,
gauntletruns,
battingcards,
battingcardratings,
pitchingcards,
pitchingcardratings,
cardpositions,
scouting,
mlbplayers,
stratgame,
stratplays,
)
app = FastAPI(
# root_path='/api',
responses={404: {'description': 'Not found'}},
docs_url='/api/docs',
redoc_url='/api/redoc'
responses={404: {"description": "Not found"}},
docs_url="/api/docs",
redoc_url="/api/redoc",
)
# app.mount("/static", StaticFiles(directory="storage/static"), name="static")
@@ -53,10 +80,11 @@ app.include_router(decisions.router)
@app.get("/api/docs", include_in_schema=False)
async def get_docs(req: Request):
print(req.scope)
return get_swagger_ui_html(openapi_url=req.scope.get('root_path')+'/openapi.json', title='Swagger')
return get_swagger_ui_html(
openapi_url=req.scope.get("root_path") + "/openapi.json", title="Swagger"
)
@app.get("/api/openapi.json", include_in_schema=False)
async def openapi():
return get_openapi(title='Paper Dynasty API', version=f'0.1.1', routes=app.routes)
return get_openapi(title="Paper Dynasty API", version=f"0.1.1", routes=app.routes)

View File

@@ -141,9 +141,8 @@ async def v1_cards_get_one(card_id, csv: Optional[bool] = False):
if csv:
data_list = [
['id', 'player', 'team', 'pack', 'value', 'roster1', 'roster2', 'roster3'],
[this_card.id, this_card.player, this_card.team.abbrev, this_card.pack, this_card.value,
this_card.roster1.name, this_card.roster2.name, this_card.roster3.name]
['id', 'player', 'team', 'pack', 'value'],
[this_card.id, this_card.player, this_card.team.abbrev, this_card.pack, this_card.value]
]
return_val = DataFrame(data_list).to_csv(header=False, index=False)

View File

@@ -150,7 +150,10 @@ async def get_teams(
all_teams = all_teams.where(Team.has_guide == True)
if is_ai is not None:
all_teams = all_teams.where(Team.is_ai)
if not is_ai:
all_teams = all_teams.where(Team.is_ai == False)
else:
all_teams = all_teams.where(Team.is_ai == True)
if event_id is not None:
all_teams = all_teams.where(Team.event_id == event_id)