paper-dynasty-discord/api_calls.py
Cal Corum 4be6afb541 Add API timeout/retry logic and fix get_team_by_owner for PostgreSQL
- Add APITimeoutError exception and retry logic to db_get
- Add timeout handling to db_post, db_put, db_patch, db_delete
- Fix get_team_by_owner to prefer non-gauntlet team (PostgreSQL migration fix)
- Code formatting cleanup (black)
2026-01-31 15:52:14 -06:00


import asyncio
import datetime
from dataclasses import dataclass
from typing import Optional
import logging
import aiohttp
from aiohttp import ClientTimeout
import os
from exceptions import DatabaseError, APITimeoutError
AUTH_TOKEN = {"Authorization": f"Bearer {os.environ.get('API_TOKEN')}"}
ENV_DATABASE = os.getenv("DATABASE", "dev").lower()
DB_URL = (
"https://pd.manticorum.com/api"
if "prod" in ENV_DATABASE
else "https://pddev.manticorum.com/api"
)
master_debug = True
PLAYER_CACHE = {}
logger = logging.getLogger("discord_app")
def param_char(other_params):
if other_params:
return "&"
else:
return "?"
def get_req_url(
endpoint: str,
api_ver: int = 2,
object_id: Optional[int] = None,
params: Optional[list] = None,
):
    req_url = f"{DB_URL}/v{api_ver}/{endpoint}"
    if object_id is not None:
        req_url += f"/{object_id}"
if params:
other_params = False
for x in params:
req_url += f"{param_char(other_params)}{x[0]}={x[1]}"
other_params = True
return req_url
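# Illustrative example of the URL get_req_url builds (abbrev value is hypothetical;
# the dev DB_URL is assumed):
#   get_req_url("teams", object_id=5, params=[("abbrev", "NYY")])
#   -> "https://pddev.manticorum.com/api/v2/teams/5?abbrev=NYY"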
def log_return_value(log_string: str):
    # Log the returned payload in 3000-character chunks, stopping after ~300000 characters.
    start = 0
    end = 3000
    while end < 300000:
        line = log_string[start:end]
        if len(line) == 0:
            return
        prefix = "\n\nreturn: " if start == 0 else ""
        logger.info(f"{prefix}{line}")
        start += 3000
        end += 3000
    logger.warning("[ S N I P P E D ]")
# if master_debug:
# logger.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
# else:
# logger.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
async def db_get(
endpoint: str,
api_ver: int = 2,
object_id: Optional[int] = None,
params: Optional[list] = None,
none_okay: bool = True,
timeout: int = 5,
retries: int = 3,
):
"""
GET request to the API with timeout and retry logic.
Args:
endpoint: API endpoint path
api_ver: API version (default 2)
object_id: Optional object ID to append to URL
params: Optional list of (key, value) tuples for query params
none_okay: If True, return None on non-200 response; if False, raise DatabaseError
timeout: Request timeout in seconds (default 5)
retries: Number of retry attempts on timeout (default 3)
Returns:
JSON response or None if none_okay and request failed
Raises:
APITimeoutError: If all retry attempts fail due to timeout
DatabaseError: If response is non-200 and none_okay is False
"""
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
log_string = f"db_get - get: {endpoint} id: {object_id} params: {params}"
logger.info(log_string) if master_debug else logger.debug(log_string)
for attempt in range(retries):
try:
client_timeout = ClientTimeout(total=timeout)
async with aiohttp.ClientSession(
headers=AUTH_TOKEN, timeout=client_timeout
) as session:
async with session.get(req_url) as r:
if r.status == 200:
js = await r.json()
log_return_value(f"{js}")
return js
elif none_okay:
e = await r.text()
logger.error(e)
return None
else:
e = await r.text()
logger.error(e)
raise DatabaseError(e)
except asyncio.TimeoutError:
if attempt < retries - 1:
                wait_time = 2**attempt  # exponential backoff: 1s, then 2s, ...
logger.warning(
f"Timeout on GET {endpoint}, retry {attempt + 1}/{retries} in {wait_time}s"
)
await asyncio.sleep(wait_time)
else:
logger.error(
f"Connection timeout to host {req_url} after {retries} attempts"
)
raise APITimeoutError(f"Connection timeout to host {req_url}")
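# Usage sketch for db_get (hypothetical caller; endpoint/params mirror
# get_team_by_abbrev below):
#   try:
#       teams = await db_get("teams", params=[("abbrev", "NYY")], timeout=5, retries=3)
#   except APITimeoutError:
#       ...  # all retries timed out; tell the user to try again later
# A non-200 response returns None here because none_okay defaults to True.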
async def db_patch(
endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 5
):
"""
PATCH request to the API with timeout (no retry - not safe for mutations).
Args:
endpoint: API endpoint path
object_id: Object ID to patch
params: List of (key, value) tuples for query params
api_ver: API version (default 2)
timeout: Request timeout in seconds (default 5)
Raises:
APITimeoutError: If request times out
DatabaseError: If response is non-200
"""
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
log_string = f"db_patch - patch: {endpoint} {params}"
logger.info(log_string) if master_debug else logger.debug(log_string)
try:
client_timeout = ClientTimeout(total=timeout)
async with aiohttp.ClientSession(
headers=AUTH_TOKEN, timeout=client_timeout
) as session:
async with session.patch(req_url) as r:
if r.status == 200:
js = await r.json()
log_return_value(f"{js}")
return js
else:
e = await r.text()
logger.error(e)
raise DatabaseError(e)
except asyncio.TimeoutError:
logger.error(f"Connection timeout to host {req_url}")
raise APITimeoutError(f"Connection timeout to host {req_url}")
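# Usage sketch for db_patch (hypothetical field/value): changes are sent as query
# params on the URL, not as a JSON body, so a call looks like:
#   await db_patch("teams", object_id=5, params=[("sname", "Sharks")])
#   -> PATCH {DB_URL}/v2/teams/5?sname=Sharks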
async def db_post(
endpoint: str, api_ver: int = 2, payload: Optional[dict] = None, timeout: int = 5
):
"""
POST request to the API with timeout (no retry - not safe for mutations).
Args:
endpoint: API endpoint path
api_ver: API version (default 2)
payload: Optional JSON payload
timeout: Request timeout in seconds (default 5)
Raises:
APITimeoutError: If request times out
DatabaseError: If response is non-200
"""
req_url = get_req_url(endpoint, api_ver=api_ver)
log_string = f"db_post - post: {endpoint} payload: {payload}\ntype: {type(payload)}"
logger.info(log_string) if master_debug else logger.debug(log_string)
try:
client_timeout = ClientTimeout(total=timeout)
async with aiohttp.ClientSession(
headers=AUTH_TOKEN, timeout=client_timeout
) as session:
async with session.post(req_url, json=payload) as r:
if r.status == 200:
js = await r.json()
log_return_value(f"{js}")
return js
else:
e = await r.text()
logger.error(e)
raise DatabaseError(e)
except asyncio.TimeoutError:
logger.error(f"Connection timeout to host {req_url}")
raise APITimeoutError(f"Connection timeout to host {req_url}")
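# Usage sketch for db_post: the payload is sent as a JSON body (this mirrors
# post_to_dex below):
#   await db_post("paperdex", payload={"team_id": team["id"], "player_id": player["id"]})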
async def db_put(
endpoint: str, api_ver: int = 2, payload: Optional[dict] = None, timeout: int = 5
):
"""
PUT request to the API with timeout (no retry - not safe for mutations).
Args:
endpoint: API endpoint path
api_ver: API version (default 2)
payload: Optional JSON payload
timeout: Request timeout in seconds (default 5)
Raises:
APITimeoutError: If request times out
DatabaseError: If response is non-200
"""
req_url = get_req_url(endpoint, api_ver=api_ver)
log_string = f"db_put - put: {endpoint} payload: {payload}\ntype: {type(payload)}"
logger.info(log_string) if master_debug else logger.debug(log_string)
try:
client_timeout = ClientTimeout(total=timeout)
async with aiohttp.ClientSession(
headers=AUTH_TOKEN, timeout=client_timeout
) as session:
async with session.put(req_url, json=payload) as r:
if r.status == 200:
js = await r.json()
log_return_value(f"{js}")
return js
else:
e = await r.text()
logger.error(e)
raise DatabaseError(e)
except asyncio.TimeoutError:
logger.error(f"Connection timeout to host {req_url}")
raise APITimeoutError(f"Connection timeout to host {req_url}")
async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout: int = 5):
"""
DELETE request to the API with timeout (no retry - not safe for mutations).
Args:
endpoint: API endpoint path
object_id: Object ID to delete
api_ver: API version (default 2)
timeout: Request timeout in seconds (default 5)
Raises:
APITimeoutError: If request times out
DatabaseError: If response is non-200
"""
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id)
log_string = f"db_delete - delete: {endpoint} {object_id}"
logger.info(log_string) if master_debug else logger.debug(log_string)
try:
client_timeout = ClientTimeout(total=timeout)
async with aiohttp.ClientSession(
headers=AUTH_TOKEN, timeout=client_timeout
) as session:
async with session.delete(req_url) as r:
if r.status == 200:
js = await r.json()
log_return_value(f"{js}")
return js
else:
e = await r.text()
logger.error(e)
raise DatabaseError(e)
except asyncio.TimeoutError:
logger.error(f"Connection timeout to host {req_url}")
raise APITimeoutError(f"Connection timeout to host {req_url}")
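# Usage sketch for db_delete (hypothetical endpoint/id): object_id is appended to
# the URL and no body is sent:
#   await db_delete("paperdex", object_id=123)
#   -> DELETE {DB_URL}/v2/paperdex/123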
async def get_team_by_abbrev(abbrev: str):
all_teams = await db_get("teams", params=[("abbrev", abbrev)])
if not all_teams or not all_teams["count"]:
return None
return all_teams["teams"][0]
async def post_to_dex(player, team):
return await db_post(
"paperdex", payload={"team_id": team["id"], "player_id": player["id"]}
)
def team_hash(team):
hash_string = f"{team['sname'][-1]}{team['gmid'] / 6950123:.0f}{team['sname'][-2]}{team['gmid'] / 42069123:.0f}"
return hash_string
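# Example with a hypothetical team dict: {"sname": "Sharks", "gmid": 123456789}
# hashes to "s18k3" -- the last and second-to-last characters of sname interleaved
# with gmid divided by two fixed constants and rounded to whole numbers.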