Compare commits

..

10 Commits

Author SHA1 Message Date
cal
d809590f0e Merge pull request 'fix: correct column references in season pitching stats SQL' (#105) from fix/pitching-stats-column-name into main
All checks were successful
Build Docker Image / build (push) Successful in 2m11s
2026-04-02 16:57:30 +00:00
cal
0d8e666a75 Merge pull request 'fix: let HTTPException pass through @handle_db_errors' (#104) from fix/handle-db-errors-passthrough-http into main
Some checks failed
Build Docker Image / build (push) Has been cancelled
2026-04-02 16:57:12 +00:00
Cal Corum
bd19b7d913 fix: correct column references in season pitching stats view
All checks were successful
Build Docker Image / build (pull_request) Successful in 2m4s
sp.on_first/on_second/on_third don't exist — the actual columns are
on_first_id/on_second_id/on_third_id. This caused failures when
updating season pitching stats after games.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-02 11:54:56 -05:00
Cal Corum
c49f91cc19 test: update test_get_nonexistent_play to expect 404 after HTTPException fix
All checks were successful
Build Docker Image / build (pull_request) Successful in 1m3s
After handle_db_errors no longer catches HTTPException, GET /plays/999999999
correctly returns 404 instead of 500. Update the assertion and docstring
to reflect the fixed behavior.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-02 09:30:39 -05:00
Cal Corum
215085b326 fix: let HTTPException pass through @handle_db_errors unchanged
All checks were successful
Build Docker Image / build (pull_request) Successful in 2m34s
The decorator was catching all exceptions including intentional
HTTPException (401, 404, etc.) and re-wrapping them as 500 "Database
error". This masked auth failures and other deliberate HTTP errors.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-02 08:30:22 -05:00
cal
c063f5c4ef Merge pull request 'hotfix: remove output caps from GET /players' (#103) from hotfix/remove-players-output-caps into main
All checks were successful
Build Docker Image / build (push) Successful in 1m3s
2026-04-02 01:19:51 +00:00
Cal Corum
d92f571960 hotfix: remove output caps from GET /players endpoint
All checks were successful
Build Docker Image / build (pull_request) Successful in 2m29s
The MAX_LIMIT/DEFAULT_LIMIT caps added in 16f3f8d are too restrictive
for the /players endpoint — bot and website consumers need full player
lists without pagination. Reverts limit param to Optional[int] with no
ceiling while keeping caps on all other endpoints.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 20:14:35 -05:00
cal
81baa54681 Merge pull request 'Fix unbounded API queries causing worker timeouts' (#99) from bugfix/limit-caps into main
All checks were successful
Build Docker Image / build (push) Successful in 1m9s
Reviewed-on: #99
2026-04-01 22:44:38 +00:00
Cal Corum
67e87a893a Fix fieldingstats count computed after limit applied
All checks were successful
Build Docker Image / build (pull_request) Successful in 2m9s
Capture total_count before .limit() so the response count reflects
all matching rows, not just the capped page size. Resolves #100.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:40:02 -05:00
Cal Corum
16f3f8d8de Fix unbounded API queries causing Gunicorn worker timeouts
All checks were successful
Build Docker Image / build (pull_request) Successful in 2m32s
Add MAX_LIMIT=500 cap across all list endpoints, empty string
stripping middleware, and limit/offset to /transactions. Resolves #98.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 17:23:25 -05:00
33 changed files with 723 additions and 189 deletions

View File

@ -57,6 +57,9 @@ priv_help = (
)
PRIVATE_IN_SCHEMA = True if priv_help == "TRUE" else False
MAX_LIMIT = 500
DEFAULT_LIMIT = 200
def valid_token(token):
    """Return True when *token* equals the API_TOKEN environment variable.

    If API_TOKEN is unset, only a ``None`` token compares equal.
    """
    expected = os.environ.get("API_TOKEN")
    return expected == token
@ -376,14 +379,14 @@ def update_season_pitching_stats(player_ids, season, db_connection):
-- RBI allowed (excluding HR) per runner opportunity
CASE
WHEN (SUM(CASE WHEN sp.on_first IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_second IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_third IS NOT NULL THEN 1 ELSE 0 END)) > 0
WHEN (SUM(CASE WHEN sp.on_first_id IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_second_id IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_third_id IS NOT NULL THEN 1 ELSE 0 END)) > 0
THEN ROUND(
(SUM(sp.rbi) - SUM(sp.homerun))::DECIMAL /
(SUM(CASE WHEN sp.on_first IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_second IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_third IS NOT NULL THEN 1 ELSE 0 END)),
(SUM(CASE WHEN sp.on_first_id IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_second_id IS NOT NULL THEN 1 ELSE 0 END) +
SUM(CASE WHEN sp.on_third_id IS NOT NULL THEN 1 ELSE 0 END)),
3
)
ELSE 0.000
@ -804,6 +807,10 @@ def handle_db_errors(func):
return result
except HTTPException:
# Let intentional HTTP errors (401, 404, etc.) pass through unchanged
raise
except Exception as e:
elapsed_time = time.time() - start_time
error_trace = traceback.format_exc()

View File

@ -2,6 +2,7 @@ import datetime
import logging
from logging.handlers import RotatingFileHandler
import os
from urllib.parse import parse_qsl, urlencode
from fastapi import Depends, FastAPI, Request
from fastapi.openapi.docs import get_swagger_ui_html
@ -71,16 +72,16 @@ logger.info(f"Starting up now...")
@app.middleware("http")
async def db_connection_middleware(request: Request, call_next):
from .db_engine import db
db.connect(reuse_if_open=True)
try:
response = await call_next(request)
return response
finally:
if not db.is_closed():
db.close()
async def strip_empty_query_params(request: Request, call_next):
    """Middleware: drop query parameters whose value is the empty string.

    Rebuilds the raw ASGI query string without ``key=``-style blank pairs
    before the request reaches routing, so validated query params (e.g.
    ``limit`` with ``ge=1``) are not tripped up by empty values.
    """
    qs = request.scope.get("query_string", b"")
    if qs:
        # keep_blank_values=True so blank-valued pairs survive parsing and
        # can be filtered out explicitly on the next line.
        pairs = parse_qsl(qs.decode(), keep_blank_values=True)
        filtered = [(k, v) for k, v in pairs if v != ""]
        new_qs = urlencode(filtered).encode()
        request.scope["query_string"] = new_qs
        # NOTE(review): _query_params is Starlette's private cache of the
        # parsed query string; deleting it forces a re-parse of the filtered
        # scope value. Relies on a private attribute — confirm it still
        # exists when upgrading Starlette.
        if hasattr(request, "_query_params"):
            del request._query_params
    return await call_next(request)
app.include_router(current.router)

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -43,6 +45,8 @@ async def get_awards(
team_id: list = Query(default=None),
short_output: Optional[bool] = False,
player_name: list = Query(default=None),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_awards = Award.select()
@ -67,10 +71,14 @@ async def get_awards(
all_players = Player.select().where(fn.Lower(Player.name) << pname_list)
all_awards = all_awards.where(Award.player << all_players)
total_count = all_awards.count()
all_awards = all_awards.offset(offset).limit(limit)
return_awards = {
"count": all_awards.count(),
"count": total_count,
"awards": [model_to_dict(x, recurse=not short_output) for x in all_awards],
}
db.close()
return return_awards
@ -79,8 +87,10 @@ async def get_awards(
async def get_one_award(award_id: int, short_output: Optional[bool] = False):
this_award = Award.get_or_none(Award.id == award_id)
if this_award is None:
db.close()
raise HTTPException(status_code=404, detail=f"Award ID {award_id} not found")
db.close()
return model_to_dict(this_award, recurse=not short_output)
@ -104,6 +114,7 @@ async def patch_award(
this_award = Award.get_or_none(Award.id == award_id)
if this_award is None:
db.close()
raise HTTPException(status_code=404, detail=f"Award ID {award_id} not found")
if name is not None:
@ -125,8 +136,10 @@ async def patch_award(
if this_award.save() == 1:
r_award = model_to_dict(this_award)
db.close()
return r_award
else:
db.close()
raise HTTPException(status_code=500, detail=f"Unable to patch award {award_id}")
@ -170,6 +183,7 @@ async def post_award(award_list: AwardList, token: str = Depends(oauth2_scheme))
with db.atomic():
for batch in chunked(new_awards, 15):
Award.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_awards)} awards"
@ -183,9 +197,11 @@ async def delete_award(award_id: int, token: str = Depends(oauth2_scheme)):
this_award = Award.get_or_none(Award.id == award_id)
if this_award is None:
db.close()
raise HTTPException(status_code=404, detail=f"Award ID {award_id} not found")
count = this_award.delete_instance()
db.close()
if count == 1:
return f"Award {award_id} has been deleted"

View File

@ -19,6 +19,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -84,21 +86,24 @@ async def get_batstats(
week_end: Optional[int] = None,
game_num: list = Query(default=None),
position: list = Query(default=None),
limit: Optional[int] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
sort: Optional[str] = None,
short_output: Optional[bool] = True,
):
if "post" in s_type.lower():
all_stats = BattingStat.post_season(season)
if all_stats.count() == 0:
db.close()
return {"count": 0, "stats": []}
elif s_type.lower() in ["combined", "total", "all"]:
all_stats = BattingStat.combined_season(season)
if all_stats.count() == 0:
db.close()
return {"count": 0, "stats": []}
else:
all_stats = BattingStat.regular_season(season)
if all_stats.count() == 0:
db.close()
return {"count": 0, "stats": []}
if position is not None:
@ -124,14 +129,14 @@ async def get_batstats(
if week_end is not None:
end = min(week_end, end)
if start > end:
db.close()
raise HTTPException(
status_code=404,
detail=f"Start week {start} is after end week {end} - cannot pull stats",
)
all_stats = all_stats.where((BattingStat.week >= start) & (BattingStat.week <= end))
if limit:
all_stats = all_stats.limit(limit)
all_stats = all_stats.limit(limit)
if sort:
if sort == "newest":
all_stats = all_stats.order_by(-BattingStat.week, -BattingStat.game)
@ -142,6 +147,7 @@ async def get_batstats(
# 'stats': [{'id': x.id} for x in all_stats]
}
db.close()
return return_stats
@ -163,6 +169,8 @@ async def get_totalstats(
short_output: Optional[bool] = False,
min_pa: Optional[int] = 1,
week: list = Query(default=None),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
if sum(1 for x in [s_type, (week_start or week_end), week] if x is not None) > 1:
raise HTTPException(
@ -296,7 +304,10 @@ async def get_totalstats(
all_players = Player.select().where(Player.id << player_id)
all_stats = all_stats.where(BattingStat.player << all_players)
return_stats = {"count": all_stats.count(), "stats": []}
total_count = all_stats.count()
all_stats = all_stats.offset(offset).limit(limit)
return_stats = {"count": total_count, "stats": []}
for x in all_stats:
# Handle player field based on grouping with safe access
@ -339,6 +350,7 @@ async def get_totalstats(
"bplo": x.sum_bplo,
}
)
db.close()
return return_stats
@ -362,6 +374,7 @@ async def patch_batstats(
BattingStat.update(**new_stats.dict()).where(BattingStat.id == stat_id).execute()
r_stat = model_to_dict(BattingStat.get_by_id(stat_id))
db.close()
return r_stat
@ -405,4 +418,5 @@ async def post_batstats(s_list: BatStatList, token: str = Depends(oauth2_scheme)
# Update career stats
db.close()
return f"Added {len(all_stats)} batting lines"

View File

@ -41,6 +41,7 @@ async def get_current(season: Optional[int] = None):
if current is not None:
r_curr = model_to_dict(current)
db.close()
return r_curr
else:
return None
@ -99,8 +100,10 @@ async def patch_current(
if current.save():
r_curr = model_to_dict(current)
db.close()
return r_curr
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch current {current_id}"
)
@ -117,8 +120,10 @@ async def post_current(new_current: CurrentModel, token: str = Depends(oauth2_sc
if this_current.save():
r_curr = model_to_dict(this_current)
db.close()
return r_curr
else:
db.close()
raise HTTPException(
status_code=500,
detail=f"Unable to post season {new_current.season} current",

View File

@ -363,6 +363,8 @@ async def get_custom_commands(
except Exception as e:
logger.error(f"Error getting custom commands: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
# Move this route to after the specific string routes
@ -427,6 +429,8 @@ async def create_custom_command_endpoint(
except Exception as e:
logger.error(f"Error creating custom command: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.put("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
@ -486,6 +490,8 @@ async def update_custom_command_endpoint(
except Exception as e:
logger.error(f"Error updating custom command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.patch("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
@ -569,6 +575,8 @@ async def patch_custom_command(
except Exception as e:
logger.error(f"Error patching custom command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.delete("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
@ -604,6 +612,8 @@ async def delete_custom_command_endpoint(
except Exception as e:
logger.error(f"Error deleting custom command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
# Creator endpoints
@ -673,6 +683,8 @@ async def get_creators(
except Exception as e:
logger.error(f"Error getting creators: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.post("/creators", include_in_schema=PRIVATE_IN_SCHEMA)
@ -716,6 +728,8 @@ async def create_creator_endpoint(
except Exception as e:
logger.error(f"Error creating creator: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.get("/stats")
@ -840,6 +854,8 @@ async def get_custom_command_stats():
except Exception as e:
logger.error(f"Error getting custom command stats: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
# Special endpoints for Discord bot integration
@ -905,6 +921,8 @@ async def get_custom_command_by_name_endpoint(command_name: str):
except Exception as e:
logger.error(f"Error getting custom command by name '{command_name}': {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.patch("/by_name/{command_name}/execute", include_in_schema=PRIVATE_IN_SCHEMA)
@ -972,6 +990,8 @@ async def execute_custom_command(
except Exception as e:
logger.error(f"Error executing custom command '{command_name}': {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.get("/autocomplete")
@ -1007,6 +1027,8 @@ async def get_command_names_for_autocomplete(
except Exception as e:
logger.error(f"Error getting command names for autocomplete: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.get("/{command_id}")
@ -1055,3 +1077,5 @@ async def get_custom_command(command_id: int):
except Exception as e:
logger.error(f"Error getting custom command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()

View File

@ -19,6 +19,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -73,7 +75,7 @@ async def get_decisions(
irunners_scored: list = Query(default=None),
game_id: list = Query(default=None),
player_id: list = Query(default=None),
limit: Optional[int] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
short_output: Optional[bool] = False,
):
all_dec = Decision.select().order_by(
@ -135,15 +137,13 @@ async def get_decisions(
if irunners_scored is not None:
all_dec = all_dec.where(Decision.irunners_scored << irunners_scored)
if limit is not None:
if limit < 1:
limit = 1
all_dec = all_dec.limit(limit)
all_dec = all_dec.limit(limit)
return_dec = {
"count": all_dec.count(),
"decisions": [model_to_dict(x, recurse=not short_output) for x in all_dec],
}
db.close()
return return_dec
@ -168,6 +168,7 @@ async def patch_decision(
this_dec = Decision.get_or_none(Decision.id == decision_id)
if this_dec is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Decision ID {decision_id} not found"
)
@ -193,8 +194,10 @@ async def patch_decision(
if this_dec.save() == 1:
d_result = model_to_dict(this_dec)
db.close()
return d_result
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch decision {decision_id}"
)
@ -223,6 +226,7 @@ async def post_decisions(dec_list: DecisionList, token: str = Depends(oauth2_sch
with db.atomic():
for batch in chunked(new_dec, 10):
Decision.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_dec)} decisions"
@ -236,11 +240,13 @@ async def delete_decision(decision_id: int, token: str = Depends(oauth2_scheme))
this_dec = Decision.get_or_none(Decision.id == decision_id)
if this_dec is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Decision ID {decision_id} not found"
)
count = this_dec.delete_instance()
db.close()
if count == 1:
return f"Decision {decision_id} has been deleted"
@ -259,9 +265,11 @@ async def delete_decisions_game(game_id: int, token: str = Depends(oauth2_scheme
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
db.close()
raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found")
count = Decision.delete().where(Decision.game == this_game).execute()
db.close()
if count > 0:
return f"Deleted {count} decisions matching Game ID {game_id}"

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -32,6 +34,8 @@ async def get_divisions(
div_abbrev: Optional[str] = None,
lg_name: Optional[str] = None,
lg_abbrev: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_divisions = Division.select().where(Division.season == season)
@ -44,10 +48,14 @@ async def get_divisions(
if lg_abbrev is not None:
all_divisions = all_divisions.where(Division.league_abbrev == lg_abbrev)
total_count = all_divisions.count()
all_divisions = all_divisions.offset(offset).limit(limit)
return_div = {
"count": all_divisions.count(),
"count": total_count,
"divisions": [model_to_dict(x) for x in all_divisions],
}
db.close()
return return_div
@ -56,11 +64,13 @@ async def get_divisions(
async def get_one_division(division_id: int):
this_div = Division.get_or_none(Division.id == division_id)
if this_div is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Division ID {division_id} not found"
)
r_div = model_to_dict(this_div)
db.close()
return r_div
@ -80,6 +90,7 @@ async def patch_division(
this_div = Division.get_or_none(Division.id == division_id)
if this_div is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Division ID {division_id} not found"
)
@ -95,8 +106,10 @@ async def patch_division(
if this_div.save() == 1:
r_division = model_to_dict(this_div)
db.close()
return r_division
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch division {division_id}"
)
@ -115,8 +128,10 @@ async def post_division(
if this_division.save() == 1:
r_division = model_to_dict(this_division)
db.close()
return r_division
else:
db.close()
raise HTTPException(status_code=500, detail=f"Unable to post division")
@ -129,11 +144,13 @@ async def delete_division(division_id: int, token: str = Depends(oauth2_scheme))
this_div = Division.get_or_none(Division.id == division_id)
if this_div is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Division ID {division_id} not found"
)
count = this_div.delete_instance()
db.close()
if count == 1:
return f"Division {division_id} has been deleted"

View File

@ -32,6 +32,7 @@ async def get_draftdata():
if draft_data is not None:
r_data = model_to_dict(draft_data)
db.close()
return r_data
raise HTTPException(status_code=404, detail=f'No draft data found')
@ -49,6 +50,7 @@ async def patch_draftdata(
draft_data = DraftData.get_or_none(DraftData.id == data_id)
if draft_data is None:
db.close()
raise HTTPException(status_code=404, detail=f'No draft data found')
if currentpick is not None:
@ -66,6 +68,7 @@ async def patch_draftdata(
saved = draft_data.save()
r_data = model_to_dict(draft_data)
db.close()
if saved == 1:
return r_data

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -34,6 +36,8 @@ async def get_draftlist(
season: Optional[int],
team_id: list = Query(default=None),
token: str = Depends(oauth2_scheme),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
if not valid_token(token):
logger.warning(f"get_draftlist - Bad Token: {token}")
@ -46,8 +50,12 @@ async def get_draftlist(
if team_id is not None:
all_list = all_list.where(DraftList.team_id << team_id)
r_list = {"count": all_list.count(), "picks": [model_to_dict(x) for x in all_list]}
total_count = all_list.count()
all_list = all_list.offset(offset).limit(limit)
r_list = {"count": total_count, "picks": [model_to_dict(x) for x in all_list]}
db.close()
return r_list
@ -68,6 +76,7 @@ async def get_team_draftlist(team_id: int, token: str = Depends(oauth2_scheme)):
"picks": [model_to_dict(x) for x in this_list],
}
db.close()
return r_list
@ -97,6 +106,7 @@ async def post_draftlist(
for batch in chunked(new_list, 15):
DraftList.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_list)} list values"
@ -108,4 +118,5 @@ async def delete_draftlist(team_id: int, token: str = Depends(oauth2_scheme)):
raise HTTPException(status_code=401, detail="Unauthorized")
count = DraftList.delete().where(DraftList.team_id == team_id).execute()
db.close()
return f"Deleted {count} list values"

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -50,7 +52,7 @@ async def get_picks(
overall_end: Optional[int] = None,
short_output: Optional[bool] = False,
sort: Optional[str] = None,
limit: Optional[int] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
player_id: list = Query(default=None),
player_taken: Optional[bool] = None,
):
@ -110,8 +112,7 @@ async def get_picks(
all_picks = all_picks.where(DraftPick.overall <= overall_end)
if player_taken is not None:
all_picks = all_picks.where(DraftPick.player.is_null(not player_taken))
if limit is not None:
all_picks = all_picks.limit(limit)
all_picks = all_picks.limit(limit)
if sort is not None:
if sort == "order-asc":
@ -123,6 +124,7 @@ async def get_picks(
for line in all_picks:
return_picks["picks"].append(model_to_dict(line, recurse=not short_output))
db.close()
return return_picks
@ -134,6 +136,7 @@ async def get_one_pick(pick_id: int, short_output: Optional[bool] = False):
r_pick = model_to_dict(this_pick, recurse=not short_output)
else:
raise HTTPException(status_code=404, detail=f"Pick ID {pick_id} not found")
db.close()
return r_pick
@ -151,6 +154,7 @@ async def patch_pick(
DraftPick.update(**new_pick.dict()).where(DraftPick.id == pick_id).execute()
r_pick = model_to_dict(DraftPick.get_by_id(pick_id))
db.close()
return r_pick
@ -167,6 +171,7 @@ async def post_picks(p_list: DraftPickList, token: str = Depends(oauth2_scheme))
DraftPick.season == pick.season, DraftPick.overall == pick.overall
)
if dupe:
db.close()
raise HTTPException(
status_code=500,
detail=f"Pick # {pick.overall} already exists for season {pick.season}",
@ -177,6 +182,7 @@ async def post_picks(p_list: DraftPickList, token: str = Depends(oauth2_scheme))
with db.atomic():
for batch in chunked(new_picks, 15):
DraftPick.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_picks)} picks"
@ -193,6 +199,7 @@ async def delete_pick(pick_id: int, token: str = Depends(oauth2_scheme)):
raise HTTPException(status_code=404, detail=f"Pick ID {pick_id} not found")
count = this_pick.delete_instance()
db.close()
if count == 1:
return f"Draft pick {pick_id} has been deleted"

View File

@ -3,37 +3,61 @@ from typing import List, Optional, Literal
import logging
import pydantic
from ..db_engine import db, BattingStat, Team, Player, Current, model_to_dict, chunked, fn, per_season_weeks
from ..dependencies import oauth2_scheme, valid_token, handle_db_errors
logger = logging.getLogger('discord_app')
router = APIRouter(
prefix='/api/v3/fieldingstats',
tags=['fieldingstats']
from ..db_engine import (
db,
BattingStat,
Team,
Player,
Current,
model_to_dict,
chunked,
fn,
per_season_weeks,
)
from ..dependencies import (
oauth2_scheme,
valid_token,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@router.get('')
router = APIRouter(prefix="/api/v3/fieldingstats", tags=["fieldingstats"])
@router.get("")
@handle_db_errors
async def get_fieldingstats(
season: int, s_type: Optional[str] = 'regular', team_abbrev: list = Query(default=None),
player_name: list = Query(default=None), player_id: list = Query(default=None),
week_start: Optional[int] = None, week_end: Optional[int] = None, game_num: list = Query(default=None),
position: list = Query(default=None), limit: Optional[int] = None, sort: Optional[str] = None,
short_output: Optional[bool] = True):
if 'post' in s_type.lower():
season: int,
s_type: Optional[str] = "regular",
team_abbrev: list = Query(default=None),
player_name: list = Query(default=None),
player_id: list = Query(default=None),
week_start: Optional[int] = None,
week_end: Optional[int] = None,
game_num: list = Query(default=None),
position: list = Query(default=None),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
sort: Optional[str] = None,
short_output: Optional[bool] = True,
):
if "post" in s_type.lower():
all_stats = BattingStat.post_season(season)
if all_stats.count() == 0:
return {'count': 0, 'stats': []}
elif s_type.lower() in ['combined', 'total', 'all']:
db.close()
return {"count": 0, "stats": []}
elif s_type.lower() in ["combined", "total", "all"]:
all_stats = BattingStat.combined_season(season)
if all_stats.count() == 0:
return {'count': 0, 'stats': []}
db.close()
return {"count": 0, "stats": []}
else:
all_stats = BattingStat.regular_season(season)
if all_stats.count() == 0:
return {'count': 0, 'stats': []}
db.close()
return {"count": 0, "stats": []}
all_stats = all_stats.where(
(BattingStat.xch > 0) | (BattingStat.pb > 0) | (BattingStat.sbc > 0)
@ -48,7 +72,9 @@ async def get_fieldingstats(
if player_id:
all_stats = all_stats.where(BattingStat.player_id << player_id)
else:
p_query = Player.select_season(season).where(fn.Lower(Player.name) << [x.lower() for x in player_name])
p_query = Player.select_season(season).where(
fn.Lower(Player.name) << [x.lower() for x in player_name]
)
all_stats = all_stats.where(BattingStat.player << p_query)
if game_num:
all_stats = all_stats.where(BattingStat.game == game_num)
@ -60,73 +86,95 @@ async def get_fieldingstats(
if week_end is not None:
end = min(week_end, end)
if start > end:
db.close()
raise HTTPException(
status_code=404,
detail=f'Start week {start} is after end week {end} - cannot pull stats'
detail=f"Start week {start} is after end week {end} - cannot pull stats",
)
all_stats = all_stats.where(
(BattingStat.week >= start) & (BattingStat.week <= end)
)
all_stats = all_stats.where((BattingStat.week >= start) & (BattingStat.week <= end))
if limit:
all_stats = all_stats.limit(limit)
total_count = all_stats.count()
all_stats = all_stats.limit(limit)
if sort:
if sort == 'newest':
if sort == "newest":
all_stats = all_stats.order_by(-BattingStat.week, -BattingStat.game)
return_stats = {
'count': all_stats.count(),
'stats': [{
'player': x.player_id if short_output else model_to_dict(x.player, recurse=False),
'team': x.team_id if short_output else model_to_dict(x.team, recurse=False),
'pos': x.pos,
'xch': x.xch,
'xhit': x.xhit,
'error': x.error,
'pb': x.pb,
'sbc': x.sbc,
'csc': x.csc,
'week': x.week,
'game': x.game,
'season': x.season
} for x in all_stats]
"count": total_count,
"stats": [
{
"player": x.player_id
if short_output
else model_to_dict(x.player, recurse=False),
"team": x.team_id
if short_output
else model_to_dict(x.team, recurse=False),
"pos": x.pos,
"xch": x.xch,
"xhit": x.xhit,
"error": x.error,
"pb": x.pb,
"sbc": x.sbc,
"csc": x.csc,
"week": x.week,
"game": x.game,
"season": x.season,
}
for x in all_stats
],
}
db.close()
return return_stats
@router.get('/totals')
@router.get("/totals")
@handle_db_errors
async def get_totalstats(
season: int, s_type: Literal['regular', 'post', 'total', None] = None, team_abbrev: list = Query(default=None),
team_id: list = Query(default=None), player_name: list = Query(default=None),
week_start: Optional[int] = None, week_end: Optional[int] = None, game_num: list = Query(default=None),
position: list = Query(default=None), sort: Optional[str] = None, player_id: list = Query(default=None),
group_by: Literal['team', 'player', 'playerteam'] = 'player', short_output: Optional[bool] = False,
min_ch: Optional[int] = 1, week: list = Query(default=None)):
season: int,
s_type: Literal["regular", "post", "total", None] = None,
team_abbrev: list = Query(default=None),
team_id: list = Query(default=None),
player_name: list = Query(default=None),
week_start: Optional[int] = None,
week_end: Optional[int] = None,
game_num: list = Query(default=None),
position: list = Query(default=None),
sort: Optional[str] = None,
player_id: list = Query(default=None),
group_by: Literal["team", "player", "playerteam"] = "player",
short_output: Optional[bool] = False,
min_ch: Optional[int] = 1,
week: list = Query(default=None),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
# Build SELECT fields conditionally based on group_by to match GROUP BY exactly
select_fields = []
if group_by == 'player':
if group_by == "player":
select_fields = [BattingStat.player, BattingStat.pos]
elif group_by == 'team':
elif group_by == "team":
select_fields = [BattingStat.team, BattingStat.pos]
elif group_by == 'playerteam':
elif group_by == "playerteam":
select_fields = [BattingStat.player, BattingStat.team, BattingStat.pos]
else:
# Default case
select_fields = [BattingStat.player, BattingStat.pos]
all_stats = (
BattingStat
.select(*select_fields,
fn.SUM(BattingStat.xch).alias('sum_xch'),
fn.SUM(BattingStat.xhit).alias('sum_xhit'), fn.SUM(BattingStat.error).alias('sum_error'),
fn.SUM(BattingStat.pb).alias('sum_pb'), fn.SUM(BattingStat.sbc).alias('sum_sbc'),
fn.SUM(BattingStat.csc).alias('sum_csc'))
.where(BattingStat.season == season)
.having(fn.SUM(BattingStat.xch) >= min_ch)
BattingStat.select(
*select_fields,
fn.SUM(BattingStat.xch).alias("sum_xch"),
fn.SUM(BattingStat.xhit).alias("sum_xhit"),
fn.SUM(BattingStat.error).alias("sum_error"),
fn.SUM(BattingStat.pb).alias("sum_pb"),
fn.SUM(BattingStat.sbc).alias("sum_sbc"),
fn.SUM(BattingStat.csc).alias("sum_csc"),
)
.where(BattingStat.season == season)
.having(fn.SUM(BattingStat.xch) >= min_ch)
)
if True in [s_type is not None, week_start is not None, week_end is not None]:
@ -136,16 +184,20 @@ async def get_totalstats(
elif week_start is not None or week_end is not None:
if week_start is None or week_end is None:
raise HTTPException(
status_code=400, detail='Both week_start and week_end must be included if either is used.'
status_code=400,
detail="Both week_start and week_end must be included if either is used.",
)
weeks["start"] = week_start
if week_end < weeks["start"]:
raise HTTPException(
status_code=400,
detail="week_end must be greater than or equal to week_start",
)
weeks['start'] = week_start
if week_end < weeks['start']:
raise HTTPException(status_code=400, detail='week_end must be greater than or equal to week_start')
else:
weeks['end'] = week_end
weeks["end"] = week_end
all_stats = all_stats.where(
(BattingStat.week >= weeks['start']) & (BattingStat.week <= weeks['end'])
(BattingStat.week >= weeks["start"]) & (BattingStat.week <= weeks["end"])
)
elif week is not None:
@ -156,14 +208,20 @@ async def get_totalstats(
if position is not None:
p_list = [x.upper() for x in position]
all_players = Player.select().where(
(Player.pos_1 << p_list) | (Player.pos_2 << p_list) | (Player.pos_3 << p_list) | (Player.pos_4 << p_list) |
(Player.pos_5 << p_list) | (Player.pos_6 << p_list) | (Player.pos_7 << p_list) | (Player.pos_8 << p_list)
(Player.pos_1 << p_list)
| (Player.pos_2 << p_list)
| (Player.pos_3 << p_list)
| (Player.pos_4 << p_list)
| (Player.pos_5 << p_list)
| (Player.pos_6 << p_list)
| (Player.pos_7 << p_list)
| (Player.pos_8 << p_list)
)
all_stats = all_stats.where(BattingStat.player << all_players)
if sort is not None:
if sort == 'player':
if sort == "player":
all_stats = all_stats.order_by(BattingStat.player)
elif sort == 'team':
elif sort == "team":
all_stats = all_stats.order_by(BattingStat.team)
if group_by is not None:
# Use the same fields for GROUP BY as we used for SELECT
@ -172,46 +230,57 @@ async def get_totalstats(
all_teams = Team.select().where(Team.id << team_id)
all_stats = all_stats.where(BattingStat.team << all_teams)
elif team_abbrev is not None:
all_teams = Team.select().where(fn.Lower(Team.abbrev) << [x.lower() for x in team_abbrev])
all_teams = Team.select().where(
fn.Lower(Team.abbrev) << [x.lower() for x in team_abbrev]
)
all_stats = all_stats.where(BattingStat.team << all_teams)
if player_name is not None:
all_players = Player.select().where(fn.Lower(Player.name) << [x.lower() for x in player_name])
all_players = Player.select().where(
fn.Lower(Player.name) << [x.lower() for x in player_name]
)
all_stats = all_stats.where(BattingStat.player << all_players)
elif player_id is not None:
all_players = Player.select().where(Player.id << player_id)
all_stats = all_stats.where(BattingStat.player << all_players)
return_stats = {
'count': 0,
'stats': []
}
total_count = all_stats.count()
all_stats = all_stats.offset(offset).limit(limit)
return_stats = {"count": total_count, "stats": []}
for x in all_stats:
if x.sum_xch + x.sum_sbc <= 0:
continue
# Handle player field based on grouping with safe access
this_player = 'TOT'
if 'player' in group_by and hasattr(x, 'player'):
this_player = x.player_id if short_output else model_to_dict(x.player, recurse=False)
# Handle team field based on grouping with safe access
this_team = 'TOT'
if 'team' in group_by and hasattr(x, 'team'):
this_team = x.team_id if short_output else model_to_dict(x.team, recurse=False)
return_stats['stats'].append({
'player': this_player,
'team': this_team,
'pos': x.pos,
'xch': x.sum_xch,
'xhit': x.sum_xhit,
'error': x.sum_error,
'pb': x.sum_pb,
'sbc': x.sum_sbc,
'csc': x.sum_csc
})
return_stats['count'] = len(return_stats['stats'])
# Handle player field based on grouping with safe access
this_player = "TOT"
if "player" in group_by and hasattr(x, "player"):
this_player = (
x.player_id if short_output else model_to_dict(x.player, recurse=False)
)
# Handle team field based on grouping with safe access
this_team = "TOT"
if "team" in group_by and hasattr(x, "team"):
this_team = (
x.team_id if short_output else model_to_dict(x.team, recurse=False)
)
return_stats["stats"].append(
{
"player": this_player,
"team": this_team,
"pos": x.pos,
"xch": x.sum_xch,
"xhit": x.sum_xhit,
"error": x.sum_error,
"pb": x.sum_pb,
"sbc": x.sum_sbc,
"csc": x.sum_csc,
}
)
return_stats["count"] = len(return_stats["stats"])
db.close()
return return_stats

View File

@ -138,6 +138,8 @@ async def get_help_commands(
except Exception as e:
logger.error(f"Error getting help commands: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.post("/", include_in_schema=PRIVATE_IN_SCHEMA)
@ -185,6 +187,8 @@ async def create_help_command_endpoint(
except Exception as e:
logger.error(f"Error creating help command: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.put("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
@ -234,6 +238,8 @@ async def update_help_command_endpoint(
except Exception as e:
logger.error(f"Error updating help command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.patch("/{command_id}/restore", include_in_schema=PRIVATE_IN_SCHEMA)
@ -271,6 +277,8 @@ async def restore_help_command_endpoint(
except Exception as e:
logger.error(f"Error restoring help command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.delete("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
@ -301,6 +309,8 @@ async def delete_help_command_endpoint(
except Exception as e:
logger.error(f"Error deleting help command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.get("/stats")
@ -358,6 +368,8 @@ async def get_help_command_stats():
except Exception as e:
logger.error(f"Error getting help command stats: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
# Special endpoints for Discord bot integration
@ -390,6 +402,8 @@ async def get_help_command_by_name_endpoint(
except Exception as e:
logger.error(f"Error getting help command by name '{command_name}': {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.patch("/by_name/{command_name}/view", include_in_schema=PRIVATE_IN_SCHEMA)
@ -425,6 +439,8 @@ async def increment_view_count(command_name: str, token: str = Depends(oauth2_sc
except Exception as e:
logger.error(f"Error incrementing view count for '{command_name}': {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.get("/autocomplete")
@ -454,6 +470,8 @@ async def get_help_names_for_autocomplete(
except Exception as e:
logger.error(f"Error getting help names for autocomplete: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()
@router.get("/{command_id}")
@ -481,3 +499,5 @@ async def get_help_command(command_id: int):
except Exception as e:
logger.error(f"Error getting help command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))
finally:
db.close()

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -38,6 +40,8 @@ async def get_injuries(
is_active: bool = None,
short_output: bool = False,
sort: Optional[str] = "start-asc",
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_injuries = Injury.select()
@ -64,10 +68,14 @@ async def get_injuries(
elif sort == "start-desc":
all_injuries = all_injuries.order_by(-Injury.start_week, -Injury.start_game)
total_count = all_injuries.count()
all_injuries = all_injuries.offset(offset).limit(limit)
return_injuries = {
"count": all_injuries.count(),
"count": total_count,
"injuries": [model_to_dict(x, recurse=not short_output) for x in all_injuries],
}
db.close()
return return_injuries
@ -84,6 +92,7 @@ async def patch_injury(
this_injury = Injury.get_or_none(Injury.id == injury_id)
if this_injury is None:
db.close()
raise HTTPException(status_code=404, detail=f"Injury ID {injury_id} not found")
if is_active is not None:
@ -91,8 +100,10 @@ async def patch_injury(
if this_injury.save() == 1:
r_injury = model_to_dict(this_injury)
db.close()
return r_injury
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch injury {injury_id}"
)
@ -109,8 +120,10 @@ async def post_injury(new_injury: InjuryModel, token: str = Depends(oauth2_schem
if this_injury.save():
r_injury = model_to_dict(this_injury)
db.close()
return r_injury
else:
db.close()
raise HTTPException(status_code=500, detail=f"Unable to post injury")
@ -123,9 +136,11 @@ async def delete_injury(injury_id: int, token: str = Depends(oauth2_scheme)):
this_injury = Injury.get_or_none(Injury.id == injury_id)
if this_injury is None:
db.close()
raise HTTPException(status_code=404, detail=f"Injury ID {injury_id} not found")
count = this_injury.delete_instance()
db.close()
if count == 1:
return f"Injury {injury_id} has been deleted"

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -34,6 +36,8 @@ async def get_keepers(
team_id: list = Query(default=None),
player_id: list = Query(default=None),
short_output: bool = False,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_keepers = Keeper.select()
@ -44,10 +48,14 @@ async def get_keepers(
if player_id is not None:
all_keepers = all_keepers.where(Keeper.player_id << player_id)
total_count = all_keepers.count()
all_keepers = all_keepers.offset(offset).limit(limit)
return_keepers = {
"count": all_keepers.count(),
"count": total_count,
"keepers": [model_to_dict(x, recurse=not short_output) for x in all_keepers],
}
db.close()
return return_keepers
@ -77,8 +85,10 @@ async def patch_keeper(
if this_keeper.save():
r_keeper = model_to_dict(this_keeper)
db.close()
return r_keeper
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch keeper {keeper_id}"
)
@ -98,6 +108,7 @@ async def post_keepers(k_list: KeeperList, token: str = Depends(oauth2_scheme)):
with db.atomic():
for batch in chunked(new_keepers, 14):
Keeper.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_keepers)} keepers"
@ -114,6 +125,7 @@ async def delete_keeper(keeper_id: int, token: str = Depends(oauth2_scheme)):
raise HTTPException(status_code=404, detail=f"Keeper ID {keeper_id} not found")
count = this_keeper.delete_instance()
db.close()
if count == 1:
return f"Keeper ID {keeper_id} has been deleted"

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -29,6 +31,8 @@ async def get_managers(
name: list = Query(default=None),
active: Optional[bool] = None,
short_output: Optional[bool] = False,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
if active is not None:
current = Current.latest()
@ -61,7 +65,9 @@ async def get_managers(
i_mgr.append(z)
final_mgrs = [model_to_dict(y, recurse=not short_output) for y in i_mgr]
return_managers = {"count": len(final_mgrs), "managers": final_mgrs}
total_count = len(final_mgrs)
final_mgrs = final_mgrs[offset : offset + limit]
return_managers = {"count": total_count, "managers": final_mgrs}
else:
all_managers = Manager.select()
@ -69,13 +75,16 @@ async def get_managers(
name_list = [x.lower() for x in name]
all_managers = all_managers.where(fn.Lower(Manager.name) << name_list)
total_count = all_managers.count()
all_managers = all_managers.offset(offset).limit(limit)
return_managers = {
"count": all_managers.count(),
"count": total_count,
"managers": [
model_to_dict(x, recurse=not short_output) for x in all_managers
],
}
db.close()
return return_managers
@ -85,6 +94,7 @@ async def get_one_manager(manager_id: int, short_output: Optional[bool] = False)
this_manager = Manager.get_or_none(Manager.id == manager_id)
if this_manager is not None:
r_manager = model_to_dict(this_manager, recurse=not short_output)
db.close()
return r_manager
else:
raise HTTPException(status_code=404, detail=f"Manager {manager_id} not found")
@ -106,6 +116,7 @@ async def patch_manager(
this_manager = Manager.get_or_none(Manager.id == manager_id)
if this_manager is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Manager ID {manager_id} not found"
)
@ -121,8 +132,10 @@ async def patch_manager(
if this_manager.save() == 1:
r_manager = model_to_dict(this_manager)
db.close()
return r_manager
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch manager {this_manager}"
)
@ -139,8 +152,10 @@ async def post_manager(new_manager: ManagerModel, token: str = Depends(oauth2_sc
if this_manager.save():
r_manager = model_to_dict(this_manager)
db.close()
return r_manager
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to post manager {this_manager.name}"
)
@ -155,11 +170,13 @@ async def delete_manager(manager_id: int, token: str = Depends(oauth2_scheme)):
this_manager = Manager.get_or_none(Manager.id == manager_id)
if this_manager is None:
db.close()
raise HTTPException(
status_code=404, detail=f"Manager ID {manager_id} not found"
)
count = this_manager.delete_instance()
db.close()
if count == 1:
return f"Manager {manager_id} has been deleted"

View File

@ -19,6 +19,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -68,7 +70,7 @@ async def get_pitstats(
week_start: Optional[int] = None,
week_end: Optional[int] = None,
game_num: list = Query(default=None),
limit: Optional[int] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
ip_min: Optional[float] = None,
sort: Optional[str] = None,
short_output: Optional[bool] = True,
@ -76,14 +78,17 @@ async def get_pitstats(
if "post" in s_type.lower():
all_stats = PitchingStat.post_season(season)
if all_stats.count() == 0:
db.close()
return {"count": 0, "stats": []}
elif s_type.lower() in ["combined", "total", "all"]:
all_stats = PitchingStat.combined_season(season)
if all_stats.count() == 0:
db.close()
return {"count": 0, "stats": []}
else:
all_stats = PitchingStat.regular_season(season)
if all_stats.count() == 0:
db.close()
return {"count": 0, "stats": []}
if team_abbrev is not None:
@ -109,6 +114,7 @@ async def get_pitstats(
if week_end is not None:
end = min(week_end, end)
if start > end:
db.close()
raise HTTPException(
status_code=404,
detail=f"Start week {start} is after end week {end} - cannot pull stats",
@ -117,8 +123,7 @@ async def get_pitstats(
(PitchingStat.week >= start) & (PitchingStat.week <= end)
)
if limit:
all_stats = all_stats.limit(limit)
all_stats = all_stats.limit(limit)
if sort:
if sort == "newest":
all_stats = all_stats.order_by(-PitchingStat.week, -PitchingStat.game)
@ -128,6 +133,7 @@ async def get_pitstats(
"stats": [model_to_dict(x, recurse=not short_output) for x in all_stats],
}
db.close()
return return_stats
@ -149,6 +155,8 @@ async def get_totalstats(
short_output: Optional[bool] = False,
group_by: Literal["team", "player", "playerteam"] = "player",
week: list = Query(default=None),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
if sum(1 for x in [s_type, (week_start or week_end), week] if x is not None) > 1:
raise HTTPException(
@ -254,7 +262,10 @@ async def get_totalstats(
all_players = Player.select().where(Player.id << player_id)
all_stats = all_stats.where(PitchingStat.player << all_players)
return_stats = {"count": all_stats.count(), "stats": []}
total_count = all_stats.count()
all_stats = all_stats.offset(offset).limit(limit)
return_stats = {"count": total_count, "stats": []}
for x in all_stats:
# Handle player field based on grouping with safe access
@ -296,6 +307,7 @@ async def get_totalstats(
"bsv": x.sum_bsv,
}
)
db.close()
return return_stats
@ -313,6 +325,7 @@ async def patch_pitstats(
PitchingStat.update(**new_stats.dict()).where(PitchingStat.id == stat_id).execute()
r_stat = model_to_dict(PitchingStat.get_by_id(stat_id))
db.close()
return r_stat
@ -343,4 +356,5 @@ async def post_pitstats(s_list: PitStatList, token: str = Depends(oauth2_scheme)
for batch in chunked(all_stats, 15):
PitchingStat.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Added {len(all_stats)} batting lines"

View File

@ -6,7 +6,11 @@ Thin HTTP layer using PlayerService for business logic.
from fastapi import APIRouter, Query, Response, Depends
from typing import Optional, List
from ..dependencies import oauth2_scheme, cache_result, handle_db_errors
from ..dependencies import (
oauth2_scheme,
cache_result,
handle_db_errors,
)
from ..services.base import BaseService
from ..services.player_service import PlayerService
@ -24,9 +28,7 @@ async def get_players(
strat_code: list = Query(default=None),
is_injured: Optional[bool] = None,
sort: Optional[str] = None,
limit: Optional[int] = Query(
default=None, ge=1, description="Maximum number of results to return"
),
limit: Optional[int] = Query(default=None, ge=1),
offset: Optional[int] = Query(
default=None, ge=0, description="Number of results to skip for pagination"
),

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -42,6 +44,8 @@ async def get_results(
away_abbrev: list = Query(default=None),
home_abbrev: list = Query(default=None),
short_output: Optional[bool] = False,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_results = Result.select_season(season)
@ -74,10 +78,14 @@ async def get_results(
if week_end is not None:
all_results = all_results.where(Result.week <= week_end)
total_count = all_results.count()
all_results = all_results.offset(offset).limit(limit)
return_results = {
"count": all_results.count(),
"count": total_count,
"results": [model_to_dict(x, recurse=not short_output) for x in all_results],
}
db.close()
return return_results
@ -89,6 +97,7 @@ async def get_one_result(result_id: int, short_output: Optional[bool] = False):
r_result = model_to_dict(this_result, recurse=not short_output)
else:
r_result = None
db.close()
return r_result
@ -140,8 +149,10 @@ async def patch_result(
if this_result.save() == 1:
r_result = model_to_dict(this_result)
db.close()
return r_result
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch result {result_id}"
)
@ -181,6 +192,7 @@ async def post_results(result_list: ResultList, token: str = Depends(oauth2_sche
with db.atomic():
for batch in chunked(new_results, 15):
Result.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_results)} results"
@ -194,9 +206,11 @@ async def delete_result(result_id: int, token: str = Depends(oauth2_scheme)):
this_result = Result.get_or_none(Result.id == result_id)
if not this_result:
db.close()
raise HTTPException(status_code=404, detail=f"Result ID {result_id} not found")
count = this_result.delete_instance()
db.close()
if count == 1:
return f"Result {result_id} has been deleted"

View File

@ -12,6 +12,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -44,6 +46,8 @@ async def get_players(
key_mlbam: list = Query(default=None),
sort: Optional[str] = None,
csv: Optional[bool] = False,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_players = SbaPlayer.select()
@ -98,12 +102,17 @@ async def get_players(
if csv:
return_val = query_to_csv(all_players)
db.close()
return Response(content=return_val, media_type="text/csv")
total_count = all_players.count()
all_players = all_players.offset(offset).limit(limit)
return_val = {
"count": all_players.count(),
"count": total_count,
"players": [model_to_dict(x) for x in all_players],
}
db.close()
return return_val
@ -112,11 +121,13 @@ async def get_players(
async def get_one_player(player_id: int):
this_player = SbaPlayer.get_or_none(SbaPlayer.id == player_id)
if this_player is None:
db.close()
raise HTTPException(
status_code=404, detail=f"SbaPlayer id {player_id} not found"
)
r_data = model_to_dict(this_player)
db.close()
return r_data
@ -134,6 +145,7 @@ async def patch_player(
):
if not valid_token(token):
logging.warning(f"Bad Token: {token}")
db.close()
raise HTTPException(
status_code=401,
detail="You are not authorized to patch mlb players. This event has been logged.",
@ -141,6 +153,7 @@ async def patch_player(
this_player = SbaPlayer.get_or_none(SbaPlayer.id == player_id)
if this_player is None:
db.close()
raise HTTPException(
status_code=404, detail=f"SbaPlayer id {player_id} not found"
)
@ -160,8 +173,10 @@ async def patch_player(
if this_player.save() == 1:
return_val = model_to_dict(this_player)
db.close()
return return_val
else:
db.close()
raise HTTPException(
status_code=418,
detail="Well slap my ass and call me a teapot; I could not save that player",
@ -173,6 +188,7 @@ async def patch_player(
async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning(f"Bad Token: {token}")
db.close()
raise HTTPException(
status_code=401,
detail="You are not authorized to post mlb players. This event has been logged.",
@ -191,6 +207,7 @@ async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme))
)
if dupes.count() > 0:
logger.error(f"Found a dupe for {x}")
db.close()
raise HTTPException(
status_code=400,
detail=f"{x.first_name} {x.last_name} has a key already in the database",
@ -201,6 +218,7 @@ async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme))
with db.atomic():
for batch in chunked(new_players, 15):
SbaPlayer.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_players)} new MLB players"
@ -210,6 +228,7 @@ async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme))
async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning(f"Bad Token: {token}")
db.close()
raise HTTPException(
status_code=401,
detail="You are not authorized to post mlb players. This event has been logged.",
@ -224,6 +243,7 @@ async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_sc
logging.info(f"POST /SbaPlayers/one - dupes found:")
for x in dupes:
logging.info(f"{x}")
db.close()
raise HTTPException(
status_code=400,
detail=f"{player.first_name} {player.last_name} has a key already in the database",
@ -233,8 +253,10 @@ async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_sc
saved = new_player.save()
if saved == 1:
return_val = model_to_dict(new_player)
db.close()
return return_val
else:
db.close()
raise HTTPException(
status_code=418,
detail="Well slap my ass and call me a teapot; I could not save that player",
@ -246,6 +268,7 @@ async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_sc
async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)):
if not valid_token(token):
logging.warning(f"Bad Token: {token}")
db.close()
raise HTTPException(
status_code=401,
detail="You are not authorized to delete mlb players. This event has been logged.",
@ -253,11 +276,13 @@ async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)):
this_player = SbaPlayer.get_or_none(SbaPlayer.id == player_id)
if this_player is None:
db.close()
raise HTTPException(
status_code=404, detail=f"SbaPlayer id {player_id} not found"
)
count = this_player.delete_instance()
db.close()
if count == 1:
return f"Player {player_id} has been deleted"

View File

@ -9,6 +9,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -38,6 +40,8 @@ async def get_schedules(
week_start: Optional[int] = None,
week_end: Optional[int] = None,
short_output: Optional[bool] = True,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
all_sched = Schedule.select_season(season)
@ -69,10 +73,14 @@ async def get_schedules(
all_sched = all_sched.order_by(Schedule.id)
total_count = all_sched.count()
all_sched = all_sched.offset(offset).limit(limit)
return_sched = {
"count": all_sched.count(),
"count": total_count,
"schedules": [model_to_dict(x, recurse=not short_output) for x in all_sched],
}
db.close()
return return_sched
@ -84,6 +92,7 @@ async def get_one_schedule(schedule_id: int):
r_sched = model_to_dict(this_sched)
else:
r_sched = None
db.close()
return r_sched
@ -125,8 +134,10 @@ async def patch_schedule(
if this_sched.save() == 1:
r_sched = model_to_dict(this_sched)
db.close()
return r_sched
else:
db.close()
raise HTTPException(
status_code=500, detail=f"Unable to patch schedule {schedule_id}"
)
@ -166,6 +177,7 @@ async def post_schedules(sched_list: ScheduleList, token: str = Depends(oauth2_s
with db.atomic():
for batch in chunked(new_sched, 15):
Schedule.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_sched)} schedules"
@ -184,6 +196,7 @@ async def delete_schedule(schedule_id: int, token: str = Depends(oauth2_scheme))
)
count = this_sched.delete_instance()
db.close()
if count == 1:
return f"Schedule {this_sched} has been deleted"

View File

@ -8,6 +8,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -23,6 +25,8 @@ async def get_standings(
league_abbrev: Optional[str] = None,
division_abbrev: Optional[str] = None,
short_output: Optional[bool] = False,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
standings = Standings.select_season(season)
@ -57,11 +61,15 @@ async def get_standings(
div_teams = [x for x in standings]
div_teams.sort(key=lambda team: win_pct(team), reverse=True)
total_count = len(div_teams)
div_teams = div_teams[offset : offset + limit]
return_standings = {
"count": len(div_teams),
"count": total_count,
"standings": [model_to_dict(x, recurse=not short_output) for x in div_teams],
}
db.close()
return return_standings
@ -92,6 +100,7 @@ async def patch_standings(
try:
this_stan = Standings.get_by_id(stan_id)
except Exception as e:
db.close()
raise HTTPException(status_code=404, detail=f"No team found with id {stan_id}")
if wins:
@ -100,6 +109,7 @@ async def patch_standings(
this_stan.losses = losses
this_stan.save()
db.close()
return model_to_dict(this_stan)
@ -119,6 +129,7 @@ async def post_standings(season: int, token: str = Depends(oauth2_scheme)):
with db.atomic():
for batch in chunked(new_teams, 16):
Standings.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_teams)} standings"
@ -131,6 +142,7 @@ async def recalculate_standings(season: int, token: str = Depends(oauth2_scheme)
raise HTTPException(status_code=401, detail="Unauthorized")
code = Standings.recalculate(season)
db.close()
if code == 69:
raise HTTPException(status_code=500, detail=f"Error recreating Standings rows")
return f"Just recalculated standings for season {season}"

View File

@ -13,6 +13,8 @@ from ..dependencies import (
PRIVATE_IN_SCHEMA,
handle_db_errors,
update_season_batting_stats,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -59,6 +61,8 @@ async def get_games(
division_id: Optional[int] = None,
short_output: Optional[bool] = False,
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
) -> Any:
all_games = StratGame.select()
@ -119,10 +123,14 @@ async def get_games(
StratGame.season, StratGame.week, StratGame.game_num
)
total_count = all_games.count()
all_games = all_games.offset(offset).limit(limit)
return_games = {
"count": all_games.count(),
"count": total_count,
"games": [model_to_dict(x, recurse=not short_output) for x in all_games],
}
db.close()
return return_games
@ -131,9 +139,11 @@ async def get_games(
async def get_one_game(game_id: int) -> Any:
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
db.close()
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
g_result = model_to_dict(this_game)
db.close()
return g_result
@ -155,6 +165,7 @@ async def patch_game(
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
db.close()
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
if game_num is not None:
@ -249,6 +260,7 @@ async def post_games(game_list: GameList, token: str = Depends(oauth2_scheme)) -
with db.atomic():
for batch in chunked(new_games, 16):
StratGame.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_games)} games"
@ -262,6 +274,7 @@ async def wipe_game(game_id: int, token: str = Depends(oauth2_scheme)) -> Any:
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
db.close()
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
this_game.away_score = None
@ -272,8 +285,10 @@ async def wipe_game(game_id: int, token: str = Depends(oauth2_scheme)) -> Any:
if this_game.save() == 1:
g_result = model_to_dict(this_game)
db.close()
return g_result
else:
db.close()
raise HTTPException(status_code=500, detail=f"Unable to wipe game {game_id}")
@ -286,9 +301,11 @@ async def delete_game(game_id: int, token: str = Depends(oauth2_scheme)) -> Any:
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
db.close()
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
count = this_game.delete_instance()
db.close()
if count == 1:
return f"StratGame {game_id} has been deleted"

View File

@ -13,7 +13,13 @@ from ...db_engine import (
fn,
model_to_dict,
)
from ...dependencies import add_cache_headers, cache_result, handle_db_errors
from ...dependencies import (
add_cache_headers,
cache_result,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
router = APIRouter()
@ -52,7 +58,7 @@ async def get_batting_totals(
risp: Optional[bool] = None,
inning: list = Query(default=None),
sort: Optional[str] = None,
limit: Optional[int] = 200,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
short_output: Optional[bool] = False,
page_num: Optional[int] = 1,
week_start: Optional[int] = None,
@ -423,8 +429,6 @@ async def get_batting_totals(
run_plays = run_plays.order_by(StratPlay.game.asc())
# For other group_by values, skip game_id/play_num sorting since they're not in GROUP BY
if limit < 1:
limit = 1
bat_plays = bat_plays.paginate(page_num, limit)
logger.info(f"bat_plays query: {bat_plays}")
@ -594,4 +598,5 @@ async def get_batting_totals(
}
)
db.close()
return return_stats

View File

@ -20,8 +20,10 @@ logger = logging.getLogger("discord_app")
@handle_db_errors
async def get_one_play(play_id: int):
if StratPlay.get_or_none(StratPlay.id == play_id) is None:
db.close()
raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found")
r_play = model_to_dict(StratPlay.get_by_id(play_id))
db.close()
return r_play
@ -35,10 +37,12 @@ async def patch_play(
raise HTTPException(status_code=401, detail="Unauthorized")
if StratPlay.get_or_none(StratPlay.id == play_id) is None:
db.close()
raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found")
StratPlay.update(**new_play.dict()).where(StratPlay.id == play_id).execute()
r_play = model_to_dict(StratPlay.get_by_id(play_id))
db.close()
return r_play
@ -89,6 +93,7 @@ async def post_plays(p_list: PlayList, token: str = Depends(oauth2_scheme)):
with db.atomic():
for batch in chunked(new_plays, 20):
StratPlay.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"Inserted {len(new_plays)} plays"
@ -102,9 +107,11 @@ async def delete_play(play_id: int, token: str = Depends(oauth2_scheme)):
this_play = StratPlay.get_or_none(StratPlay.id == play_id)
if not this_play:
db.close()
raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found")
count = this_play.delete_instance()
db.close()
if count == 1:
return f"Play {play_id} has been deleted"
@ -123,9 +130,11 @@ async def delete_plays_game(game_id: int, token: str = Depends(oauth2_scheme)):
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
db.close()
raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found")
count = StratPlay.delete().where(StratPlay.game == this_game).execute()
db.close()
if count > 0:
return f"Deleted {count} plays matching Game ID {game_id}"
@ -146,4 +155,5 @@ async def post_erun_check(token: str = Depends(oauth2_scheme)):
(StratPlay.e_run == 1) & (StratPlay.run == 0)
)
count = all_plays.execute()
db.close()
return count

View File

@ -13,7 +13,13 @@ from ...db_engine import (
fn,
SQL,
)
from ...dependencies import handle_db_errors, add_cache_headers, cache_result
from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
logger = logging.getLogger("discord_app")
@ -51,7 +57,7 @@ async def get_fielding_totals(
team_id: list = Query(default=None),
manager_id: list = Query(default=None),
sort: Optional[str] = None,
limit: Optional[int] = 200,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
short_output: Optional[bool] = False,
page_num: Optional[int] = 1,
):
@ -237,8 +243,6 @@ async def get_fielding_totals(
def_plays = def_plays.order_by(StratPlay.game.asc())
# For other group_by values, skip game_id/play_num sorting since they're not in GROUP BY
if limit < 1:
limit = 1
def_plays = def_plays.paginate(page_num, limit)
logger.info(f"def_plays query: {def_plays}")
@ -361,4 +365,5 @@ async def get_fielding_totals(
"week": this_week,
}
)
db.close()
return return_stats

View File

@ -16,7 +16,13 @@ from ...db_engine import (
SQL,
complex_data_to_csv,
)
from ...dependencies import handle_db_errors, add_cache_headers, cache_result
from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
router = APIRouter()
@ -51,7 +57,7 @@ async def get_pitching_totals(
risp: Optional[bool] = None,
inning: list = Query(default=None),
sort: Optional[str] = None,
limit: Optional[int] = 200,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
short_output: Optional[bool] = False,
csv: Optional[bool] = False,
page_num: Optional[int] = 1,
@ -164,8 +170,6 @@ async def get_pitching_totals(
if group_by in ["playergame", "teamgame"]:
pitch_plays = pitch_plays.order_by(StratPlay.game.asc())
if limit < 1:
limit = 1
pitch_plays = pitch_plays.paginate(page_num, limit)
# Execute the Peewee query
@ -348,6 +352,7 @@ async def get_pitching_totals(
)
return_stats["count"] = len(return_stats["stats"])
db.close()
if csv:
return Response(
content=complex_data_to_csv(return_stats["stats"]), media_type="text/csv"

View File

@ -16,6 +16,8 @@ from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -70,7 +72,7 @@ async def get_plays(
pitcher_team_id: list = Query(default=None),
short_output: Optional[bool] = False,
sort: Optional[str] = None,
limit: Optional[int] = 200,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
page_num: Optional[int] = 1,
s_type: Literal["regular", "post", "total", None] = None,
):
@ -185,8 +187,6 @@ async def get_plays(
season_games = season_games.where(StratGame.week > 18)
all_plays = all_plays.where(StratPlay.game << season_games)
if limit < 1:
limit = 1
bat_plays = all_plays.paginate(page_num, limit)
if sort == "wpa-desc":
@ -210,4 +210,5 @@ async def get_plays(
"count": all_plays.count(),
"plays": [model_to_dict(x, recurse=not short_output) for x in all_plays],
}
db.close()
return return_plays

View File

@ -11,6 +11,8 @@ from ..dependencies import (
PRIVATE_IN_SCHEMA,
handle_db_errors,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from ..services.base import BaseService
from ..services.team_service import TeamService

View File

@ -10,6 +10,8 @@ from ..dependencies import (
valid_token,
PRIVATE_IN_SCHEMA,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -36,7 +38,7 @@ class TransactionList(pydantic.BaseModel):
@router.get("")
@handle_db_errors
async def get_transactions(
season,
season: int,
team_abbrev: list = Query(default=None),
week_start: Optional[int] = 0,
week_end: Optional[int] = None,
@ -45,8 +47,9 @@ async def get_transactions(
player_name: list = Query(default=None),
player_id: list = Query(default=None),
move_id: Optional[str] = None,
is_trade: Optional[bool] = None,
short_output: Optional[bool] = False,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = Query(default=0, ge=0),
):
if season:
transactions = Transaction.select_season(season)
@ -84,20 +87,21 @@ async def get_transactions(
else:
transactions = transactions.where(Transaction.frozen == 0)
if is_trade is not None:
raise HTTPException(
status_code=501, detail="The is_trade parameter is not implemented, yet"
)
transactions = transactions.order_by(-Transaction.week, Transaction.moveid)
total_count = transactions.count()
transactions = transactions.offset(offset).limit(limit)
return_trans = {
"count": transactions.count(),
"count": total_count,
"limit": limit,
"offset": offset,
"transactions": [
model_to_dict(x, recurse=not short_output) for x in transactions
],
}
db.close()
return return_trans
@ -115,6 +119,7 @@ async def patch_transactions(
these_moves = Transaction.select().where(Transaction.moveid == move_id)
if these_moves.count() == 0:
db.close()
raise HTTPException(status_code=404, detail=f"Move ID {move_id} not found")
if frozen is not None:
@ -126,6 +131,7 @@ async def patch_transactions(
x.cancelled = cancelled
x.save()
db.close()
return f"Updated {these_moves.count()} transactions"
@ -175,6 +181,7 @@ async def post_transactions(
for batch in chunked(all_moves, 15):
Transaction.insert_many(batch).on_conflict_ignore().execute()
db.close()
return f"{len(all_moves)} transactions have been added"
@ -188,6 +195,7 @@ async def delete_transactions(move_id, token: str = Depends(oauth2_scheme)):
delete_query = Transaction.delete().where(Transaction.moveid == move_id)
count = delete_query.execute()
db.close()
if count > 0:
return f"Removed {count} transactions"
else:

View File

@ -26,6 +26,8 @@ from ..dependencies import (
update_season_batting_stats,
update_season_pitching_stats,
get_cache_stats,
MAX_LIMIT,
DEFAULT_LIMIT,
)
logger = logging.getLogger("discord_app")
@ -72,7 +74,7 @@ async def get_season_batting_stats(
"cs",
] = "woba", # Sort field
sort_order: Literal["asc", "desc"] = "desc", # asc or desc
limit: Optional[int] = 200,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = 0,
csv: Optional[bool] = False,
):
@ -218,7 +220,7 @@ async def get_season_pitching_stats(
"re24",
] = "era", # Sort field
sort_order: Literal["asc", "desc"] = "asc", # asc or desc (asc default for ERA)
limit: Optional[int] = 200,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
offset: int = 0,
csv: Optional[bool] = False,
):

View File

@ -81,9 +81,9 @@ class TestRouteRegistration:
for route, methods in EXPECTED_PLAY_ROUTES.items():
assert route in paths, f"Route {route} missing from OpenAPI schema"
for method in methods:
assert (
method in paths[route]
), f"Method {method.upper()} missing for {route}"
assert method in paths[route], (
f"Method {method.upper()} missing for {route}"
)
def test_play_routes_have_plays_tag(self, api):
"""All play routes should be tagged with 'plays'."""
@ -96,9 +96,9 @@ class TestRouteRegistration:
for method, spec in paths[route].items():
if method in ("get", "post", "patch", "delete"):
tags = spec.get("tags", [])
assert (
"plays" in tags
), f"{method.upper()} {route} missing 'plays' tag, has {tags}"
assert "plays" in tags, (
f"{method.upper()} {route} missing 'plays' tag, has {tags}"
)
@pytest.mark.post_deploy
@pytest.mark.skip(
@ -124,9 +124,9 @@ class TestRouteRegistration:
]:
params = paths[route]["get"].get("parameters", [])
param_names = [p["name"] for p in params]
assert (
"sbaplayer_id" in param_names
), f"sbaplayer_id parameter missing from {route}"
assert "sbaplayer_id" in param_names, (
f"sbaplayer_id parameter missing from {route}"
)
# ---------------------------------------------------------------------------
@ -493,10 +493,9 @@ class TestPlayCrud:
assert result["id"] == play_id
def test_get_nonexistent_play(self, api):
"""GET /plays/999999999 returns an error (wrapped by handle_db_errors)."""
"""GET /plays/999999999 returns 404 Not Found."""
r = requests.get(f"{api}/api/v3/plays/999999999", timeout=10)
# handle_db_errors wraps HTTPException as 500 with detail message
assert r.status_code == 500
assert r.status_code == 404
assert "not found" in r.json().get("detail", "").lower()
@ -575,9 +574,9 @@ class TestGroupBySbaPlayer:
)
assert r_seasons.status_code == 200
season_pas = [s["pa"] for s in r_seasons.json()["stats"]]
assert career_pa >= max(
season_pas
), f"Career PA ({career_pa}) should be >= max season PA ({max(season_pas)})"
assert career_pa >= max(season_pas), (
f"Career PA ({career_pa}) should be >= max season PA ({max(season_pas)})"
)
@pytest.mark.post_deploy
def test_batting_sbaplayer_short_output(self, api):

View File

@ -0,0 +1,154 @@
"""
Tests for query limit/offset parameter validation and middleware behavior.
Verifies that:
- FastAPI enforces MAX_LIMIT cap (returns 422 for limit > 500)
- FastAPI enforces ge=1 on limit (returns 422 for limit=0 or limit=-1)
- Transactions endpoint returns limit/offset keys in the response
- strip_empty_query_params middleware treats ?param= as absent
These tests exercise FastAPI parameter validation which fires before any
handler code runs, so most tests don't require a live DB connection.
The app imports redis and psycopg2 at module level, so we mock those
system-level packages before importing app.main.
"""
import sys
import pytest
from unittest.mock import MagicMock, patch
# ---------------------------------------------------------------------------
# Install MagicMock stand-ins for C-extension / system packages that are not
# available in the test environment. Must run before any app code is imported.
# setdefault keeps a real module if one is somehow already loaded.
# ---------------------------------------------------------------------------
def _install_stub(module_name: str) -> MagicMock:
    """Register a MagicMock under *module_name* in sys.modules and return it."""
    stub = MagicMock()
    sys.modules.setdefault(module_name, stub)
    return stub


_redis_stub = _install_stub("redis")
_redis_stub.Redis = MagicMock(return_value=MagicMock(ping=MagicMock(return_value=True)))

_psycopg2_stub = _install_stub("psycopg2")

_playhouse_pool_stub = _install_stub("playhouse.pool")
_playhouse_pool_stub.PooledPostgresqlDatabase = MagicMock()

_pandas_stub = _install_stub("pandas")
_pandas_stub.DataFrame = MagicMock()
@pytest.fixture(scope="module")
def client():
    """
    Module-scoped TestClient with the Peewee ``db`` object replaced by a mock.

    This lets the app be imported without a running PostgreSQL instance.
    FastAPI validates query parameters before any handler code executes, so
    422 responses never need a real database connection.
    """
    fake_db = MagicMock()
    fake_db.configure_mock(
        **{
            "is_closed.return_value": False,
            "connect.return_value": None,
            "close.return_value": None,
        }
    )
    with patch("app.db_engine.db", fake_db):
        from fastapi.testclient import TestClient
        from app.main import app

        with TestClient(app, raise_server_exceptions=False) as test_client:
            yield test_client
def test_limit_exceeds_max_returns_422(client):
    """
    A limit above MAX_LIMIT on GET /api/v3/decisions must be rejected.

    MAX_LIMIT is 500; the endpoint declares
    limit: int = Query(ge=1, le=MAX_LIMIT), so FastAPI's parameter validation
    rejects values > 500 with a 422 before any handler code runs.
    """
    resp = client.get("/api/v3/decisions?limit=1000")
    assert resp.status_code == 422
def test_limit_zero_returns_422(client):
    """
    GET /api/v3/decisions with limit=0 must return 422.

    The ge=1 constraint on the limit query parameter rejects zero.
    """
    resp = client.get("/api/v3/decisions?limit=0")
    assert resp.status_code == 422
def test_limit_negative_returns_422(client):
    """
    GET /api/v3/decisions with limit=-1 must return 422.

    The ge=1 constraint on the limit query parameter rejects negatives.
    """
    resp = client.get("/api/v3/decisions?limit=-1")
    assert resp.status_code == 422
def test_transactions_has_limit_in_response(client):
    """
    GET /api/v3/transactions?season=12 should echo pagination metadata.

    The transactions endpoint returns 'limit' and 'offset' keys alongside
    the result set so callers know which page size was actually applied.
    """
    # Self-chaining query-set mock: every filter/sort/paginate call returns
    # itself, and iterating it yields no rows.
    qs = MagicMock()
    qs.count.return_value = 0
    qs.where.return_value = qs
    qs.order_by.return_value = qs
    qs.offset.return_value = qs
    qs.limit.return_value = qs
    qs.__iter__ = MagicMock(return_value=iter([]))

    with (
        patch("app.routers_v3.transactions.Transaction") as txn_model,
        patch("app.routers_v3.transactions.Team") as team_model,
        patch("app.routers_v3.transactions.Player") as player_model,
    ):
        txn_model.select_season.return_value = qs
        txn_model.select.return_value = qs
        team_model.select.return_value = qs
        player_model.select.return_value = qs

        reply = client.get("/api/v3/transactions?season=12")

    # If the mock is sufficient the response is 200 with pagination keys;
    # if some DB path still fires we at least confirm limit param is accepted.
    assert reply.status_code != 422
    if reply.status_code == 200:
        body = reply.json()
        assert "limit" in body, "Response missing 'limit' key"
        assert "offset" in body, "Response missing 'offset' key"
def test_empty_string_param_stripped(client):
    """
    An empty-valued query param must behave as if the param was omitted.

    The strip_empty_query_params middleware rewrites the query string before
    FastAPI parses it, so ?league_abbrev= is removed entirely rather than
    forwarded to the handler as an empty string.

    Expected: the request is accepted (not 422) and the empty param ignored.
    """
    qs = MagicMock()
    qs.count.return_value = 0
    qs.where.return_value = qs
    qs.__iter__ = MagicMock(return_value=iter([]))

    with patch("app.routers_v3.standings.Standings") as standings_model:
        standings_model.select_season.return_value = qs
        # ?league_abbrev= should be stripped → treated as absent (None), not ""
        reply = client.get("/api/v3/standings?season=12&league_abbrev=")

    assert reply.status_code != 422, (
        "Empty string query param caused a 422 — middleware may not be stripping it"
    )