Compare commits

..

18 Commits

Author SHA1 Message Date
cal
f9e24eb4bc Merge pull request 'fix: update test limit to respect MAX_LIMIT=500 (#110)' (#112) from issue/110-fix-test-batting-sbaplayer-career-totals-returns-4 into main
Reviewed-on: #112
2026-04-08 12:55:55 +00:00
Cal Corum
36b962e5d5 fix: update test limit to respect MAX_LIMIT=500 (#110)
Closes #110

The test was sending limit=999 which exceeds MAX_LIMIT (500), causing
FastAPI to return 422. Changed to limit=500, which is sufficient to
cover all seasons for any player.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-08 12:55:35 +00:00
cal
5d5df325bc Merge pull request 'feat: increase MAX_LIMIT to 1000 for plays batting/fielding/pitching (#111)' (#113) from issue/111-feat-increase-max-limit-to-1000-for-plays-fielding into main
Reviewed-on: #113
2026-04-08 12:53:38 +00:00
Cal Corum
682b990321 feat: increase MAX_LIMIT to 1000 for plays batting/fielding/pitching (#111)
Closes #111

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-08 06:32:46 -05:00
cal
5b19bd486a Merge pull request 'fix: preserve total_count in get_totalstats instead of overwriting with page length (#101)' (#102) from issue/101-fieldingstats-get-totalstats-total-count-overwritt into main 2026-04-08 04:08:40 +00:00
Cal Corum
718abc0096 fix: preserve total_count in get_totalstats instead of overwriting with page length (#101)
Closes #101

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 23:08:10 -05:00
cal
52d88ae950 Merge pull request 'fix: add missing indexes on FK columns in stratplay and stratgame (#74)' (#95) from issue/74-add-missing-indexes-on-foreign-key-columns-in-high into main 2026-04-08 04:06:06 +00:00
Cal Corum
9165419ed0 fix: add missing indexes on FK columns in stratplay and stratgame (#74)
Closes #74

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 23:05:25 -05:00
cal
d23d6520c3 Merge pull request 'fix: batch standings updates to eliminate N+1 queries in recalculate (#75)' (#93) from issue/75-fix-n-1-query-pattern-in-standings-recalculation into main 2026-04-08 04:03:39 +00:00
Cal Corum
c23ca9a721 fix: batch standings updates to eliminate N+1 queries in recalculate (#75)
Replace per-game update_standings() calls with pre-fetched dicts and
in-memory accumulation, then a single bulk_update at the end.
Reduces ~1,100+ queries for a full season to ~5 queries.

Closes #75

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 23:03:11 -05:00
cal
1db06576cc Merge pull request 'fix: replace integer comparisons on boolean fields with True/False (#69)' (#94) from issue/69-boolean-fields-compared-as-integers-sqlite-pattern into main 2026-04-08 03:57:35 +00:00
Cal Corum
7a5327f490 fix: replace integer comparisons on boolean fields with True/False (#69)
Closes #69

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:57:01 -05:00
cal
a2889751da Merge pull request 'fix: remove SQLite fallback code from db_engine.py (#70)' (#89) from issue/70-remove-sqlite-fallback-code-from-db-engine-py into main 2026-04-08 03:56:11 +00:00
Cal Corum
eb886a4690 fix: remove SQLite fallback code from db_engine.py (#70)
Removes DATABASE_TYPE conditional entirely. PostgreSQL is now the only
supported backend. Moves PooledPostgresqlDatabase import to top-level
and raises RuntimeError at startup if POSTGRES_PASSWORD is unset,
preventing silent misconnection with misleading errors.

Closes #70

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:55:44 -05:00
cal
0ee7367bc0 Merge pull request 'fix: disable autoconnect and set pool timeout on PooledPostgresqlDatabase (#80)' (#87) from issue/80-disable-autoconnect-and-set-pool-timeout-on-pooled into main 2026-04-08 03:55:05 +00:00
Cal Corum
6637f6e9eb fix: disable autoconnect and set pool timeout on PooledPostgresqlDatabase (#80)
- Set timeout=5 so pool exhaustion surfaces as an error instead of hanging forever
- Set autoconnect=False to require explicit connection acquisition
- Add HTTP middleware in main.py to open/close connections per request

Closes #80

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:54:27 -05:00
cal
fa176c9b05 Merge pull request 'fix: enforce Literal validation on sort parameter in GET /api/v3/players (#66)' (#68) from ai/major-domo-database-66 into main 2026-04-08 03:54:02 +00:00
Cal Corum
ece25ec22c fix: enforce Literal validation on sort parameter in GET /api/v3/players (#66)
Closes #66

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:53:33 -05:00
14 changed files with 302 additions and 116 deletions

View File

@@ -8,43 +8,43 @@ from typing import Literal, List, Optional
from pandas import DataFrame
from peewee import *
from peewee import ModelSelect
from playhouse.pool import PooledPostgresqlDatabase
from playhouse.shortcuts import model_to_dict
# Database configuration - supports both SQLite and PostgreSQL
DATABASE_TYPE = os.environ.get("DATABASE_TYPE", "sqlite")
if DATABASE_TYPE.lower() == "postgresql":
from playhouse.pool import PooledPostgresqlDatabase
_postgres_password = os.environ.get("POSTGRES_PASSWORD")
if _postgres_password is None:
raise RuntimeError(
"POSTGRES_PASSWORD environment variable is not set. "
"This variable is required when DATABASE_TYPE=postgresql."
)
db = PooledPostgresqlDatabase(
os.environ.get("POSTGRES_DB", "sba_master"),
user=os.environ.get("POSTGRES_USER", "sba_admin"),
password=_postgres_password,
host=os.environ.get("POSTGRES_HOST", "sba_postgres"),
port=int(os.environ.get("POSTGRES_PORT", "5432")),
max_connections=20,
stale_timeout=300, # 5 minutes
timeout=0,
autoconnect=True,
autorollback=True, # Automatically rollback failed transactions
)
else:
# Default SQLite configuration
db = SqliteDatabase(
"storage/sba_master.db",
pragmas={"journal_mode": "wal", "cache_size": -1 * 64000, "synchronous": 0},
_postgres_password = os.environ.get("POSTGRES_PASSWORD")
if _postgres_password is None:
raise RuntimeError(
"POSTGRES_PASSWORD environment variable is not set. "
"This variable is required when DATABASE_TYPE=postgresql."
)
db = PooledPostgresqlDatabase(
os.environ.get("POSTGRES_DB", "sba_master"),
user=os.environ.get("POSTGRES_USER", "sba_admin"),
password=_postgres_password,
host=os.environ.get("POSTGRES_HOST", "sba_postgres"),
port=int(os.environ.get("POSTGRES_PORT", "5432")),
max_connections=20,
stale_timeout=300, # 5 minutes
timeout=5,
autoconnect=False,
autorollback=True, # Automatically rollback failed transactions
)
date = f"{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}"
logger = logging.getLogger("discord_app")
"""
Per season updates:
Result: regular_season & post_season - set season length
update_standings - confirm division alignments and records
Standings: recalculate - e_number function, set season length
- wildcard section, set league abbrevs
"""
def model_csv_headers(this_obj, exclude=None) -> List:
data = model_to_dict(this_obj, recurse=False, exclude=exclude)
return [x for x in data.keys()]
@@ -197,6 +197,22 @@ class Division(BaseModel):
# Assign div_gb and e_num
for x in range(len(div_teams)):
# # Used for two playoff teams per divsion
# # Special calculations for the division leader
# if x == 0:
# div_teams[0].div_gb = -games_back(div_teams[0], div_teams[2])
# div_teams[0].div_e_num = None
# div_teams[0].wc_gb = None
# div_teams[0].wc_e_num = None
# elif x == 1:
# div_teams[1].div_gb = 0
# div_teams[1].div_e_num = None
# div_teams[1].wc_gb = None
# div_teams[1].wc_e_num = None
# else:
# div_teams[x].div_gb = games_back(div_teams[1], div_teams[x])
# div_teams[x].div_e_num = e_number(div_teams[1], div_teams[x])
# Used for one playoff team per division
if x == 0:
div_teams[0].div_gb = None
div_teams[0].div_e_num = None
@@ -498,12 +514,12 @@ class Team(BaseModel):
all_drops = Transaction.select_season(Current.latest().season).where(
(Transaction.oldteam == self)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
all_adds = Transaction.select_season(Current.latest().season).where(
(Transaction.newteam == self)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
for move in all_drops:
@@ -518,7 +534,7 @@ class Team(BaseModel):
try:
active_roster["players"].remove(move.player)
except Exception:
logger.error(f"I could not drop {move.player.name}")
print(f"I could not drop {move.player.name}")
for move in all_adds:
guy_pos = move.player.get_positions()
@@ -581,12 +597,12 @@ class Team(BaseModel):
all_drops = Transaction.select_season(Current.latest().season).where(
(Transaction.oldteam == sil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
all_adds = Transaction.select_season(Current.latest().season).where(
(Transaction.newteam == sil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
for move in all_drops:
@@ -601,7 +617,7 @@ class Team(BaseModel):
try:
short_roster["players"].remove(move.player)
except Exception:
logger.error(f"I could not drop {move.player.name}")
print(f"I could not drop {move.player.name}")
for move in all_adds:
guy_pos = move.player.get_positions()
@@ -664,12 +680,12 @@ class Team(BaseModel):
all_drops = Transaction.select_season(Current.latest().season).where(
(Transaction.oldteam == lil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
all_adds = Transaction.select_season(Current.latest().season).where(
(Transaction.newteam == lil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
for move in all_drops:
@@ -684,7 +700,7 @@ class Team(BaseModel):
try:
long_roster["players"].remove(move.player)
except Exception:
logger.error(f"I could not drop {move.player.name}")
print(f"I could not drop {move.player.name}")
for move in all_adds:
guy_pos = move.player.get_positions()
@@ -998,7 +1014,7 @@ class Player(BaseModel):
fn.Lower(Player.name) == name.lower(), Player.season == num
)
except Exception as e:
logger.error(f"**Error** (db_engine player): {e}")
print(f"**Error** (db_engine player): {e}")
finally:
return player
@@ -1508,7 +1524,18 @@ class Standings(BaseModel):
with db.atomic():
Standings.bulk_create(create_teams)
# Iterate through each individual result
# Pre-fetch all data needed for in-memory processing (avoids N+1 queries)
standings_by_team_id = {
s.team_id: s
for s in Standings.select().where(Standings.team << s_teams)
}
teams_by_id = {t.id: t for t in Team.select().where(Team.season == season)}
divisions_by_id = {
d.id: d
for d in Division.select().where(Division.season == season)
}
# Iterate through each individual result, tallying wins/losses in memory
# for game in Result.select_season(season).where(Result.week <= 22):
for game in (
StratGame.select()
@@ -1519,8 +1546,121 @@
)
.order_by(StratGame.week, StratGame.game_num)
):
# tally win and loss for each standings object
game.update_standings()
away_stan = standings_by_team_id.get(game.away_team_id)
home_stan = standings_by_team_id.get(game.home_team_id)
away_team_obj = teams_by_id.get(game.away_team_id)
home_team_obj = teams_by_id.get(game.home_team_id)
if None in (away_stan, home_stan, away_team_obj, home_team_obj):
continue
away_div = divisions_by_id.get(away_team_obj.division_id)
home_div = divisions_by_id.get(home_team_obj.division_id)
if away_div is None or home_div is None:
continue
# Home Team Won
if game.home_score > game.away_score:
home_stan.wins += 1
home_stan.home_wins += 1
away_stan.losses += 1
away_stan.away_losses += 1
if home_stan.streak_wl == 'w':
home_stan.streak_num += 1
else:
home_stan.streak_wl = 'w'
home_stan.streak_num = 1
if away_stan.streak_wl == 'l':
away_stan.streak_num += 1
else:
away_stan.streak_wl = 'l'
away_stan.streak_num = 1
if game.home_score == game.away_score + 1:
home_stan.one_run_wins += 1
away_stan.one_run_losses += 1
if away_div.division_abbrev == 'TC':
home_stan.div1_wins += 1
elif away_div.division_abbrev == 'ETSOS':
home_stan.div2_wins += 1
elif away_div.division_abbrev == 'APL':
home_stan.div3_wins += 1
elif away_div.division_abbrev == 'BBC':
home_stan.div4_wins += 1
if home_div.division_abbrev == 'TC':
away_stan.div1_losses += 1
elif home_div.division_abbrev == 'ETSOS':
away_stan.div2_losses += 1
elif home_div.division_abbrev == 'APL':
away_stan.div3_losses += 1
elif home_div.division_abbrev == 'BBC':
away_stan.div4_losses += 1
home_stan.run_diff += game.home_score - game.away_score
away_stan.run_diff -= game.home_score - game.away_score
# Away Team Won
else:
home_stan.losses += 1
home_stan.home_losses += 1
away_stan.wins += 1
away_stan.away_wins += 1
if home_stan.streak_wl == 'l':
home_stan.streak_num += 1
else:
home_stan.streak_wl = 'l'
home_stan.streak_num = 1
if away_stan.streak_wl == 'w':
away_stan.streak_num += 1
else:
away_stan.streak_wl = 'w'
away_stan.streak_num = 1
if game.away_score == game.home_score + 1:
home_stan.one_run_losses += 1
away_stan.one_run_wins += 1
if away_div.division_abbrev == 'TC':
home_stan.div1_losses += 1
elif away_div.division_abbrev == 'ETSOS':
home_stan.div2_losses += 1
elif away_div.division_abbrev == 'APL':
home_stan.div3_losses += 1
elif away_div.division_abbrev == 'BBC':
home_stan.div4_losses += 1
if home_div.division_abbrev == 'TC':
away_stan.div1_wins += 1
elif home_div.division_abbrev == 'ETSOS':
away_stan.div2_wins += 1
elif home_div.division_abbrev == 'APL':
away_stan.div3_wins += 1
elif home_div.division_abbrev == 'BBC':
away_stan.div4_wins += 1
home_stan.run_diff -= game.away_score - game.home_score
away_stan.run_diff += game.away_score - game.home_score
# Bulk save all modified standings
with db.atomic():
Standings.bulk_update(
list(standings_by_team_id.values()),
fields=[
Standings.wins, Standings.losses,
Standings.home_wins, Standings.home_losses,
Standings.away_wins, Standings.away_losses,
Standings.one_run_wins, Standings.one_run_losses,
Standings.streak_wl, Standings.streak_num,
Standings.run_diff,
Standings.div1_wins, Standings.div1_losses,
Standings.div2_wins, Standings.div2_losses,
Standings.div3_wins, Standings.div3_losses,
Standings.div4_wins, Standings.div4_losses,
]
)
# Set pythag record and iterate through last 8 games for last8 record
for team in all_teams:
@@ -2342,6 +2482,12 @@ class StratGame(BaseModel):
home_stan.save()
away_stan.save()
class Meta:
indexes = (
(("season",), False),
(("season", "week", "game_num"), False),
)
class StratPlay(BaseModel):
game = ForeignKeyField(StratGame)
@@ -2416,6 +2562,14 @@ class StratPlay(BaseModel):
re24_primary = FloatField(null=True)
re24_running = FloatField(null=True)
class Meta:
indexes = (
(("game",), False),
(("batter",), False),
(("pitcher",), False),
(("runner",), False),
)
class Decision(BaseModel):
game = ForeignKeyField(StratGame)

View File

@@ -14,6 +14,13 @@ from redis import Redis
date = f"{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}"
logger = logging.getLogger("discord_app")
# date = f'{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}'
# log_level = logger.info if os.environ.get('LOG_LEVEL') == 'INFO' else 'WARN'
# logging.basicConfig(
# filename=f'logs/database/{date}.log',
# format='%(asctime)s - sba-database - %(levelname)s - %(message)s',
# level=log_level
# )
# Discord integration
DISCORD_WEBHOOK_URL = os.environ.get("DISCORD_WEBHOOK_URL")

View File

@@ -8,33 +8,10 @@ from fastapi import Depends, FastAPI, Request
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.openapi.utils import get_openapi
from .routers_v3 import (
current,
players,
results,
schedules,
standings,
teams,
transactions,
battingstats,
pitchingstats,
fieldingstats,
draftpicks,
draftlist,
managers,
awards,
draftdata,
keepers,
stratgame,
stratplay,
injuries,
decisions,
divisions,
sbaplayers,
custom_commands,
help_commands,
views,
)
from .db_engine import db
# from fastapi.openapi.docs import get_swagger_ui_html
# from fastapi.openapi.utils import get_openapi
from .routers_v3 import (
current,
@@ -64,7 +41,13 @@ from .routers_v3 import (
views,
)
# date = f'{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}'
log_level = logging.INFO if os.environ.get("LOG_LEVEL") == "INFO" else logging.WARNING
# logging.basicConfig(
# filename=f'logs/database/{date}.log',
# format='%(asctime)s - sba-database - %(levelname)s - %(message)s',
# level=log_level
# )
logger = logging.getLogger("discord_app")
logger.setLevel(log_level)
@@ -87,6 +70,17 @@ app = FastAPI(
)
@app.middleware("http")
async def db_connection_middleware(request: Request, call_next):
db.connect(reuse_if_open=True)
try:
response = await call_next(request)
finally:
if not db.is_closed():
db.close()
return response
logger.info(f"Starting up now...")
@@ -157,3 +151,7 @@ async def get_docs(req: Request):
async def openapi():
return get_openapi(title="SBa API Docs", version=f"0.1.1", routes=app.routes)
# @app.get("/api")
# async def root():
# return {"message": "Hello Bigger Applications!"}

View File

@@ -93,17 +93,14 @@ async def get_batstats(
if "post" in s_type.lower():
all_stats = BattingStat.post_season(season)
if all_stats.count() == 0:
return {"count": 0, "stats": []}
elif s_type.lower() in ["combined", "total", "all"]:
all_stats = BattingStat.combined_season(season)
if all_stats.count() == 0:
return {"count": 0, "stats": []}
else:
all_stats = BattingStat.regular_season(season)
if all_stats.count() == 0:
return {"count": 0, "stats": []}
if position is not None:
@@ -351,6 +348,11 @@ async def get_totalstats(
return return_stats
# @router.get('/career/{player_name}')
# async def get_careerstats(
# s_type: Literal['regular', 'post', 'total'] = 'regular', player_name: list = Query(default=None)):
# pass # Keep Career Stats table and recalculate after posting stats
@router.patch("/{stat_id}", include_in_schema=PRIVATE_IN_SCHEMA)
@handle_db_errors

View File

@@ -273,10 +273,7 @@ async def get_custom_commands(
sql = f"""
SELECT cc.*, creator.discord_id as creator_discord_id,
creator.username as creator_username,
creator.display_name as creator_display_name,
creator.created_at as creator_created_at,
creator.total_commands as creator_total_commands,
creator.active_commands as creator_active_commands
creator.display_name as creator_display_name
FROM custom_commands cc
LEFT JOIN custom_command_creators creator ON cc.creator_id = creator.id
{where_clause}
@@ -301,23 +298,24 @@ async def get_custom_commands(
command_dict["tags"] = json.loads(command_dict["tags"])
except Exception:
command_dict["tags"] = []
# Get full creator information
creator_id = command_dict["creator_id"]
creator_cursor = db.execute_sql(
"SELECT * FROM custom_command_creators WHERE id = %s", (creator_id,)
)
creator_result = creator_cursor.fetchone()
# Build creator object from joined data (avoids N+1 queries)
if command_dict.get("creator_discord_id") is not None:
creator_created_at = command_dict.get("creator_created_at")
if creator_created_at and hasattr(creator_created_at, "isoformat"):
creator_created_at = creator_created_at.isoformat()
creator_dict = {
"id": command_dict["creator_id"],
"discord_id": command_dict["creator_discord_id"],
"username": command_dict["creator_username"],
"display_name": command_dict.get("creator_display_name"),
"created_at": creator_created_at,
"total_commands": command_dict.get("creator_total_commands", 0),
"active_commands": command_dict.get(
"creator_active_commands", 0
),
}
if creator_result:
# Create complete creator object
creator_columns = [desc[0] for desc in creator_cursor.description]
creator_dict = dict(zip(creator_columns, creator_result))
# Convert datetime to ISO string
if creator_dict.get("created_at") and hasattr(
creator_dict["created_at"], "isoformat"
):
creator_dict["created_at"] = creator_dict[
"created_at"
].isoformat()
try:
creator_model = CustomCommandCreatorModel(**creator_dict)
command_dict["creator"] = creator_model
@@ -327,15 +325,13 @@
)
command_dict["creator"] = None
else:
# No creator found, set to None
command_dict["creator"] = None
# Remove joined creator fields before building the command model
# Remove the individual creator fields now that we have the creator object
command_dict.pop("creator_discord_id", None)
command_dict.pop("creator_username", None)
command_dict.pop("creator_display_name", None)
command_dict.pop("creator_created_at", None)
command_dict.pop("creator_total_commands", None)
command_dict.pop("creator_active_commands", None)
# Convert datetime fields to ISO strings
for field in ["created_at", "updated_at", "last_used"]:
@@ -1059,4 +1055,3 @@ async def get_custom_command(command_id: int):
except Exception as e:
logger.error(f"Error getting custom command {command_id}: {e}")
raise HTTPException(status_code=500, detail=str(e))

View File

@@ -92,6 +92,15 @@ async def get_decisions(
all_dec = all_dec.where(Decision.game_id << game_id)
if player_id is not None:
all_dec = all_dec.where(Decision.pitcher << player_id)
# # Need to allow for split-season stats
# if team_id is not None:
# all_teams = Team.select().where(Team.id << team_id)
# all_games = StratGame.select().where(
# (StratGame.away_team << all_teams) | (StratGame.home_team << all_teams))
# all_dec = all_dec.where(Decision.game << all_games)
# if team_id is not None:
# all_players = Player.select().where(Player.team_id << team_id)
# all_dec = all_dec.where(Decision.pitcher << all_players)
if team_id is not None:
s8_teams = [int(x) for x in team_id if int(x) <= 350]
if season is not None and 8 in season or s8_teams:
@@ -106,6 +115,9 @@
if s_type is not None:
all_games = StratGame.select().where(StratGame.season_type == s_type)
all_dec = all_dec.where(Decision.game << all_games)
# if team_id is not None:
# all_players = Player.select().where(Player.team_id << team_id)
# all_dec = all_dec.where(Decision.pitcher << all_players)
if week_start is not None:
all_dec = all_dec.where(Decision.week >= week_start)
if week_end is not None:
@@ -155,7 +167,6 @@ async def patch_decision(
this_dec = Decision.get_or_none(Decision.id == decision_id)
if this_dec is None:
raise HTTPException(
status_code=404, detail=f"Decision ID {decision_id} not found"
)
@@ -183,7 +194,6 @@ async def patch_decision(
d_result = model_to_dict(this_dec)
return d_result
else:
raise HTTPException(
status_code=500, detail=f"Unable to patch decision {decision_id}"
)
@@ -225,7 +235,6 @@ async def delete_decision(decision_id: int, token: str = Depends(oauth2_scheme))
this_dec = Decision.get_or_none(Decision.id == decision_id)
if this_dec is None:
raise HTTPException(
status_code=404, detail=f"Decision ID {decision_id} not found"
)
@@ -249,7 +258,6 @@ async def delete_decisions_game(game_id: int, token: str = Depends(oauth2_scheme
this_game = StratGame.get_or_none(StratGame.id == game_id)
if not this_game:
raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found")
count = Decision.delete().where(Decision.game == this_game).execute()

View File

@@ -276,5 +276,4 @@ async def get_totalstats(
}
)
return_stats["count"] = len(return_stats["stats"])
return return_stats

View File

@@ -4,7 +4,7 @@ Thin HTTP layer using PlayerService for business logic.
"""
from fastapi import APIRouter, Query, Response, Depends
from typing import Optional, List
from typing import Literal, Optional, List
from ..dependencies import (
oauth2_scheme,
@@ -27,8 +27,10 @@ async def get_players(
pos: list = Query(default=None),
strat_code: list = Query(default=None),
is_injured: Optional[bool] = None,
sort: Optional[str] = None,
limit: Optional[int] = Query(default=None, ge=1),
sort: Optional[Literal["cost-asc", "cost-desc", "name-asc", "name-desc"]] = None,
limit: Optional[int] = Query(
default=None, ge=1, description="Maximum number of results to return"
),
offset: Optional[int] = Query(
default=None, ge=0, description="Number of results to skip for pagination"
),

View File

@@ -17,7 +17,6 @@ from ...dependencies import (
add_cache_headers,
cache_result,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
@@ -58,7 +57,7 @@ async def get_batting_totals(
risp: Optional[bool] = None,
inning: list = Query(default=None),
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
short_output: Optional[bool] = False,
page_num: Optional[int] = 1,
week_start: Optional[int] = None,

View File

@@ -17,7 +17,6 @@ from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
@@ -57,7 +56,7 @@ async def get_fielding_totals(
team_id: list = Query(default=None),
manager_id: list = Query(default=None),
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
short_output: Optional[bool] = False,
page_num: Optional[int] = 1,
):

View File

@@ -20,7 +20,6 @@ from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
@@ -57,7 +56,7 @@ async def get_pitching_totals(
risp: Optional[bool] = None,
inning: list = Query(default=None),
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
short_output: Optional[bool] = False,
csv: Optional[bool] = False,
page_num: Optional[int] = 1,

View File

@@ -78,14 +78,14 @@ async def get_transactions(
transactions = transactions.where(Transaction.player << these_players)
if cancelled:
transactions = transactions.where(Transaction.cancelled == 1)
transactions = transactions.where(Transaction.cancelled == True)
else:
transactions = transactions.where(Transaction.cancelled == 0)
transactions = transactions.where(Transaction.cancelled == False)
if frozen:
transactions = transactions.where(Transaction.frozen == 1)
transactions = transactions.where(Transaction.frozen == True)
else:
transactions = transactions.where(Transaction.frozen == 0)
transactions = transactions.where(Transaction.frozen == False)
transactions = transactions.order_by(-Transaction.week, Transaction.moveid)

View File

@@ -0,0 +1,24 @@
-- Migration: Add missing indexes on foreign key columns in stratplay and stratgame
-- Created: 2026-03-27
--
-- PostgreSQL does not auto-index foreign key columns. These tables are the
-- highest-volume tables in the schema and are filtered/joined on these columns
-- in batting, pitching, and running stats aggregation and standings recalculation.
-- stratplay: FK join column
CREATE INDEX IF NOT EXISTS idx_stratplay_game_id ON stratplay(game_id);
-- stratplay: filtered in batting stats aggregation
CREATE INDEX IF NOT EXISTS idx_stratplay_batter_id ON stratplay(batter_id);
-- stratplay: filtered in pitching stats aggregation
CREATE INDEX IF NOT EXISTS idx_stratplay_pitcher_id ON stratplay(pitcher_id);
-- stratplay: filtered in running stats
CREATE INDEX IF NOT EXISTS idx_stratplay_runner_id ON stratplay(runner_id);
-- stratgame: heavily filtered by season
CREATE INDEX IF NOT EXISTS idx_stratgame_season ON stratgame(season);
-- stratgame: standings recalculation query ordering
CREATE INDEX IF NOT EXISTS idx_stratgame_season_week_game_num ON stratgame(season, week, game_num);

View File

@@ -569,7 +569,7 @@ class TestGroupBySbaPlayer:
# Get per-season rows
r_seasons = requests.get(
f"{api}/api/v3/plays/batting",
params={"group_by": "player", "sbaplayer_id": 1, "limit": 999},
params={"group_by": "player", "sbaplayer_id": 1, "limit": 500},
timeout=15,
)
assert r_seasons.status_code == 200