Compare commits

...

18 Commits

Author SHA1 Message Date
cal
f9e24eb4bc Merge pull request 'fix: update test limit to respect MAX_LIMIT=500 (#110)' (#112) from issue/110-fix-test-batting-sbaplayer-career-totals-returns-4 into main
Reviewed-on: #112
2026-04-08 12:55:55 +00:00
Cal Corum
36b962e5d5 fix: update test limit to respect MAX_LIMIT=500 (#110)
Closes #110

The test was sending limit=999 which exceeds MAX_LIMIT (500), causing
FastAPI to return 422. Changed to limit=500, which is sufficient to
cover all seasons for any player.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-08 12:55:35 +00:00
cal
5d5df325bc Merge pull request 'feat: increase MAX_LIMIT to 1000 for plays batting/fielding/pitching (#111)' (#113) from issue/111-feat-increase-max-limit-to-1000-for-plays-fielding into main
Reviewed-on: #113
2026-04-08 12:53:38 +00:00
Cal Corum
682b990321 feat: increase MAX_LIMIT to 1000 for plays batting/fielding/pitching (#111)
Closes #111

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-08 06:32:46 -05:00
cal
5b19bd486a Merge pull request 'fix: preserve total_count in get_totalstats instead of overwriting with page length (#101)' (#102) from issue/101-fieldingstats-get-totalstats-total-count-overwritt into main 2026-04-08 04:08:40 +00:00
Cal Corum
718abc0096 fix: preserve total_count in get_totalstats instead of overwriting with page length (#101)
Closes #101

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 23:08:10 -05:00
cal
52d88ae950 Merge pull request 'fix: add missing indexes on FK columns in stratplay and stratgame (#74)' (#95) from issue/74-add-missing-indexes-on-foreign-key-columns-in-high into main 2026-04-08 04:06:06 +00:00
Cal Corum
9165419ed0 fix: add missing indexes on FK columns in stratplay and stratgame (#74)
Closes #74

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 23:05:25 -05:00
cal
d23d6520c3 Merge pull request 'fix: batch standings updates to eliminate N+1 queries in recalculate (#75)' (#93) from issue/75-fix-n-1-query-pattern-in-standings-recalculation into main 2026-04-08 04:03:39 +00:00
Cal Corum
c23ca9a721 fix: batch standings updates to eliminate N+1 queries in recalculate (#75)
Replace per-game update_standings() calls with pre-fetched dicts and
in-memory accumulation, then a single bulk_update at the end.
Reduces ~1,100+ queries for a full season to ~5 queries.

Closes #75

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 23:03:11 -05:00
cal
1db06576cc Merge pull request 'fix: replace integer comparisons on boolean fields with True/False (#69)' (#94) from issue/69-boolean-fields-compared-as-integers-sqlite-pattern into main 2026-04-08 03:57:35 +00:00
Cal Corum
7a5327f490 fix: replace integer comparisons on boolean fields with True/False (#69)
Closes #69

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:57:01 -05:00
cal
a2889751da Merge pull request 'fix: remove SQLite fallback code from db_engine.py (#70)' (#89) from issue/70-remove-sqlite-fallback-code-from-db-engine-py into main 2026-04-08 03:56:11 +00:00
Cal Corum
eb886a4690 fix: remove SQLite fallback code from db_engine.py (#70)
Removes DATABASE_TYPE conditional entirely. PostgreSQL is now the only
supported backend. Moves PooledPostgresqlDatabase import to top-level
and raises RuntimeError at startup if POSTGRES_PASSWORD is unset,
preventing silent misconnection with misleading errors.

Closes #70

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:55:44 -05:00
cal
0ee7367bc0 Merge pull request 'fix: disable autoconnect and set pool timeout on PooledPostgresqlDatabase (#80)' (#87) from issue/80-disable-autoconnect-and-set-pool-timeout-on-pooled into main 2026-04-08 03:55:05 +00:00
Cal Corum
6637f6e9eb fix: disable autoconnect and set pool timeout on PooledPostgresqlDatabase (#80)
- Set timeout=5 so pool exhaustion surfaces as an error instead of hanging forever
- Set autoconnect=False to require explicit connection acquisition
- Add HTTP middleware in main.py to open/close connections per request

Closes #80

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:54:27 -05:00
cal
fa176c9b05 Merge pull request 'fix: enforce Literal validation on sort parameter in GET /api/v3/players (#66)' (#68) from ai/major-domo-database-66 into main 2026-04-08 03:54:02 +00:00
Cal Corum
ece25ec22c fix: enforce Literal validation on sort parameter in GET /api/v3/players (#66)
Closes #66

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 22:53:33 -05:00
10 changed files with 217 additions and 53 deletions

View File

@@ -8,39 +8,30 @@ from typing import Literal, List, Optional
from pandas import DataFrame
from peewee import *
from peewee import ModelSelect
from playhouse.pool import PooledPostgresqlDatabase
from playhouse.shortcuts import model_to_dict
# Database configuration - supports both SQLite and PostgreSQL
DATABASE_TYPE = os.environ.get("DATABASE_TYPE", "sqlite")
if DATABASE_TYPE.lower() == "postgresql":
from playhouse.pool import PooledPostgresqlDatabase
_postgres_password = os.environ.get("POSTGRES_PASSWORD")
if _postgres_password is None:
raise RuntimeError(
"POSTGRES_PASSWORD environment variable is not set. "
"This variable is required when DATABASE_TYPE=postgresql."
)
db = PooledPostgresqlDatabase(
os.environ.get("POSTGRES_DB", "sba_master"),
user=os.environ.get("POSTGRES_USER", "sba_admin"),
password=_postgres_password,
host=os.environ.get("POSTGRES_HOST", "sba_postgres"),
port=int(os.environ.get("POSTGRES_PORT", "5432")),
max_connections=20,
stale_timeout=300, # 5 minutes
timeout=0,
autoconnect=True,
autorollback=True, # Automatically rollback failed transactions
)
else:
# Default SQLite configuration
db = SqliteDatabase(
"storage/sba_master.db",
pragmas={"journal_mode": "wal", "cache_size": -1 * 64000, "synchronous": 0},
_postgres_password = os.environ.get("POSTGRES_PASSWORD")
if _postgres_password is None:
raise RuntimeError(
"POSTGRES_PASSWORD environment variable is not set. "
"This variable is required when DATABASE_TYPE=postgresql."
)
db = PooledPostgresqlDatabase(
os.environ.get("POSTGRES_DB", "sba_master"),
user=os.environ.get("POSTGRES_USER", "sba_admin"),
password=_postgres_password,
host=os.environ.get("POSTGRES_HOST", "sba_postgres"),
port=int(os.environ.get("POSTGRES_PORT", "5432")),
max_connections=20,
stale_timeout=300, # 5 minutes
timeout=5,
autoconnect=False,
autorollback=True, # Automatically rollback failed transactions
)
date = f"{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}"
logger = logging.getLogger("discord_app")
@@ -523,12 +514,12 @@ class Team(BaseModel):
all_drops = Transaction.select_season(Current.latest().season).where(
(Transaction.oldteam == self)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
all_adds = Transaction.select_season(Current.latest().season).where(
(Transaction.newteam == self)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
for move in all_drops:
@@ -606,12 +597,12 @@ class Team(BaseModel):
all_drops = Transaction.select_season(Current.latest().season).where(
(Transaction.oldteam == sil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
all_adds = Transaction.select_season(Current.latest().season).where(
(Transaction.newteam == sil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
for move in all_drops:
@@ -689,12 +680,12 @@ class Team(BaseModel):
all_drops = Transaction.select_season(Current.latest().season).where(
(Transaction.oldteam == lil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
all_adds = Transaction.select_season(Current.latest().season).where(
(Transaction.newteam == lil_team)
& (Transaction.week == current.week + 1)
& (Transaction.cancelled == 0)
& (Transaction.cancelled == False)
)
for move in all_drops:
@@ -1533,7 +1524,18 @@ class Standings(BaseModel):
with db.atomic():
Standings.bulk_create(create_teams)
# Iterate through each individual result
# Pre-fetch all data needed for in-memory processing (avoids N+1 queries)
standings_by_team_id = {
s.team_id: s
for s in Standings.select().where(Standings.team << s_teams)
}
teams_by_id = {t.id: t for t in Team.select().where(Team.season == season)}
divisions_by_id = {
d.id: d
for d in Division.select().where(Division.season == season)
}
# Iterate through each individual result, tallying wins/losses in memory
# for game in Result.select_season(season).where(Result.week <= 22):
for game in (
StratGame.select()
@@ -1544,8 +1546,121 @@ class Standings(BaseModel):
)
.order_by(StratGame.week, StratGame.game_num)
):
# tally win and loss for each standings object
game.update_standings()
away_stan = standings_by_team_id.get(game.away_team_id)
home_stan = standings_by_team_id.get(game.home_team_id)
away_team_obj = teams_by_id.get(game.away_team_id)
home_team_obj = teams_by_id.get(game.home_team_id)
if None in (away_stan, home_stan, away_team_obj, home_team_obj):
continue
away_div = divisions_by_id.get(away_team_obj.division_id)
home_div = divisions_by_id.get(home_team_obj.division_id)
if away_div is None or home_div is None:
continue
# Home Team Won
if game.home_score > game.away_score:
home_stan.wins += 1
home_stan.home_wins += 1
away_stan.losses += 1
away_stan.away_losses += 1
if home_stan.streak_wl == 'w':
home_stan.streak_num += 1
else:
home_stan.streak_wl = 'w'
home_stan.streak_num = 1
if away_stan.streak_wl == 'l':
away_stan.streak_num += 1
else:
away_stan.streak_wl = 'l'
away_stan.streak_num = 1
if game.home_score == game.away_score + 1:
home_stan.one_run_wins += 1
away_stan.one_run_losses += 1
if away_div.division_abbrev == 'TC':
home_stan.div1_wins += 1
elif away_div.division_abbrev == 'ETSOS':
home_stan.div2_wins += 1
elif away_div.division_abbrev == 'APL':
home_stan.div3_wins += 1
elif away_div.division_abbrev == 'BBC':
home_stan.div4_wins += 1
if home_div.division_abbrev == 'TC':
away_stan.div1_losses += 1
elif home_div.division_abbrev == 'ETSOS':
away_stan.div2_losses += 1
elif home_div.division_abbrev == 'APL':
away_stan.div3_losses += 1
elif home_div.division_abbrev == 'BBC':
away_stan.div4_losses += 1
home_stan.run_diff += game.home_score - game.away_score
away_stan.run_diff -= game.home_score - game.away_score
# Away Team Won
else:
home_stan.losses += 1
home_stan.home_losses += 1
away_stan.wins += 1
away_stan.away_wins += 1
if home_stan.streak_wl == 'l':
home_stan.streak_num += 1
else:
home_stan.streak_wl = 'l'
home_stan.streak_num = 1
if away_stan.streak_wl == 'w':
away_stan.streak_num += 1
else:
away_stan.streak_wl = 'w'
away_stan.streak_num = 1
if game.away_score == game.home_score + 1:
home_stan.one_run_losses += 1
away_stan.one_run_wins += 1
if away_div.division_abbrev == 'TC':
home_stan.div1_losses += 1
elif away_div.division_abbrev == 'ETSOS':
home_stan.div2_losses += 1
elif away_div.division_abbrev == 'APL':
home_stan.div3_losses += 1
elif away_div.division_abbrev == 'BBC':
home_stan.div4_losses += 1
if home_div.division_abbrev == 'TC':
away_stan.div1_wins += 1
elif home_div.division_abbrev == 'ETSOS':
away_stan.div2_wins += 1
elif home_div.division_abbrev == 'APL':
away_stan.div3_wins += 1
elif home_div.division_abbrev == 'BBC':
away_stan.div4_wins += 1
home_stan.run_diff -= game.away_score - game.home_score
away_stan.run_diff += game.away_score - game.home_score
# Bulk save all modified standings
with db.atomic():
Standings.bulk_update(
list(standings_by_team_id.values()),
fields=[
Standings.wins, Standings.losses,
Standings.home_wins, Standings.home_losses,
Standings.away_wins, Standings.away_losses,
Standings.one_run_wins, Standings.one_run_losses,
Standings.streak_wl, Standings.streak_num,
Standings.run_diff,
Standings.div1_wins, Standings.div1_losses,
Standings.div2_wins, Standings.div2_losses,
Standings.div3_wins, Standings.div3_losses,
Standings.div4_wins, Standings.div4_losses,
]
)
# Set pythag record and iterate through last 8 games for last8 record
for team in all_teams:
@@ -2367,6 +2482,12 @@ class StratGame(BaseModel):
home_stan.save()
away_stan.save()
class Meta:
indexes = (
(("season",), False),
(("season", "week", "game_num"), False),
)
class StratPlay(BaseModel):
game = ForeignKeyField(StratGame)
@@ -2441,6 +2562,14 @@ class StratPlay(BaseModel):
re24_primary = FloatField(null=True)
re24_running = FloatField(null=True)
class Meta:
indexes = (
(("game",), False),
(("batter",), False),
(("pitcher",), False),
(("runner",), False),
)
class Decision(BaseModel):
game = ForeignKeyField(StratGame)

View File

@@ -8,6 +8,8 @@ from fastapi import Depends, FastAPI, Request
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.openapi.utils import get_openapi
from .db_engine import db
# from fastapi.openapi.docs import get_swagger_ui_html
# from fastapi.openapi.utils import get_openapi
@@ -68,6 +70,17 @@ app = FastAPI(
)
@app.middleware("http")
async def db_connection_middleware(request: Request, call_next):
db.connect(reuse_if_open=True)
try:
response = await call_next(request)
finally:
if not db.is_closed():
db.close()
return response
logger.info(f"Starting up now...")

View File

@@ -276,5 +276,4 @@ async def get_totalstats(
}
)
return_stats["count"] = len(return_stats["stats"])
return return_stats

View File

@@ -4,7 +4,7 @@ Thin HTTP layer using PlayerService for business logic.
"""
from fastapi import APIRouter, Query, Response, Depends
from typing import Optional, List
from typing import Literal, Optional, List
from ..dependencies import (
oauth2_scheme,
@@ -27,8 +27,10 @@ async def get_players(
pos: list = Query(default=None),
strat_code: list = Query(default=None),
is_injured: Optional[bool] = None,
sort: Optional[str] = None,
limit: Optional[int] = Query(default=None, ge=1),
sort: Optional[Literal["cost-asc", "cost-desc", "name-asc", "name-desc"]] = None,
limit: Optional[int] = Query(
default=None, ge=1, description="Maximum number of results to return"
),
offset: Optional[int] = Query(
default=None, ge=0, description="Number of results to skip for pagination"
),

View File

@@ -17,7 +17,6 @@ from ...dependencies import (
add_cache_headers,
cache_result,
handle_db_errors,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
@@ -58,7 +57,7 @@ async def get_batting_totals(
risp: Optional[bool] = None,
inning: list = Query(default=None),
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
short_output: Optional[bool] = False,
page_num: Optional[int] = 1,
week_start: Optional[int] = None,

View File

@@ -17,7 +17,6 @@ from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
@@ -57,7 +56,7 @@ async def get_fielding_totals(
team_id: list = Query(default=None),
manager_id: list = Query(default=None),
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
short_output: Optional[bool] = False,
page_num: Optional[int] = 1,
):

View File

@@ -20,7 +20,6 @@ from ...dependencies import (
handle_db_errors,
add_cache_headers,
cache_result,
MAX_LIMIT,
DEFAULT_LIMIT,
)
from .common import build_season_games
@@ -57,7 +56,7 @@ async def get_pitching_totals(
risp: Optional[bool] = None,
inning: list = Query(default=None),
sort: Optional[str] = None,
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
short_output: Optional[bool] = False,
csv: Optional[bool] = False,
page_num: Optional[int] = 1,

View File

@@ -78,14 +78,14 @@ async def get_transactions(
transactions = transactions.where(Transaction.player << these_players)
if cancelled:
transactions = transactions.where(Transaction.cancelled == 1)
transactions = transactions.where(Transaction.cancelled == True)
else:
transactions = transactions.where(Transaction.cancelled == 0)
transactions = transactions.where(Transaction.cancelled == False)
if frozen:
transactions = transactions.where(Transaction.frozen == 1)
transactions = transactions.where(Transaction.frozen == True)
else:
transactions = transactions.where(Transaction.frozen == 0)
transactions = transactions.where(Transaction.frozen == False)
transactions = transactions.order_by(-Transaction.week, Transaction.moveid)

View File

@@ -0,0 +1,24 @@
-- Migration: Add missing indexes on foreign key columns in stratplay and stratgame
-- Created: 2026-03-27
--
-- PostgreSQL does not auto-index foreign key columns. These tables are the
-- highest-volume tables in the schema and are filtered/joined on these columns
-- in batting, pitching, and running stats aggregation and standings recalculation.
-- stratplay: FK join column
CREATE INDEX IF NOT EXISTS idx_stratplay_game_id ON stratplay(game_id);
-- stratplay: filtered in batting stats aggregation
CREATE INDEX IF NOT EXISTS idx_stratplay_batter_id ON stratplay(batter_id);
-- stratplay: filtered in pitching stats aggregation
CREATE INDEX IF NOT EXISTS idx_stratplay_pitcher_id ON stratplay(pitcher_id);
-- stratplay: filtered in running stats
CREATE INDEX IF NOT EXISTS idx_stratplay_runner_id ON stratplay(runner_id);
-- stratgame: heavily filtered by season
CREATE INDEX IF NOT EXISTS idx_stratgame_season ON stratgame(season);
-- stratgame: standings recalculation query ordering
CREATE INDEX IF NOT EXISTS idx_stratgame_season_week_game_num ON stratgame(season, week, game_num);

View File

@@ -569,7 +569,7 @@ class TestGroupBySbaPlayer:
# Get per-season rows
r_seasons = requests.get(
f"{api}/api/v3/plays/batting",
params={"group_by": "player", "sbaplayer_id": 1, "limit": 999},
params={"group_by": "player", "sbaplayer_id": 1, "limit": 500},
timeout=15,
)
assert r_seasons.status_code == 200