Compare commits
38 Commits
next-relea
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| ca15dfe380 | |||
| 1575be8260 | |||
| 7c7405cd1d | |||
| 0cc0cba6a9 | |||
| 41fe4f6ce2 | |||
| 14234385fe | |||
| 07aeaa8f3e | |||
|
|
701f790868 | ||
|
|
b46d8d33ef | ||
|
|
cfa6da06b7 | ||
| 40897f1cc8 | |||
| 12a76c2bb5 | |||
| aac4bf50d5 | |||
|
|
4ad445b0da | ||
| 8d9bbdd7a0 | |||
| c95459fa5d | |||
| d809590f0e | |||
| 0d8e666a75 | |||
|
|
bd19b7d913 | ||
|
|
c49f91cc19 | ||
|
|
215085b326 | ||
| c063f5c4ef | |||
|
|
d92f571960 | ||
| 81baa54681 | |||
|
|
67e87a893a | ||
|
|
16f3f8d8de | ||
|
|
b35b68a88f | ||
| a1fa54c416 | |||
|
|
eccf4d1441 | ||
|
|
d8c6ce2a5e | ||
|
|
665f275546 | ||
|
|
75a8fc8505 | ||
|
|
dcaf184ad3 | ||
|
|
1bcde424c6 | ||
|
|
3be4f71e22 | ||
|
|
c451e02c52 | ||
|
|
a21bb2a380 | ||
| da679b6d1a |
3
.env
3
.env
@ -6,6 +6,9 @@ SBA_DB_USER_PASSWORD=your_production_password
|
||||
# SBa API
|
||||
API_TOKEN=Tp3aO3jhYve5NJF1IqOmJTmk
|
||||
|
||||
# Integrations
|
||||
DISCORD_WEBHOOK_URL=
|
||||
|
||||
# Universal
|
||||
TZ=America/Chicago
|
||||
LOG_LEVEL=INFO
|
||||
@ -1,20 +1,18 @@
|
||||
# Gitea Actions: Docker Build, Push, and Notify
|
||||
#
|
||||
# CI/CD pipeline for Major Domo Database API:
|
||||
# - Builds Docker images on every push/PR
|
||||
# - Auto-generates CalVer version (YYYY.MM.BUILD) on main branch merges
|
||||
# - Pushes to Docker Hub and creates git tag on main
|
||||
# - Triggered by pushing a CalVer tag (e.g., 2026.4.5)
|
||||
# - Builds Docker image and pushes to Docker Hub with version + latest tags
|
||||
# - Sends Discord notifications on success/failure
|
||||
#
|
||||
# To release: git tag -a 2026.4.5 -m "description" && git push origin 2026.4.5
|
||||
|
||||
name: Build Docker Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
tags:
|
||||
- '20*' # matches CalVer tags like 2026.4.5
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@ -24,7 +22,16 @@ jobs:
|
||||
- name: Checkout code
|
||||
uses: https://github.com/actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Full history for tag counting
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Extract version from tag
|
||||
id: version
|
||||
run: |
|
||||
VERSION=${GITHUB_REF#refs/tags/}
|
||||
SHA_SHORT=$(git rev-parse --short HEAD)
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "sha_short=$SHA_SHORT" >> $GITHUB_OUTPUT
|
||||
echo "timestamp=$(date -u +%Y-%m-%dT%H:%M:%SZ)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: https://github.com/docker/setup-buildx-action@v3
|
||||
@ -35,80 +42,47 @@ jobs:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Generate CalVer version
|
||||
id: calver
|
||||
uses: cal/gitea-actions/calver@main
|
||||
|
||||
# Dev build: push with dev + dev-SHA tags (PR/feature branches)
|
||||
- name: Build Docker image (dev)
|
||||
if: github.ref != 'refs/heads/main'
|
||||
uses: https://github.com/docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: |
|
||||
manticorum67/major-domo-database:dev
|
||||
manticorum67/major-domo-database:dev-${{ steps.calver.outputs.sha_short }}
|
||||
cache-from: type=registry,ref=manticorum67/major-domo-database:buildcache
|
||||
cache-to: type=registry,ref=manticorum67/major-domo-database:buildcache,mode=max
|
||||
|
||||
# Production build: push with latest + CalVer tags (main only)
|
||||
- name: Build Docker image (production)
|
||||
if: github.ref == 'refs/heads/main'
|
||||
- name: Build and push Docker image
|
||||
uses: https://github.com/docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: |
|
||||
manticorum67/major-domo-database:${{ steps.version.outputs.version }}
|
||||
manticorum67/major-domo-database:latest
|
||||
manticorum67/major-domo-database:${{ steps.calver.outputs.version }}
|
||||
manticorum67/major-domo-database:${{ steps.calver.outputs.version_sha }}
|
||||
cache-from: type=registry,ref=manticorum67/major-domo-database:buildcache
|
||||
cache-to: type=registry,ref=manticorum67/major-domo-database:buildcache,mode=max
|
||||
|
||||
- name: Tag release
|
||||
if: success() && github.ref == 'refs/heads/main'
|
||||
uses: cal/gitea-actions/gitea-tag@main
|
||||
with:
|
||||
version: ${{ steps.calver.outputs.version }}
|
||||
token: ${{ github.token }}
|
||||
|
||||
- name: Build Summary
|
||||
run: |
|
||||
echo "## Docker Build Successful" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Version:** \`${{ steps.version.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Image Tags:**" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- \`manticorum67/major-domo-database:${{ steps.version.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- \`manticorum67/major-domo-database:latest\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- \`manticorum67/major-domo-database:${{ steps.calver.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- \`manticorum67/major-domo-database:${{ steps.calver.outputs.version_sha }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Build Details:**" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Branch: \`${{ steps.calver.outputs.branch }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Commit: \`${{ github.sha }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Timestamp: \`${{ steps.calver.outputs.timestamp }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Commit: \`${{ steps.version.outputs.sha_short }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Timestamp: \`${{ steps.version.outputs.timestamp }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
if [ "${{ github.ref }}" == "refs/heads/main" ]; then
|
||||
echo "Pushed to Docker Hub!" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Pull with: \`docker pull manticorum67/major-domo-database:latest\`" >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "_PR build - image not pushed to Docker Hub_" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
echo "Pull with: \`docker pull manticorum67/major-domo-database:${{ steps.version.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Discord Notification - Success
|
||||
if: success() && github.ref == 'refs/heads/main'
|
||||
if: success()
|
||||
uses: cal/gitea-actions/discord-notify@main
|
||||
with:
|
||||
webhook_url: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
title: "Major Domo Database"
|
||||
status: success
|
||||
version: ${{ steps.calver.outputs.version }}
|
||||
image_tag: ${{ steps.calver.outputs.version_sha }}
|
||||
commit_sha: ${{ steps.calver.outputs.sha_short }}
|
||||
timestamp: ${{ steps.calver.outputs.timestamp }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
image_tag: ${{ steps.version.outputs.version }}
|
||||
commit_sha: ${{ steps.version.outputs.sha_short }}
|
||||
timestamp: ${{ steps.version.outputs.timestamp }}
|
||||
|
||||
- name: Discord Notification - Failure
|
||||
if: failure() && github.ref == 'refs/heads/main'
|
||||
if: failure()
|
||||
uses: cal/gitea-actions/discord-notify@main
|
||||
with:
|
||||
webhook_url: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -55,7 +55,6 @@ Include/
|
||||
pyvenv.cfg
|
||||
db_engine.py
|
||||
main.py
|
||||
migrations.py
|
||||
db_engine.py
|
||||
sba_master.db
|
||||
db_engine.py
|
||||
|
||||
@ -40,7 +40,7 @@ python migrations.py # Run migrations (SQL files in migrat
|
||||
- **Bot container**: `dev_sba_postgres` (PostgreSQL) + `dev_sba_db_api` (API) — check with `docker ps`
|
||||
- **Image**: `manticorum67/major-domo-database:dev` (Docker Hub)
|
||||
|
||||
- **CI/CD**: Gitea Actions on PR to `main` — builds Docker image, auto-generates CalVer version (`YYYY.MM.BUILD`) on merge
|
||||
- **CI/CD**: Gitea Actions — tag-triggered Docker builds. Push a CalVer tag to release: `git tag -a 2026.4.5 -m "description" && git push origin 2026.4.5`
|
||||
|
||||
## Important
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
# Use specific version for reproducible builds
|
||||
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.11
|
||||
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.12
|
||||
|
||||
# Set Python optimizations
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
1296
app/db_engine.py
1296
app/db_engine.py
File diff suppressed because it is too large
Load Diff
@ -22,6 +22,9 @@ logger = logging.getLogger("discord_app")
|
||||
# level=log_level
|
||||
# )
|
||||
|
||||
# Discord integration
|
||||
DISCORD_WEBHOOK_URL = os.environ.get("DISCORD_WEBHOOK_URL")
|
||||
|
||||
# Redis configuration
|
||||
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
|
||||
REDIS_PORT = int(os.environ.get("REDIS_PORT", "6379"))
|
||||
@ -57,6 +60,9 @@ priv_help = (
|
||||
)
|
||||
PRIVATE_IN_SCHEMA = True if priv_help == "TRUE" else False
|
||||
|
||||
MAX_LIMIT = 500
|
||||
DEFAULT_LIMIT = 200
|
||||
|
||||
|
||||
def valid_token(token):
|
||||
return token == os.environ.get("API_TOKEN")
|
||||
@ -376,14 +382,14 @@ def update_season_pitching_stats(player_ids, season, db_connection):
|
||||
|
||||
-- RBI allowed (excluding HR) per runner opportunity
|
||||
CASE
|
||||
WHEN (SUM(CASE WHEN sp.on_first IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_second IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_third IS NOT NULL THEN 1 ELSE 0 END)) > 0
|
||||
WHEN (SUM(CASE WHEN sp.on_first_id IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_second_id IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_third_id IS NOT NULL THEN 1 ELSE 0 END)) > 0
|
||||
THEN ROUND(
|
||||
(SUM(sp.rbi) - SUM(sp.homerun))::DECIMAL /
|
||||
(SUM(CASE WHEN sp.on_first IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_second IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_third IS NOT NULL THEN 1 ELSE 0 END)),
|
||||
(SUM(CASE WHEN sp.on_first_id IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_second_id IS NOT NULL THEN 1 ELSE 0 END) +
|
||||
SUM(CASE WHEN sp.on_third_id IS NOT NULL THEN 1 ELSE 0 END)),
|
||||
3
|
||||
)
|
||||
ELSE 0.000
|
||||
@ -513,7 +519,12 @@ def send_webhook_message(message: str) -> bool:
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
webhook_url = "https://discord.com/api/webhooks/1408811717424840876/7RXG_D5IqovA3Jwa9YOobUjVcVMuLc6cQyezABcWuXaHo5Fvz1en10M7J43o3OJ3bzGW"
|
||||
webhook_url = DISCORD_WEBHOOK_URL
|
||||
if not webhook_url:
|
||||
logger.warning(
|
||||
"DISCORD_WEBHOOK_URL env var is not set — skipping webhook message"
|
||||
)
|
||||
return False
|
||||
|
||||
try:
|
||||
payload = {"content": message}
|
||||
@ -804,6 +815,10 @@ def handle_db_errors(func):
|
||||
|
||||
return result
|
||||
|
||||
except HTTPException:
|
||||
# Let intentional HTTP errors (401, 404, etc.) pass through unchanged
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
elapsed_time = time.time() - start_time
|
||||
error_trace = traceback.format_exc()
|
||||
|
||||
27
app/main.py
27
app/main.py
@ -2,6 +2,7 @@ import datetime
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import os
|
||||
from urllib.parse import parse_qsl, urlencode
|
||||
|
||||
from fastapi import Depends, FastAPI, Request
|
||||
from fastapi.openapi.docs import get_swagger_ui_html
|
||||
@ -70,6 +71,32 @@ app = FastAPI(
|
||||
logger.info(f"Starting up now...")
|
||||
|
||||
|
||||
@app.middleware("http")
|
||||
async def db_connection_middleware(request: Request, call_next):
|
||||
from .db_engine import db
|
||||
|
||||
db.connect(reuse_if_open=True)
|
||||
try:
|
||||
response = await call_next(request)
|
||||
return response
|
||||
finally:
|
||||
if not db.is_closed():
|
||||
db.close()
|
||||
|
||||
|
||||
@app.middleware("http")
|
||||
async def strip_empty_query_params(request: Request, call_next):
|
||||
qs = request.scope.get("query_string", b"")
|
||||
if qs:
|
||||
pairs = parse_qsl(qs.decode(), keep_blank_values=True)
|
||||
filtered = [(k, v) for k, v in pairs if v != ""]
|
||||
new_qs = urlencode(filtered).encode()
|
||||
request.scope["query_string"] = new_qs
|
||||
if hasattr(request, "_query_params"):
|
||||
del request._query_params
|
||||
return await call_next(request)
|
||||
|
||||
|
||||
app.include_router(current.router)
|
||||
app.include_router(players.router)
|
||||
app.include_router(results.router)
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -43,6 +45,8 @@ async def get_awards(
|
||||
team_id: list = Query(default=None),
|
||||
short_output: Optional[bool] = False,
|
||||
player_name: list = Query(default=None),
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_awards = Award.select()
|
||||
|
||||
@ -67,11 +71,13 @@ async def get_awards(
|
||||
all_players = Player.select().where(fn.Lower(Player.name) << pname_list)
|
||||
all_awards = all_awards.where(Award.player << all_players)
|
||||
|
||||
total_count = all_awards.count()
|
||||
all_awards = all_awards.offset(offset).limit(limit)
|
||||
|
||||
return_awards = {
|
||||
"count": all_awards.count(),
|
||||
"count": total_count,
|
||||
"awards": [model_to_dict(x, recurse=not short_output) for x in all_awards],
|
||||
}
|
||||
db.close()
|
||||
return return_awards
|
||||
|
||||
|
||||
@ -80,10 +86,8 @@ async def get_awards(
|
||||
async def get_one_award(award_id: int, short_output: Optional[bool] = False):
|
||||
this_award = Award.get_or_none(Award.id == award_id)
|
||||
if this_award is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Award ID {award_id} not found")
|
||||
|
||||
db.close()
|
||||
return model_to_dict(this_award, recurse=not short_output)
|
||||
|
||||
|
||||
@ -107,7 +111,6 @@ async def patch_award(
|
||||
|
||||
this_award = Award.get_or_none(Award.id == award_id)
|
||||
if this_award is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Award ID {award_id} not found")
|
||||
|
||||
if name is not None:
|
||||
@ -129,10 +132,8 @@ async def patch_award(
|
||||
|
||||
if this_award.save() == 1:
|
||||
r_award = model_to_dict(this_award)
|
||||
db.close()
|
||||
return r_award
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(status_code=500, detail=f"Unable to patch award {award_id}")
|
||||
|
||||
|
||||
@ -171,12 +172,11 @@ async def post_award(award_list: AwardList, token: str = Depends(oauth2_scheme))
|
||||
status_code=404, detail=f"Team ID {x.team_id} not found"
|
||||
)
|
||||
|
||||
new_awards.append(x.dict())
|
||||
new_awards.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_awards, 15):
|
||||
Award.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_awards)} awards"
|
||||
|
||||
@ -190,11 +190,9 @@ async def delete_award(award_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_award = Award.get_or_none(Award.id == award_id)
|
||||
if this_award is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Award ID {award_id} not found")
|
||||
|
||||
count = this_award.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Award {award_id} has been deleted"
|
||||
|
||||
@ -19,6 +19,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -84,24 +86,21 @@ async def get_batstats(
|
||||
week_end: Optional[int] = None,
|
||||
game_num: list = Query(default=None),
|
||||
position: list = Query(default=None),
|
||||
limit: Optional[int] = None,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
sort: Optional[str] = None,
|
||||
short_output: Optional[bool] = True,
|
||||
):
|
||||
if "post" in s_type.lower():
|
||||
all_stats = BattingStat.post_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {"count": 0, "stats": []}
|
||||
elif s_type.lower() in ["combined", "total", "all"]:
|
||||
all_stats = BattingStat.combined_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {"count": 0, "stats": []}
|
||||
else:
|
||||
all_stats = BattingStat.regular_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {"count": 0, "stats": []}
|
||||
|
||||
if position is not None:
|
||||
@ -127,15 +126,13 @@ async def get_batstats(
|
||||
if week_end is not None:
|
||||
end = min(week_end, end)
|
||||
if start > end:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Start week {start} is after end week {end} - cannot pull stats",
|
||||
)
|
||||
all_stats = all_stats.where((BattingStat.week >= start) & (BattingStat.week <= end))
|
||||
|
||||
if limit:
|
||||
all_stats = all_stats.limit(limit)
|
||||
all_stats = all_stats.limit(limit)
|
||||
if sort:
|
||||
if sort == "newest":
|
||||
all_stats = all_stats.order_by(-BattingStat.week, -BattingStat.game)
|
||||
@ -146,7 +143,6 @@ async def get_batstats(
|
||||
# 'stats': [{'id': x.id} for x in all_stats]
|
||||
}
|
||||
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
|
||||
@ -168,6 +164,8 @@ async def get_totalstats(
|
||||
short_output: Optional[bool] = False,
|
||||
min_pa: Optional[int] = 1,
|
||||
week: list = Query(default=None),
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
if sum(1 for x in [s_type, (week_start or week_end), week] if x is not None) > 1:
|
||||
raise HTTPException(
|
||||
@ -301,7 +299,10 @@ async def get_totalstats(
|
||||
all_players = Player.select().where(Player.id << player_id)
|
||||
all_stats = all_stats.where(BattingStat.player << all_players)
|
||||
|
||||
return_stats = {"count": all_stats.count(), "stats": []}
|
||||
total_count = all_stats.count()
|
||||
all_stats = all_stats.offset(offset).limit(limit)
|
||||
|
||||
return_stats = {"count": total_count, "stats": []}
|
||||
|
||||
for x in all_stats:
|
||||
# Handle player field based on grouping with safe access
|
||||
@ -344,7 +345,6 @@ async def get_totalstats(
|
||||
"bplo": x.sum_bplo,
|
||||
}
|
||||
)
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
|
||||
@ -366,9 +366,10 @@ async def patch_batstats(
|
||||
if BattingStat.get_or_none(BattingStat.id == stat_id) is None:
|
||||
raise HTTPException(status_code=404, detail=f"Stat ID {stat_id} not found")
|
||||
|
||||
BattingStat.update(**new_stats.dict()).where(BattingStat.id == stat_id).execute()
|
||||
BattingStat.update(**new_stats.model_dump()).where(
|
||||
BattingStat.id == stat_id
|
||||
).execute()
|
||||
r_stat = model_to_dict(BattingStat.get_by_id(stat_id))
|
||||
db.close()
|
||||
return r_stat
|
||||
|
||||
|
||||
@ -404,7 +405,7 @@ async def post_batstats(s_list: BatStatList, token: str = Depends(oauth2_scheme)
|
||||
status_code=404, detail=f"Player ID {x.player_id} not found"
|
||||
)
|
||||
|
||||
all_stats.append(BattingStat(**x.dict()))
|
||||
all_stats.append(BattingStat(**x.model_dump()))
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(all_stats, 15):
|
||||
@ -412,5 +413,4 @@ async def post_batstats(s_list: BatStatList, token: str = Depends(oauth2_scheme)
|
||||
|
||||
# Update career stats
|
||||
|
||||
db.close()
|
||||
return f"Added {len(all_stats)} batting lines"
|
||||
|
||||
@ -41,7 +41,6 @@ async def get_current(season: Optional[int] = None):
|
||||
|
||||
if current is not None:
|
||||
r_curr = model_to_dict(current)
|
||||
db.close()
|
||||
return r_curr
|
||||
else:
|
||||
return None
|
||||
@ -100,10 +99,8 @@ async def patch_current(
|
||||
|
||||
if current.save():
|
||||
r_curr = model_to_dict(current)
|
||||
db.close()
|
||||
return r_curr
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch current {current_id}"
|
||||
)
|
||||
@ -116,14 +113,12 @@ async def post_current(new_current: CurrentModel, token: str = Depends(oauth2_sc
|
||||
logger.warning(f"patch_current - Bad Token: {token}")
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
this_current = Current(**new_current.dict())
|
||||
this_current = Current(**new_current.model_dump())
|
||||
|
||||
if this_current.save():
|
||||
r_curr = model_to_dict(this_current)
|
||||
db.close()
|
||||
return r_curr
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Unable to post season {new_current.season} current",
|
||||
|
||||
@ -363,8 +363,6 @@ async def get_custom_commands(
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting custom commands: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
# Move this route to after the specific string routes
|
||||
@ -429,8 +427,6 @@ async def create_custom_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating custom command: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.put("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -490,8 +486,6 @@ async def update_custom_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating custom command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.patch("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -575,8 +569,6 @@ async def patch_custom_command(
|
||||
except Exception as e:
|
||||
logger.error(f"Error patching custom command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.delete("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -612,8 +604,6 @@ async def delete_custom_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting custom command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
# Creator endpoints
|
||||
@ -683,8 +673,6 @@ async def get_creators(
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting creators: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.post("/creators", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -728,8 +716,6 @@ async def create_creator_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating creator: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/stats")
|
||||
@ -854,8 +840,6 @@ async def get_custom_command_stats():
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting custom command stats: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
# Special endpoints for Discord bot integration
|
||||
@ -921,8 +905,6 @@ async def get_custom_command_by_name_endpoint(command_name: str):
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting custom command by name '{command_name}': {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.patch("/by_name/{command_name}/execute", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -990,8 +972,6 @@ async def execute_custom_command(
|
||||
except Exception as e:
|
||||
logger.error(f"Error executing custom command '{command_name}': {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/autocomplete")
|
||||
@ -1027,8 +1007,6 @@ async def get_command_names_for_autocomplete(
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting command names for autocomplete: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/{command_id}")
|
||||
@ -1077,5 +1055,3 @@ async def get_custom_command(command_id: int):
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting custom command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
@ -19,6 +19,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -73,7 +75,7 @@ async def get_decisions(
|
||||
irunners_scored: list = Query(default=None),
|
||||
game_id: list = Query(default=None),
|
||||
player_id: list = Query(default=None),
|
||||
limit: Optional[int] = None,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
short_output: Optional[bool] = False,
|
||||
):
|
||||
all_dec = Decision.select().order_by(
|
||||
@ -135,16 +137,12 @@ async def get_decisions(
|
||||
if irunners_scored is not None:
|
||||
all_dec = all_dec.where(Decision.irunners_scored << irunners_scored)
|
||||
|
||||
if limit is not None:
|
||||
if limit < 1:
|
||||
limit = 1
|
||||
all_dec = all_dec.limit(limit)
|
||||
all_dec = all_dec.limit(limit)
|
||||
|
||||
return_dec = {
|
||||
"count": all_dec.count(),
|
||||
"decisions": [model_to_dict(x, recurse=not short_output) for x in all_dec],
|
||||
}
|
||||
db.close()
|
||||
return return_dec
|
||||
|
||||
|
||||
@ -169,7 +167,6 @@ async def patch_decision(
|
||||
|
||||
this_dec = Decision.get_or_none(Decision.id == decision_id)
|
||||
if this_dec is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Decision ID {decision_id} not found"
|
||||
)
|
||||
@ -195,10 +192,8 @@ async def patch_decision(
|
||||
|
||||
if this_dec.save() == 1:
|
||||
d_result = model_to_dict(this_dec)
|
||||
db.close()
|
||||
return d_result
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch decision {decision_id}"
|
||||
)
|
||||
@ -222,12 +217,11 @@ async def post_decisions(dec_list: DecisionList, token: str = Depends(oauth2_sch
|
||||
status_code=404, detail=f"Player ID {x.pitcher_id} not found"
|
||||
)
|
||||
|
||||
new_dec.append(x.dict())
|
||||
new_dec.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_dec, 10):
|
||||
Decision.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_dec)} decisions"
|
||||
|
||||
@ -241,13 +235,11 @@ async def delete_decision(decision_id: int, token: str = Depends(oauth2_scheme))
|
||||
|
||||
this_dec = Decision.get_or_none(Decision.id == decision_id)
|
||||
if this_dec is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Decision ID {decision_id} not found"
|
||||
)
|
||||
|
||||
count = this_dec.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Decision {decision_id} has been deleted"
|
||||
@ -266,11 +258,9 @@ async def delete_decisions_game(game_id: int, token: str = Depends(oauth2_scheme
|
||||
|
||||
this_game = StratGame.get_or_none(StratGame.id == game_id)
|
||||
if not this_game:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found")
|
||||
|
||||
count = Decision.delete().where(Decision.game == this_game).execute()
|
||||
db.close()
|
||||
|
||||
if count > 0:
|
||||
return f"Deleted {count} decisions matching Game ID {game_id}"
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -32,6 +34,8 @@ async def get_divisions(
|
||||
div_abbrev: Optional[str] = None,
|
||||
lg_name: Optional[str] = None,
|
||||
lg_abbrev: Optional[str] = None,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_divisions = Division.select().where(Division.season == season)
|
||||
|
||||
@ -44,11 +48,13 @@ async def get_divisions(
|
||||
if lg_abbrev is not None:
|
||||
all_divisions = all_divisions.where(Division.league_abbrev == lg_abbrev)
|
||||
|
||||
total_count = all_divisions.count()
|
||||
all_divisions = all_divisions.offset(offset).limit(limit)
|
||||
|
||||
return_div = {
|
||||
"count": all_divisions.count(),
|
||||
"count": total_count,
|
||||
"divisions": [model_to_dict(x) for x in all_divisions],
|
||||
}
|
||||
db.close()
|
||||
return return_div
|
||||
|
||||
|
||||
@ -57,13 +63,11 @@ async def get_divisions(
|
||||
async def get_one_division(division_id: int):
|
||||
this_div = Division.get_or_none(Division.id == division_id)
|
||||
if this_div is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Division ID {division_id} not found"
|
||||
)
|
||||
|
||||
r_div = model_to_dict(this_div)
|
||||
db.close()
|
||||
return r_div
|
||||
|
||||
|
||||
@ -83,7 +87,6 @@ async def patch_division(
|
||||
|
||||
this_div = Division.get_or_none(Division.id == division_id)
|
||||
if this_div is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Division ID {division_id} not found"
|
||||
)
|
||||
@ -99,10 +102,8 @@ async def patch_division(
|
||||
|
||||
if this_div.save() == 1:
|
||||
r_division = model_to_dict(this_div)
|
||||
db.close()
|
||||
return r_division
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch division {division_id}"
|
||||
)
|
||||
@ -117,14 +118,12 @@ async def post_division(
|
||||
logger.warning(f"post_division - Bad Token: {token}")
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
this_division = Division(**new_division.dict())
|
||||
this_division = Division(**new_division.model_dump())
|
||||
|
||||
if this_division.save() == 1:
|
||||
r_division = model_to_dict(this_division)
|
||||
db.close()
|
||||
return r_division
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(status_code=500, detail=f"Unable to post division")
|
||||
|
||||
|
||||
@ -137,13 +136,11 @@ async def delete_division(division_id: int, token: str = Depends(oauth2_scheme))
|
||||
|
||||
this_div = Division.get_or_none(Division.id == division_id)
|
||||
if this_div is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Division ID {division_id} not found"
|
||||
)
|
||||
|
||||
count = this_div.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Division {division_id} has been deleted"
|
||||
|
||||
@ -32,7 +32,6 @@ async def get_draftdata():
|
||||
|
||||
if draft_data is not None:
|
||||
r_data = model_to_dict(draft_data)
|
||||
db.close()
|
||||
return r_data
|
||||
|
||||
raise HTTPException(status_code=404, detail=f'No draft data found')
|
||||
@ -50,7 +49,6 @@ async def patch_draftdata(
|
||||
|
||||
draft_data = DraftData.get_or_none(DraftData.id == data_id)
|
||||
if draft_data is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f'No draft data found')
|
||||
|
||||
if currentpick is not None:
|
||||
@ -68,7 +66,6 @@ async def patch_draftdata(
|
||||
|
||||
saved = draft_data.save()
|
||||
r_data = model_to_dict(draft_data)
|
||||
db.close()
|
||||
|
||||
if saved == 1:
|
||||
return r_data
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -34,6 +36,8 @@ async def get_draftlist(
|
||||
season: Optional[int],
|
||||
team_id: list = Query(default=None),
|
||||
token: str = Depends(oauth2_scheme),
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
if not valid_token(token):
|
||||
logger.warning(f"get_draftlist - Bad Token: {token}")
|
||||
@ -46,9 +50,11 @@ async def get_draftlist(
|
||||
if team_id is not None:
|
||||
all_list = all_list.where(DraftList.team_id << team_id)
|
||||
|
||||
r_list = {"count": all_list.count(), "picks": [model_to_dict(x) for x in all_list]}
|
||||
total_count = all_list.count()
|
||||
all_list = all_list.offset(offset).limit(limit)
|
||||
|
||||
r_list = {"count": total_count, "picks": [model_to_dict(x) for x in all_list]}
|
||||
|
||||
db.close()
|
||||
return r_list
|
||||
|
||||
|
||||
@ -69,7 +75,6 @@ async def get_team_draftlist(team_id: int, token: str = Depends(oauth2_scheme)):
|
||||
"picks": [model_to_dict(x) for x in this_list],
|
||||
}
|
||||
|
||||
db.close()
|
||||
return r_list
|
||||
|
||||
|
||||
@ -93,13 +98,12 @@ async def post_draftlist(
|
||||
DraftList.delete().where(DraftList.team == this_team).execute()
|
||||
|
||||
for x in draft_list.draft_list:
|
||||
new_list.append(x.dict())
|
||||
new_list.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_list, 15):
|
||||
DraftList.insert_many(batch).on_conflict_ignore().execute()
|
||||
|
||||
db.close()
|
||||
return f"Inserted {len(new_list)} list values"
|
||||
|
||||
|
||||
@ -111,5 +115,4 @@ async def delete_draftlist(team_id: int, token: str = Depends(oauth2_scheme)):
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
count = DraftList.delete().where(DraftList.team_id == team_id).execute()
|
||||
db.close()
|
||||
return f"Deleted {count} list values"
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -50,7 +52,7 @@ async def get_picks(
|
||||
overall_end: Optional[int] = None,
|
||||
short_output: Optional[bool] = False,
|
||||
sort: Optional[str] = None,
|
||||
limit: Optional[int] = None,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
player_id: list = Query(default=None),
|
||||
player_taken: Optional[bool] = None,
|
||||
):
|
||||
@ -110,8 +112,7 @@ async def get_picks(
|
||||
all_picks = all_picks.where(DraftPick.overall <= overall_end)
|
||||
if player_taken is not None:
|
||||
all_picks = all_picks.where(DraftPick.player.is_null(not player_taken))
|
||||
if limit is not None:
|
||||
all_picks = all_picks.limit(limit)
|
||||
all_picks = all_picks.limit(limit)
|
||||
|
||||
if sort is not None:
|
||||
if sort == "order-asc":
|
||||
@ -123,7 +124,6 @@ async def get_picks(
|
||||
for line in all_picks:
|
||||
return_picks["picks"].append(model_to_dict(line, recurse=not short_output))
|
||||
|
||||
db.close()
|
||||
return return_picks
|
||||
|
||||
|
||||
@ -135,7 +135,6 @@ async def get_one_pick(pick_id: int, short_output: Optional[bool] = False):
|
||||
r_pick = model_to_dict(this_pick, recurse=not short_output)
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail=f"Pick ID {pick_id} not found")
|
||||
db.close()
|
||||
return r_pick
|
||||
|
||||
|
||||
@ -151,9 +150,8 @@ async def patch_pick(
|
||||
if DraftPick.get_or_none(DraftPick.id == pick_id) is None:
|
||||
raise HTTPException(status_code=404, detail=f"Pick ID {pick_id} not found")
|
||||
|
||||
DraftPick.update(**new_pick.dict()).where(DraftPick.id == pick_id).execute()
|
||||
DraftPick.update(**new_pick.model_dump()).where(DraftPick.id == pick_id).execute()
|
||||
r_pick = model_to_dict(DraftPick.get_by_id(pick_id))
|
||||
db.close()
|
||||
return r_pick
|
||||
|
||||
|
||||
@ -170,18 +168,16 @@ async def post_picks(p_list: DraftPickList, token: str = Depends(oauth2_scheme))
|
||||
DraftPick.season == pick.season, DraftPick.overall == pick.overall
|
||||
)
|
||||
if dupe:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Pick # {pick.overall} already exists for season {pick.season}",
|
||||
)
|
||||
|
||||
new_picks.append(pick.dict())
|
||||
new_picks.append(pick.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_picks, 15):
|
||||
DraftPick.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_picks)} picks"
|
||||
|
||||
@ -198,7 +194,6 @@ async def delete_pick(pick_id: int, token: str = Depends(oauth2_scheme)):
|
||||
raise HTTPException(status_code=404, detail=f"Pick ID {pick_id} not found")
|
||||
|
||||
count = this_pick.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Draft pick {pick_id} has been deleted"
|
||||
|
||||
@ -3,40 +3,58 @@ from typing import List, Optional, Literal
|
||||
import logging
|
||||
import pydantic
|
||||
|
||||
from ..db_engine import db, BattingStat, Team, Player, Current, model_to_dict, chunked, fn, per_season_weeks
|
||||
from ..dependencies import oauth2_scheme, valid_token, handle_db_errors
|
||||
|
||||
logger = logging.getLogger('discord_app')
|
||||
|
||||
router = APIRouter(
|
||||
prefix='/api/v3/fieldingstats',
|
||||
tags=['fieldingstats']
|
||||
from ..db_engine import (
|
||||
db,
|
||||
BattingStat,
|
||||
Team,
|
||||
Player,
|
||||
Current,
|
||||
model_to_dict,
|
||||
chunked,
|
||||
fn,
|
||||
per_season_weeks,
|
||||
)
|
||||
from ..dependencies import (
|
||||
oauth2_scheme,
|
||||
valid_token,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
|
||||
@router.get('')
|
||||
router = APIRouter(prefix="/api/v3/fieldingstats", tags=["fieldingstats"])
|
||||
|
||||
|
||||
@router.get("")
|
||||
@handle_db_errors
|
||||
async def get_fieldingstats(
|
||||
season: int, s_type: Optional[str] = 'regular', team_abbrev: list = Query(default=None),
|
||||
player_name: list = Query(default=None), player_id: list = Query(default=None),
|
||||
week_start: Optional[int] = None, week_end: Optional[int] = None, game_num: list = Query(default=None),
|
||||
position: list = Query(default=None), limit: Optional[int] = None, sort: Optional[str] = None,
|
||||
short_output: Optional[bool] = True):
|
||||
if 'post' in s_type.lower():
|
||||
season: int,
|
||||
s_type: Optional[str] = "regular",
|
||||
team_abbrev: list = Query(default=None),
|
||||
player_name: list = Query(default=None),
|
||||
player_id: list = Query(default=None),
|
||||
week_start: Optional[int] = None,
|
||||
week_end: Optional[int] = None,
|
||||
game_num: list = Query(default=None),
|
||||
position: list = Query(default=None),
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
sort: Optional[str] = None,
|
||||
short_output: Optional[bool] = True,
|
||||
):
|
||||
if "post" in s_type.lower():
|
||||
all_stats = BattingStat.post_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {'count': 0, 'stats': []}
|
||||
elif s_type.lower() in ['combined', 'total', 'all']:
|
||||
return {"count": 0, "stats": []}
|
||||
elif s_type.lower() in ["combined", "total", "all"]:
|
||||
all_stats = BattingStat.combined_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {'count': 0, 'stats': []}
|
||||
return {"count": 0, "stats": []}
|
||||
else:
|
||||
all_stats = BattingStat.regular_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {'count': 0, 'stats': []}
|
||||
return {"count": 0, "stats": []}
|
||||
|
||||
all_stats = all_stats.where(
|
||||
(BattingStat.xch > 0) | (BattingStat.pb > 0) | (BattingStat.sbc > 0)
|
||||
@ -51,7 +69,9 @@ async def get_fieldingstats(
|
||||
if player_id:
|
||||
all_stats = all_stats.where(BattingStat.player_id << player_id)
|
||||
else:
|
||||
p_query = Player.select_season(season).where(fn.Lower(Player.name) << [x.lower() for x in player_name])
|
||||
p_query = Player.select_season(season).where(
|
||||
fn.Lower(Player.name) << [x.lower() for x in player_name]
|
||||
)
|
||||
all_stats = all_stats.where(BattingStat.player << p_query)
|
||||
if game_num:
|
||||
all_stats = all_stats.where(BattingStat.game == game_num)
|
||||
@ -63,75 +83,93 @@ async def get_fieldingstats(
|
||||
if week_end is not None:
|
||||
end = min(week_end, end)
|
||||
if start > end:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f'Start week {start} is after end week {end} - cannot pull stats'
|
||||
detail=f"Start week {start} is after end week {end} - cannot pull stats",
|
||||
)
|
||||
all_stats = all_stats.where(
|
||||
(BattingStat.week >= start) & (BattingStat.week <= end)
|
||||
)
|
||||
all_stats = all_stats.where((BattingStat.week >= start) & (BattingStat.week <= end))
|
||||
|
||||
if limit:
|
||||
all_stats = all_stats.limit(limit)
|
||||
total_count = all_stats.count()
|
||||
all_stats = all_stats.limit(limit)
|
||||
if sort:
|
||||
if sort == 'newest':
|
||||
if sort == "newest":
|
||||
all_stats = all_stats.order_by(-BattingStat.week, -BattingStat.game)
|
||||
|
||||
return_stats = {
|
||||
'count': all_stats.count(),
|
||||
'stats': [{
|
||||
'player': x.player_id if short_output else model_to_dict(x.player, recurse=False),
|
||||
'team': x.team_id if short_output else model_to_dict(x.team, recurse=False),
|
||||
'pos': x.pos,
|
||||
'xch': x.xch,
|
||||
'xhit': x.xhit,
|
||||
'error': x.error,
|
||||
'pb': x.pb,
|
||||
'sbc': x.sbc,
|
||||
'csc': x.csc,
|
||||
'week': x.week,
|
||||
'game': x.game,
|
||||
'season': x.season
|
||||
} for x in all_stats]
|
||||
"count": total_count,
|
||||
"stats": [
|
||||
{
|
||||
"player": x.player_id
|
||||
if short_output
|
||||
else model_to_dict(x.player, recurse=False),
|
||||
"team": x.team_id
|
||||
if short_output
|
||||
else model_to_dict(x.team, recurse=False),
|
||||
"pos": x.pos,
|
||||
"xch": x.xch,
|
||||
"xhit": x.xhit,
|
||||
"error": x.error,
|
||||
"pb": x.pb,
|
||||
"sbc": x.sbc,
|
||||
"csc": x.csc,
|
||||
"week": x.week,
|
||||
"game": x.game,
|
||||
"season": x.season,
|
||||
}
|
||||
for x in all_stats
|
||||
],
|
||||
}
|
||||
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
|
||||
@router.get('/totals')
|
||||
@router.get("/totals")
|
||||
@handle_db_errors
|
||||
async def get_totalstats(
|
||||
season: int, s_type: Literal['regular', 'post', 'total', None] = None, team_abbrev: list = Query(default=None),
|
||||
team_id: list = Query(default=None), player_name: list = Query(default=None),
|
||||
week_start: Optional[int] = None, week_end: Optional[int] = None, game_num: list = Query(default=None),
|
||||
position: list = Query(default=None), sort: Optional[str] = None, player_id: list = Query(default=None),
|
||||
group_by: Literal['team', 'player', 'playerteam'] = 'player', short_output: Optional[bool] = False,
|
||||
min_ch: Optional[int] = 1, week: list = Query(default=None)):
|
||||
season: int,
|
||||
s_type: Literal["regular", "post", "total", None] = None,
|
||||
team_abbrev: list = Query(default=None),
|
||||
team_id: list = Query(default=None),
|
||||
player_name: list = Query(default=None),
|
||||
week_start: Optional[int] = None,
|
||||
week_end: Optional[int] = None,
|
||||
game_num: list = Query(default=None),
|
||||
position: list = Query(default=None),
|
||||
sort: Optional[str] = None,
|
||||
player_id: list = Query(default=None),
|
||||
group_by: Literal["team", "player", "playerteam"] = "player",
|
||||
short_output: Optional[bool] = False,
|
||||
min_ch: Optional[int] = 1,
|
||||
week: list = Query(default=None),
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
|
||||
# Build SELECT fields conditionally based on group_by to match GROUP BY exactly
|
||||
select_fields = []
|
||||
|
||||
if group_by == 'player':
|
||||
|
||||
if group_by == "player":
|
||||
select_fields = [BattingStat.player, BattingStat.pos]
|
||||
elif group_by == 'team':
|
||||
elif group_by == "team":
|
||||
select_fields = [BattingStat.team, BattingStat.pos]
|
||||
elif group_by == 'playerteam':
|
||||
elif group_by == "playerteam":
|
||||
select_fields = [BattingStat.player, BattingStat.team, BattingStat.pos]
|
||||
else:
|
||||
# Default case
|
||||
select_fields = [BattingStat.player, BattingStat.pos]
|
||||
|
||||
all_stats = (
|
||||
BattingStat
|
||||
.select(*select_fields,
|
||||
fn.SUM(BattingStat.xch).alias('sum_xch'),
|
||||
fn.SUM(BattingStat.xhit).alias('sum_xhit'), fn.SUM(BattingStat.error).alias('sum_error'),
|
||||
fn.SUM(BattingStat.pb).alias('sum_pb'), fn.SUM(BattingStat.sbc).alias('sum_sbc'),
|
||||
fn.SUM(BattingStat.csc).alias('sum_csc'))
|
||||
.where(BattingStat.season == season)
|
||||
.having(fn.SUM(BattingStat.xch) >= min_ch)
|
||||
BattingStat.select(
|
||||
*select_fields,
|
||||
fn.SUM(BattingStat.xch).alias("sum_xch"),
|
||||
fn.SUM(BattingStat.xhit).alias("sum_xhit"),
|
||||
fn.SUM(BattingStat.error).alias("sum_error"),
|
||||
fn.SUM(BattingStat.pb).alias("sum_pb"),
|
||||
fn.SUM(BattingStat.sbc).alias("sum_sbc"),
|
||||
fn.SUM(BattingStat.csc).alias("sum_csc"),
|
||||
)
|
||||
.where(BattingStat.season == season)
|
||||
.having(fn.SUM(BattingStat.xch) >= min_ch)
|
||||
)
|
||||
|
||||
if True in [s_type is not None, week_start is not None, week_end is not None]:
|
||||
@ -141,16 +179,20 @@ async def get_totalstats(
|
||||
elif week_start is not None or week_end is not None:
|
||||
if week_start is None or week_end is None:
|
||||
raise HTTPException(
|
||||
status_code=400, detail='Both week_start and week_end must be included if either is used.'
|
||||
status_code=400,
|
||||
detail="Both week_start and week_end must be included if either is used.",
|
||||
)
|
||||
weeks["start"] = week_start
|
||||
if week_end < weeks["start"]:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="week_end must be greater than or equal to week_start",
|
||||
)
|
||||
weeks['start'] = week_start
|
||||
if week_end < weeks['start']:
|
||||
raise HTTPException(status_code=400, detail='week_end must be greater than or equal to week_start')
|
||||
else:
|
||||
weeks['end'] = week_end
|
||||
weeks["end"] = week_end
|
||||
|
||||
all_stats = all_stats.where(
|
||||
(BattingStat.week >= weeks['start']) & (BattingStat.week <= weeks['end'])
|
||||
(BattingStat.week >= weeks["start"]) & (BattingStat.week <= weeks["end"])
|
||||
)
|
||||
|
||||
elif week is not None:
|
||||
@ -161,14 +203,20 @@ async def get_totalstats(
|
||||
if position is not None:
|
||||
p_list = [x.upper() for x in position]
|
||||
all_players = Player.select().where(
|
||||
(Player.pos_1 << p_list) | (Player.pos_2 << p_list) | (Player.pos_3 << p_list) | (Player.pos_4 << p_list) |
|
||||
(Player.pos_5 << p_list) | (Player.pos_6 << p_list) | (Player.pos_7 << p_list) | (Player.pos_8 << p_list)
|
||||
(Player.pos_1 << p_list)
|
||||
| (Player.pos_2 << p_list)
|
||||
| (Player.pos_3 << p_list)
|
||||
| (Player.pos_4 << p_list)
|
||||
| (Player.pos_5 << p_list)
|
||||
| (Player.pos_6 << p_list)
|
||||
| (Player.pos_7 << p_list)
|
||||
| (Player.pos_8 << p_list)
|
||||
)
|
||||
all_stats = all_stats.where(BattingStat.player << all_players)
|
||||
if sort is not None:
|
||||
if sort == 'player':
|
||||
if sort == "player":
|
||||
all_stats = all_stats.order_by(BattingStat.player)
|
||||
elif sort == 'team':
|
||||
elif sort == "team":
|
||||
all_stats = all_stats.order_by(BattingStat.team)
|
||||
if group_by is not None:
|
||||
# Use the same fields for GROUP BY as we used for SELECT
|
||||
@ -177,47 +225,56 @@ async def get_totalstats(
|
||||
all_teams = Team.select().where(Team.id << team_id)
|
||||
all_stats = all_stats.where(BattingStat.team << all_teams)
|
||||
elif team_abbrev is not None:
|
||||
all_teams = Team.select().where(fn.Lower(Team.abbrev) << [x.lower() for x in team_abbrev])
|
||||
all_teams = Team.select().where(
|
||||
fn.Lower(Team.abbrev) << [x.lower() for x in team_abbrev]
|
||||
)
|
||||
all_stats = all_stats.where(BattingStat.team << all_teams)
|
||||
|
||||
if player_name is not None:
|
||||
all_players = Player.select().where(fn.Lower(Player.name) << [x.lower() for x in player_name])
|
||||
all_players = Player.select().where(
|
||||
fn.Lower(Player.name) << [x.lower() for x in player_name]
|
||||
)
|
||||
all_stats = all_stats.where(BattingStat.player << all_players)
|
||||
elif player_id is not None:
|
||||
all_players = Player.select().where(Player.id << player_id)
|
||||
all_stats = all_stats.where(BattingStat.player << all_players)
|
||||
|
||||
return_stats = {
|
||||
'count': 0,
|
||||
'stats': []
|
||||
}
|
||||
|
||||
total_count = all_stats.count()
|
||||
all_stats = all_stats.offset(offset).limit(limit)
|
||||
|
||||
return_stats = {"count": total_count, "stats": []}
|
||||
|
||||
for x in all_stats:
|
||||
if x.sum_xch + x.sum_sbc <= 0:
|
||||
continue
|
||||
|
||||
# Handle player field based on grouping with safe access
|
||||
this_player = 'TOT'
|
||||
if 'player' in group_by and hasattr(x, 'player'):
|
||||
this_player = x.player_id if short_output else model_to_dict(x.player, recurse=False)
|
||||
|
||||
# Handle team field based on grouping with safe access
|
||||
this_team = 'TOT'
|
||||
if 'team' in group_by and hasattr(x, 'team'):
|
||||
this_team = x.team_id if short_output else model_to_dict(x.team, recurse=False)
|
||||
|
||||
return_stats['stats'].append({
|
||||
'player': this_player,
|
||||
'team': this_team,
|
||||
'pos': x.pos,
|
||||
'xch': x.sum_xch,
|
||||
'xhit': x.sum_xhit,
|
||||
'error': x.sum_error,
|
||||
'pb': x.sum_pb,
|
||||
'sbc': x.sum_sbc,
|
||||
'csc': x.sum_csc
|
||||
})
|
||||
|
||||
return_stats['count'] = len(return_stats['stats'])
|
||||
db.close()
|
||||
# Handle player field based on grouping with safe access
|
||||
this_player = "TOT"
|
||||
if "player" in group_by and hasattr(x, "player"):
|
||||
this_player = (
|
||||
x.player_id if short_output else model_to_dict(x.player, recurse=False)
|
||||
)
|
||||
|
||||
# Handle team field based on grouping with safe access
|
||||
this_team = "TOT"
|
||||
if "team" in group_by and hasattr(x, "team"):
|
||||
this_team = (
|
||||
x.team_id if short_output else model_to_dict(x.team, recurse=False)
|
||||
)
|
||||
|
||||
return_stats["stats"].append(
|
||||
{
|
||||
"player": this_player,
|
||||
"team": this_team,
|
||||
"pos": x.pos,
|
||||
"xch": x.sum_xch,
|
||||
"xhit": x.sum_xhit,
|
||||
"error": x.sum_error,
|
||||
"pb": x.sum_pb,
|
||||
"sbc": x.sum_sbc,
|
||||
"csc": x.sum_csc,
|
||||
}
|
||||
)
|
||||
|
||||
return_stats["count"] = len(return_stats["stats"])
|
||||
return return_stats
|
||||
|
||||
@ -138,8 +138,6 @@ async def get_help_commands(
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting help commands: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.post("/", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -187,8 +185,6 @@ async def create_help_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating help command: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.put("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -238,8 +234,6 @@ async def update_help_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating help command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.patch("/{command_id}/restore", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -277,8 +271,6 @@ async def restore_help_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error restoring help command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.delete("/{command_id}", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -309,8 +301,6 @@ async def delete_help_command_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting help command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/stats")
|
||||
@ -368,8 +358,6 @@ async def get_help_command_stats():
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting help command stats: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
# Special endpoints for Discord bot integration
|
||||
@ -402,8 +390,6 @@ async def get_help_command_by_name_endpoint(
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting help command by name '{command_name}': {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.patch("/by_name/{command_name}/view", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@ -439,8 +425,6 @@ async def increment_view_count(command_name: str, token: str = Depends(oauth2_sc
|
||||
except Exception as e:
|
||||
logger.error(f"Error incrementing view count for '{command_name}': {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/autocomplete")
|
||||
@ -470,8 +454,6 @@ async def get_help_names_for_autocomplete(
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting help names for autocomplete: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/{command_id}")
|
||||
@ -499,5 +481,3 @@ async def get_help_command(command_id: int):
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting help command {command_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -38,6 +40,8 @@ async def get_injuries(
|
||||
is_active: bool = None,
|
||||
short_output: bool = False,
|
||||
sort: Optional[str] = "start-asc",
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_injuries = Injury.select()
|
||||
|
||||
@ -64,11 +68,13 @@ async def get_injuries(
|
||||
elif sort == "start-desc":
|
||||
all_injuries = all_injuries.order_by(-Injury.start_week, -Injury.start_game)
|
||||
|
||||
total_count = all_injuries.count()
|
||||
all_injuries = all_injuries.offset(offset).limit(limit)
|
||||
|
||||
return_injuries = {
|
||||
"count": all_injuries.count(),
|
||||
"count": total_count,
|
||||
"injuries": [model_to_dict(x, recurse=not short_output) for x in all_injuries],
|
||||
}
|
||||
db.close()
|
||||
return return_injuries
|
||||
|
||||
|
||||
@ -85,7 +91,6 @@ async def patch_injury(
|
||||
|
||||
this_injury = Injury.get_or_none(Injury.id == injury_id)
|
||||
if this_injury is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Injury ID {injury_id} not found")
|
||||
|
||||
if is_active is not None:
|
||||
@ -93,10 +98,8 @@ async def patch_injury(
|
||||
|
||||
if this_injury.save() == 1:
|
||||
r_injury = model_to_dict(this_injury)
|
||||
db.close()
|
||||
return r_injury
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch injury {injury_id}"
|
||||
)
|
||||
@ -109,14 +112,12 @@ async def post_injury(new_injury: InjuryModel, token: str = Depends(oauth2_schem
|
||||
logger.warning(f"post_injury - Bad Token: {token}")
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
this_injury = Injury(**new_injury.dict())
|
||||
this_injury = Injury(**new_injury.model_dump())
|
||||
|
||||
if this_injury.save():
|
||||
r_injury = model_to_dict(this_injury)
|
||||
db.close()
|
||||
return r_injury
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(status_code=500, detail=f"Unable to post injury")
|
||||
|
||||
|
||||
@ -129,11 +130,9 @@ async def delete_injury(injury_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_injury = Injury.get_or_none(Injury.id == injury_id)
|
||||
if this_injury is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Injury ID {injury_id} not found")
|
||||
|
||||
count = this_injury.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Injury {injury_id} has been deleted"
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -34,6 +36,8 @@ async def get_keepers(
|
||||
team_id: list = Query(default=None),
|
||||
player_id: list = Query(default=None),
|
||||
short_output: bool = False,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_keepers = Keeper.select()
|
||||
|
||||
@ -44,11 +48,13 @@ async def get_keepers(
|
||||
if player_id is not None:
|
||||
all_keepers = all_keepers.where(Keeper.player_id << player_id)
|
||||
|
||||
total_count = all_keepers.count()
|
||||
all_keepers = all_keepers.offset(offset).limit(limit)
|
||||
|
||||
return_keepers = {
|
||||
"count": all_keepers.count(),
|
||||
"count": total_count,
|
||||
"keepers": [model_to_dict(x, recurse=not short_output) for x in all_keepers],
|
||||
}
|
||||
db.close()
|
||||
return return_keepers
|
||||
|
||||
|
||||
@ -78,10 +84,8 @@ async def patch_keeper(
|
||||
|
||||
if this_keeper.save():
|
||||
r_keeper = model_to_dict(this_keeper)
|
||||
db.close()
|
||||
return r_keeper
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch keeper {keeper_id}"
|
||||
)
|
||||
@ -96,12 +100,11 @@ async def post_keepers(k_list: KeeperList, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
new_keepers = []
|
||||
for keeper in k_list.keepers:
|
||||
new_keepers.append(keeper.dict())
|
||||
new_keepers.append(keeper.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_keepers, 14):
|
||||
Keeper.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_keepers)} keepers"
|
||||
|
||||
@ -118,7 +121,6 @@ async def delete_keeper(keeper_id: int, token: str = Depends(oauth2_scheme)):
|
||||
raise HTTPException(status_code=404, detail=f"Keeper ID {keeper_id} not found")
|
||||
|
||||
count = this_keeper.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Keeper ID {keeper_id} has been deleted"
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -29,6 +31,8 @@ async def get_managers(
|
||||
name: list = Query(default=None),
|
||||
active: Optional[bool] = None,
|
||||
short_output: Optional[bool] = False,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
if active is not None:
|
||||
current = Current.latest()
|
||||
@ -61,7 +65,9 @@ async def get_managers(
|
||||
i_mgr.append(z)
|
||||
final_mgrs = [model_to_dict(y, recurse=not short_output) for y in i_mgr]
|
||||
|
||||
return_managers = {"count": len(final_mgrs), "managers": final_mgrs}
|
||||
total_count = len(final_mgrs)
|
||||
final_mgrs = final_mgrs[offset : offset + limit]
|
||||
return_managers = {"count": total_count, "managers": final_mgrs}
|
||||
|
||||
else:
|
||||
all_managers = Manager.select()
|
||||
@ -69,14 +75,15 @@ async def get_managers(
|
||||
name_list = [x.lower() for x in name]
|
||||
all_managers = all_managers.where(fn.Lower(Manager.name) << name_list)
|
||||
|
||||
total_count = all_managers.count()
|
||||
all_managers = all_managers.offset(offset).limit(limit)
|
||||
return_managers = {
|
||||
"count": all_managers.count(),
|
||||
"count": total_count,
|
||||
"managers": [
|
||||
model_to_dict(x, recurse=not short_output) for x in all_managers
|
||||
],
|
||||
}
|
||||
|
||||
db.close()
|
||||
return return_managers
|
||||
|
||||
|
||||
@ -86,7 +93,6 @@ async def get_one_manager(manager_id: int, short_output: Optional[bool] = False)
|
||||
this_manager = Manager.get_or_none(Manager.id == manager_id)
|
||||
if this_manager is not None:
|
||||
r_manager = model_to_dict(this_manager, recurse=not short_output)
|
||||
db.close()
|
||||
return r_manager
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail=f"Manager {manager_id} not found")
|
||||
@ -108,7 +114,6 @@ async def patch_manager(
|
||||
|
||||
this_manager = Manager.get_or_none(Manager.id == manager_id)
|
||||
if this_manager is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Manager ID {manager_id} not found"
|
||||
)
|
||||
@ -124,10 +129,8 @@ async def patch_manager(
|
||||
|
||||
if this_manager.save() == 1:
|
||||
r_manager = model_to_dict(this_manager)
|
||||
db.close()
|
||||
return r_manager
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch manager {this_manager}"
|
||||
)
|
||||
@ -140,14 +143,12 @@ async def post_manager(new_manager: ManagerModel, token: str = Depends(oauth2_sc
|
||||
logger.warning(f"post_manager - Bad Token: {token}")
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
this_manager = Manager(**new_manager.dict())
|
||||
this_manager = Manager(**new_manager.model_dump())
|
||||
|
||||
if this_manager.save():
|
||||
r_manager = model_to_dict(this_manager)
|
||||
db.close()
|
||||
return r_manager
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to post manager {this_manager.name}"
|
||||
)
|
||||
@ -162,13 +163,11 @@ async def delete_manager(manager_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_manager = Manager.get_or_none(Manager.id == manager_id)
|
||||
if this_manager is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Manager ID {manager_id} not found"
|
||||
)
|
||||
|
||||
count = this_manager.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Manager {manager_id} has been deleted"
|
||||
|
||||
@ -19,6 +19,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -68,7 +70,7 @@ async def get_pitstats(
|
||||
week_start: Optional[int] = None,
|
||||
week_end: Optional[int] = None,
|
||||
game_num: list = Query(default=None),
|
||||
limit: Optional[int] = None,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
ip_min: Optional[float] = None,
|
||||
sort: Optional[str] = None,
|
||||
short_output: Optional[bool] = True,
|
||||
@ -76,17 +78,14 @@ async def get_pitstats(
|
||||
if "post" in s_type.lower():
|
||||
all_stats = PitchingStat.post_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {"count": 0, "stats": []}
|
||||
elif s_type.lower() in ["combined", "total", "all"]:
|
||||
all_stats = PitchingStat.combined_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {"count": 0, "stats": []}
|
||||
else:
|
||||
all_stats = PitchingStat.regular_season(season)
|
||||
if all_stats.count() == 0:
|
||||
db.close()
|
||||
return {"count": 0, "stats": []}
|
||||
|
||||
if team_abbrev is not None:
|
||||
@ -112,7 +111,6 @@ async def get_pitstats(
|
||||
if week_end is not None:
|
||||
end = min(week_end, end)
|
||||
if start > end:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Start week {start} is after end week {end} - cannot pull stats",
|
||||
@ -121,8 +119,7 @@ async def get_pitstats(
|
||||
(PitchingStat.week >= start) & (PitchingStat.week <= end)
|
||||
)
|
||||
|
||||
if limit:
|
||||
all_stats = all_stats.limit(limit)
|
||||
all_stats = all_stats.limit(limit)
|
||||
if sort:
|
||||
if sort == "newest":
|
||||
all_stats = all_stats.order_by(-PitchingStat.week, -PitchingStat.game)
|
||||
@ -132,7 +129,6 @@ async def get_pitstats(
|
||||
"stats": [model_to_dict(x, recurse=not short_output) for x in all_stats],
|
||||
}
|
||||
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
|
||||
@ -154,6 +150,8 @@ async def get_totalstats(
|
||||
short_output: Optional[bool] = False,
|
||||
group_by: Literal["team", "player", "playerteam"] = "player",
|
||||
week: list = Query(default=None),
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
if sum(1 for x in [s_type, (week_start or week_end), week] if x is not None) > 1:
|
||||
raise HTTPException(
|
||||
@ -259,7 +257,10 @@ async def get_totalstats(
|
||||
all_players = Player.select().where(Player.id << player_id)
|
||||
all_stats = all_stats.where(PitchingStat.player << all_players)
|
||||
|
||||
return_stats = {"count": all_stats.count(), "stats": []}
|
||||
total_count = all_stats.count()
|
||||
all_stats = all_stats.offset(offset).limit(limit)
|
||||
|
||||
return_stats = {"count": total_count, "stats": []}
|
||||
|
||||
for x in all_stats:
|
||||
# Handle player field based on grouping with safe access
|
||||
@ -301,7 +302,6 @@ async def get_totalstats(
|
||||
"bsv": x.sum_bsv,
|
||||
}
|
||||
)
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
|
||||
@ -317,9 +317,10 @@ async def patch_pitstats(
|
||||
if PitchingStat.get_or_none(PitchingStat.id == stat_id) is None:
|
||||
raise HTTPException(status_code=404, detail=f"Stat ID {stat_id} not found")
|
||||
|
||||
PitchingStat.update(**new_stats.dict()).where(PitchingStat.id == stat_id).execute()
|
||||
PitchingStat.update(**new_stats.model_dump()).where(
|
||||
PitchingStat.id == stat_id
|
||||
).execute()
|
||||
r_stat = model_to_dict(PitchingStat.get_by_id(stat_id))
|
||||
db.close()
|
||||
return r_stat
|
||||
|
||||
|
||||
@ -344,11 +345,10 @@ async def post_pitstats(s_list: PitStatList, token: str = Depends(oauth2_scheme)
|
||||
status_code=404, detail=f"Player ID {x.player_id} not found"
|
||||
)
|
||||
|
||||
all_stats.append(PitchingStat(**x.dict()))
|
||||
all_stats.append(PitchingStat(**x.model_dump()))
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(all_stats, 15):
|
||||
PitchingStat.insert_many(batch).on_conflict_ignore().execute()
|
||||
|
||||
db.close()
|
||||
return f"Added {len(all_stats)} batting lines"
|
||||
|
||||
@ -6,7 +6,11 @@ Thin HTTP layer using PlayerService for business logic.
|
||||
from fastapi import APIRouter, Query, Response, Depends
|
||||
from typing import Optional, List
|
||||
|
||||
from ..dependencies import oauth2_scheme, cache_result, handle_db_errors
|
||||
from ..dependencies import (
|
||||
oauth2_scheme,
|
||||
cache_result,
|
||||
handle_db_errors,
|
||||
)
|
||||
from ..services.base import BaseService
|
||||
from ..services.player_service import PlayerService
|
||||
|
||||
@ -24,9 +28,7 @@ async def get_players(
|
||||
strat_code: list = Query(default=None),
|
||||
is_injured: Optional[bool] = None,
|
||||
sort: Optional[str] = None,
|
||||
limit: Optional[int] = Query(
|
||||
default=None, ge=1, description="Maximum number of results to return"
|
||||
),
|
||||
limit: Optional[int] = Query(default=None, ge=1),
|
||||
offset: Optional[int] = Query(
|
||||
default=None, ge=0, description="Number of results to skip for pagination"
|
||||
),
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -42,6 +44,8 @@ async def get_results(
|
||||
away_abbrev: list = Query(default=None),
|
||||
home_abbrev: list = Query(default=None),
|
||||
short_output: Optional[bool] = False,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_results = Result.select_season(season)
|
||||
|
||||
@ -74,11 +78,13 @@ async def get_results(
|
||||
if week_end is not None:
|
||||
all_results = all_results.where(Result.week <= week_end)
|
||||
|
||||
total_count = all_results.count()
|
||||
all_results = all_results.offset(offset).limit(limit)
|
||||
|
||||
return_results = {
|
||||
"count": all_results.count(),
|
||||
"count": total_count,
|
||||
"results": [model_to_dict(x, recurse=not short_output) for x in all_results],
|
||||
}
|
||||
db.close()
|
||||
return return_results
|
||||
|
||||
|
||||
@ -90,7 +96,6 @@ async def get_one_result(result_id: int, short_output: Optional[bool] = False):
|
||||
r_result = model_to_dict(this_result, recurse=not short_output)
|
||||
else:
|
||||
r_result = None
|
||||
db.close()
|
||||
return r_result
|
||||
|
||||
|
||||
@ -142,10 +147,8 @@ async def patch_result(
|
||||
|
||||
if this_result.save() == 1:
|
||||
r_result = model_to_dict(this_result)
|
||||
db.close()
|
||||
return r_result
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch result {result_id}"
|
||||
)
|
||||
@ -180,12 +183,11 @@ async def post_results(result_list: ResultList, token: str = Depends(oauth2_sche
|
||||
status_code=404, detail=f"Team ID {x.hometeam_id} not found"
|
||||
)
|
||||
|
||||
new_results.append(x.dict())
|
||||
new_results.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_results, 15):
|
||||
Result.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_results)} results"
|
||||
|
||||
@ -199,11 +201,9 @@ async def delete_result(result_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_result = Result.get_or_none(Result.id == result_id)
|
||||
if not this_result:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Result ID {result_id} not found")
|
||||
|
||||
count = this_result.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Result {result_id} has been deleted"
|
||||
|
||||
@ -12,6 +12,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -44,6 +46,8 @@ async def get_players(
|
||||
key_mlbam: list = Query(default=None),
|
||||
sort: Optional[str] = None,
|
||||
csv: Optional[bool] = False,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_players = SbaPlayer.select()
|
||||
|
||||
@ -98,14 +102,15 @@ async def get_players(
|
||||
|
||||
if csv:
|
||||
return_val = query_to_csv(all_players)
|
||||
db.close()
|
||||
return Response(content=return_val, media_type="text/csv")
|
||||
|
||||
total_count = all_players.count()
|
||||
all_players = all_players.offset(offset).limit(limit)
|
||||
|
||||
return_val = {
|
||||
"count": all_players.count(),
|
||||
"count": total_count,
|
||||
"players": [model_to_dict(x) for x in all_players],
|
||||
}
|
||||
db.close()
|
||||
return return_val
|
||||
|
||||
|
||||
@ -114,13 +119,11 @@ async def get_players(
|
||||
async def get_one_player(player_id: int):
|
||||
this_player = SbaPlayer.get_or_none(SbaPlayer.id == player_id)
|
||||
if this_player is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"SbaPlayer id {player_id} not found"
|
||||
)
|
||||
|
||||
r_data = model_to_dict(this_player)
|
||||
db.close()
|
||||
return r_data
|
||||
|
||||
|
||||
@ -138,7 +141,6 @@ async def patch_player(
|
||||
):
|
||||
if not valid_token(token):
|
||||
logging.warning(f"Bad Token: {token}")
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail="You are not authorized to patch mlb players. This event has been logged.",
|
||||
@ -146,7 +148,6 @@ async def patch_player(
|
||||
|
||||
this_player = SbaPlayer.get_or_none(SbaPlayer.id == player_id)
|
||||
if this_player is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"SbaPlayer id {player_id} not found"
|
||||
)
|
||||
@ -166,10 +167,8 @@ async def patch_player(
|
||||
|
||||
if this_player.save() == 1:
|
||||
return_val = model_to_dict(this_player)
|
||||
db.close()
|
||||
return return_val
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=418,
|
||||
detail="Well slap my ass and call me a teapot; I could not save that player",
|
||||
@ -181,7 +180,6 @@ async def patch_player(
|
||||
async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme)):
|
||||
if not valid_token(token):
|
||||
logging.warning(f"Bad Token: {token}")
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail="You are not authorized to post mlb players. This event has been logged.",
|
||||
@ -200,7 +198,6 @@ async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme))
|
||||
)
|
||||
if dupes.count() > 0:
|
||||
logger.error(f"Found a dupe for {x}")
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"{x.first_name} {x.last_name} has a key already in the database",
|
||||
@ -211,7 +208,6 @@ async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme))
|
||||
with db.atomic():
|
||||
for batch in chunked(new_players, 15):
|
||||
SbaPlayer.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_players)} new MLB players"
|
||||
|
||||
@ -221,7 +217,6 @@ async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme))
|
||||
async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_scheme)):
|
||||
if not valid_token(token):
|
||||
logging.warning(f"Bad Token: {token}")
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail="You are not authorized to post mlb players. This event has been logged.",
|
||||
@ -236,20 +231,17 @@ async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_sc
|
||||
logging.info(f"POST /SbaPlayers/one - dupes found:")
|
||||
for x in dupes:
|
||||
logging.info(f"{x}")
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"{player.first_name} {player.last_name} has a key already in the database",
|
||||
)
|
||||
|
||||
new_player = SbaPlayer(**player.dict())
|
||||
new_player = SbaPlayer(**player.model_dump())
|
||||
saved = new_player.save()
|
||||
if saved == 1:
|
||||
return_val = model_to_dict(new_player)
|
||||
db.close()
|
||||
return return_val
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=418,
|
||||
detail="Well slap my ass and call me a teapot; I could not save that player",
|
||||
@ -261,7 +253,6 @@ async def post_one_player(player: SbaPlayerModel, token: str = Depends(oauth2_sc
|
||||
async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)):
|
||||
if not valid_token(token):
|
||||
logging.warning(f"Bad Token: {token}")
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail="You are not authorized to delete mlb players. This event has been logged.",
|
||||
@ -269,13 +260,11 @@ async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_player = SbaPlayer.get_or_none(SbaPlayer.id == player_id)
|
||||
if this_player is None:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"SbaPlayer id {player_id} not found"
|
||||
)
|
||||
|
||||
count = this_player.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Player {player_id} has been deleted"
|
||||
|
||||
@ -9,6 +9,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -38,6 +40,8 @@ async def get_schedules(
|
||||
week_start: Optional[int] = None,
|
||||
week_end: Optional[int] = None,
|
||||
short_output: Optional[bool] = True,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
all_sched = Schedule.select_season(season)
|
||||
|
||||
@ -69,11 +73,13 @@ async def get_schedules(
|
||||
|
||||
all_sched = all_sched.order_by(Schedule.id)
|
||||
|
||||
total_count = all_sched.count()
|
||||
all_sched = all_sched.offset(offset).limit(limit)
|
||||
|
||||
return_sched = {
|
||||
"count": all_sched.count(),
|
||||
"count": total_count,
|
||||
"schedules": [model_to_dict(x, recurse=not short_output) for x in all_sched],
|
||||
}
|
||||
db.close()
|
||||
return return_sched
|
||||
|
||||
|
||||
@ -85,7 +91,6 @@ async def get_one_schedule(schedule_id: int):
|
||||
r_sched = model_to_dict(this_sched)
|
||||
else:
|
||||
r_sched = None
|
||||
db.close()
|
||||
return r_sched
|
||||
|
||||
|
||||
@ -127,10 +132,8 @@ async def patch_schedule(
|
||||
|
||||
if this_sched.save() == 1:
|
||||
r_sched = model_to_dict(this_sched)
|
||||
db.close()
|
||||
return r_sched
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Unable to patch schedule {schedule_id}"
|
||||
)
|
||||
@ -165,12 +168,11 @@ async def post_schedules(sched_list: ScheduleList, token: str = Depends(oauth2_s
|
||||
status_code=404, detail=f"Team ID {x.hometeam_id} not found"
|
||||
)
|
||||
|
||||
new_sched.append(x.dict())
|
||||
new_sched.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_sched, 15):
|
||||
Schedule.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_sched)} schedules"
|
||||
|
||||
@ -189,7 +191,6 @@ async def delete_schedule(schedule_id: int, token: str = Depends(oauth2_scheme))
|
||||
)
|
||||
|
||||
count = this_sched.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Schedule {this_sched} has been deleted"
|
||||
|
||||
@ -8,6 +8,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -23,6 +25,8 @@ async def get_standings(
|
||||
league_abbrev: Optional[str] = None,
|
||||
division_abbrev: Optional[str] = None,
|
||||
short_output: Optional[bool] = False,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
standings = Standings.select_season(season)
|
||||
|
||||
@ -57,12 +61,14 @@ async def get_standings(
|
||||
div_teams = [x for x in standings]
|
||||
div_teams.sort(key=lambda team: win_pct(team), reverse=True)
|
||||
|
||||
total_count = len(div_teams)
|
||||
div_teams = div_teams[offset : offset + limit]
|
||||
|
||||
return_standings = {
|
||||
"count": len(div_teams),
|
||||
"count": total_count,
|
||||
"standings": [model_to_dict(x, recurse=not short_output) for x in div_teams],
|
||||
}
|
||||
|
||||
db.close()
|
||||
return return_standings
|
||||
|
||||
|
||||
@ -81,7 +87,7 @@ async def get_team_standings(team_id: int):
|
||||
@router.patch("/{stan_id}", include_in_schema=PRIVATE_IN_SCHEMA)
|
||||
@handle_db_errors
|
||||
async def patch_standings(
|
||||
stan_id,
|
||||
stan_id: int,
|
||||
wins: Optional[int] = None,
|
||||
losses: Optional[int] = None,
|
||||
token: str = Depends(oauth2_scheme),
|
||||
@ -93,7 +99,6 @@ async def patch_standings(
|
||||
try:
|
||||
this_stan = Standings.get_by_id(stan_id)
|
||||
except Exception as e:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"No team found with id {stan_id}")
|
||||
|
||||
if wins:
|
||||
@ -102,7 +107,6 @@ async def patch_standings(
|
||||
this_stan.losses = losses
|
||||
|
||||
this_stan.save()
|
||||
db.close()
|
||||
|
||||
return model_to_dict(this_stan)
|
||||
|
||||
@ -122,7 +126,6 @@ async def post_standings(season: int, token: str = Depends(oauth2_scheme)):
|
||||
with db.atomic():
|
||||
for batch in chunked(new_teams, 16):
|
||||
Standings.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_teams)} standings"
|
||||
|
||||
@ -135,7 +138,6 @@ async def recalculate_standings(season: int, token: str = Depends(oauth2_scheme)
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
code = Standings.recalculate(season)
|
||||
db.close()
|
||||
if code == 69:
|
||||
raise HTTPException(status_code=500, detail=f"Error recreating Standings rows")
|
||||
return f"Just recalculated standings for season {season}"
|
||||
|
||||
@ -13,6 +13,7 @@ from ..dependencies import (
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
update_season_batting_stats,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -59,6 +60,8 @@ async def get_games(
|
||||
division_id: Optional[int] = None,
|
||||
short_output: Optional[bool] = False,
|
||||
sort: Optional[str] = None,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=1000),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
) -> Any:
|
||||
all_games = StratGame.select()
|
||||
|
||||
@ -119,11 +122,13 @@ async def get_games(
|
||||
StratGame.season, StratGame.week, StratGame.game_num
|
||||
)
|
||||
|
||||
total_count = all_games.count()
|
||||
all_games = all_games.offset(offset).limit(limit)
|
||||
|
||||
return_games = {
|
||||
"count": all_games.count(),
|
||||
"count": total_count,
|
||||
"games": [model_to_dict(x, recurse=not short_output) for x in all_games],
|
||||
}
|
||||
db.close()
|
||||
return return_games
|
||||
|
||||
|
||||
@ -132,11 +137,9 @@ async def get_games(
|
||||
async def get_one_game(game_id: int) -> Any:
|
||||
this_game = StratGame.get_or_none(StratGame.id == game_id)
|
||||
if not this_game:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
|
||||
|
||||
g_result = model_to_dict(this_game)
|
||||
db.close()
|
||||
return g_result
|
||||
|
||||
|
||||
@ -158,7 +161,6 @@ async def patch_game(
|
||||
|
||||
this_game = StratGame.get_or_none(StratGame.id == game_id)
|
||||
if not this_game:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
|
||||
|
||||
if game_num is not None:
|
||||
@ -248,12 +250,11 @@ async def post_games(game_list: GameList, token: str = Depends(oauth2_scheme)) -
|
||||
status_code=404, detail=f"Team ID {x.home_team_id} not found"
|
||||
)
|
||||
|
||||
new_games.append(x.dict())
|
||||
new_games.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_games, 16):
|
||||
StratGame.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_games)} games"
|
||||
|
||||
@ -267,7 +268,6 @@ async def wipe_game(game_id: int, token: str = Depends(oauth2_scheme)) -> Any:
|
||||
|
||||
this_game = StratGame.get_or_none(StratGame.id == game_id)
|
||||
if not this_game:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
|
||||
|
||||
this_game.away_score = None
|
||||
@ -278,10 +278,8 @@ async def wipe_game(game_id: int, token: str = Depends(oauth2_scheme)) -> Any:
|
||||
|
||||
if this_game.save() == 1:
|
||||
g_result = model_to_dict(this_game)
|
||||
db.close()
|
||||
return g_result
|
||||
else:
|
||||
db.close()
|
||||
raise HTTPException(status_code=500, detail=f"Unable to wipe game {game_id}")
|
||||
|
||||
|
||||
@ -294,11 +292,9 @@ async def delete_game(game_id: int, token: str = Depends(oauth2_scheme)) -> Any:
|
||||
|
||||
this_game = StratGame.get_or_none(StratGame.id == game_id)
|
||||
if not this_game:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"StratGame ID {game_id} not found")
|
||||
|
||||
count = this_game.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"StratGame {game_id} has been deleted"
|
||||
|
||||
@ -13,7 +13,13 @@ from ...db_engine import (
|
||||
fn,
|
||||
model_to_dict,
|
||||
)
|
||||
from ...dependencies import add_cache_headers, cache_result, handle_db_errors
|
||||
from ...dependencies import (
|
||||
add_cache_headers,
|
||||
cache_result,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
from .common import build_season_games
|
||||
|
||||
router = APIRouter()
|
||||
@ -52,7 +58,7 @@ async def get_batting_totals(
|
||||
risp: Optional[bool] = None,
|
||||
inning: list = Query(default=None),
|
||||
sort: Optional[str] = None,
|
||||
limit: Optional[int] = 200,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
short_output: Optional[bool] = False,
|
||||
page_num: Optional[int] = 1,
|
||||
week_start: Optional[int] = None,
|
||||
@ -423,8 +429,6 @@ async def get_batting_totals(
|
||||
run_plays = run_plays.order_by(StratPlay.game.asc())
|
||||
# For other group_by values, skip game_id/play_num sorting since they're not in GROUP BY
|
||||
|
||||
if limit < 1:
|
||||
limit = 1
|
||||
bat_plays = bat_plays.paginate(page_num, limit)
|
||||
|
||||
logger.info(f"bat_plays query: {bat_plays}")
|
||||
@ -594,5 +598,4 @@ async def get_batting_totals(
|
||||
}
|
||||
)
|
||||
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
@ -20,10 +20,8 @@ logger = logging.getLogger("discord_app")
|
||||
@handle_db_errors
|
||||
async def get_one_play(play_id: int):
|
||||
if StratPlay.get_or_none(StratPlay.id == play_id) is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found")
|
||||
r_play = model_to_dict(StratPlay.get_by_id(play_id))
|
||||
db.close()
|
||||
return r_play
|
||||
|
||||
|
||||
@ -37,12 +35,10 @@ async def patch_play(
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
if StratPlay.get_or_none(StratPlay.id == play_id) is None:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found")
|
||||
|
||||
StratPlay.update(**new_play.dict()).where(StratPlay.id == play_id).execute()
|
||||
StratPlay.update(**new_play.model_dump()).where(StratPlay.id == play_id).execute()
|
||||
r_play = model_to_dict(StratPlay.get_by_id(play_id))
|
||||
db.close()
|
||||
return r_play
|
||||
|
||||
|
||||
@ -88,12 +84,11 @@ async def post_plays(p_list: PlayList, token: str = Depends(oauth2_scheme)):
|
||||
if this_play.pa == 0:
|
||||
this_play.batter_final = None
|
||||
|
||||
new_plays.append(this_play.dict())
|
||||
new_plays.append(this_play.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(new_plays, 20):
|
||||
StratPlay.insert_many(batch).on_conflict_ignore().execute()
|
||||
db.close()
|
||||
|
||||
return f"Inserted {len(new_plays)} plays"
|
||||
|
||||
@ -107,11 +102,9 @@ async def delete_play(play_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_play = StratPlay.get_or_none(StratPlay.id == play_id)
|
||||
if not this_play:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Play ID {play_id} not found")
|
||||
|
||||
count = this_play.delete_instance()
|
||||
db.close()
|
||||
|
||||
if count == 1:
|
||||
return f"Play {play_id} has been deleted"
|
||||
@ -130,11 +123,9 @@ async def delete_plays_game(game_id: int, token: str = Depends(oauth2_scheme)):
|
||||
|
||||
this_game = StratGame.get_or_none(StratGame.id == game_id)
|
||||
if not this_game:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Game ID {game_id} not found")
|
||||
|
||||
count = StratPlay.delete().where(StratPlay.game == this_game).execute()
|
||||
db.close()
|
||||
|
||||
if count > 0:
|
||||
return f"Deleted {count} plays matching Game ID {game_id}"
|
||||
@ -155,5 +146,4 @@ async def post_erun_check(token: str = Depends(oauth2_scheme)):
|
||||
(StratPlay.e_run == 1) & (StratPlay.run == 0)
|
||||
)
|
||||
count = all_plays.execute()
|
||||
db.close()
|
||||
return count
|
||||
|
||||
@ -13,7 +13,13 @@ from ...db_engine import (
|
||||
fn,
|
||||
SQL,
|
||||
)
|
||||
from ...dependencies import handle_db_errors, add_cache_headers, cache_result
|
||||
from ...dependencies import (
|
||||
handle_db_errors,
|
||||
add_cache_headers,
|
||||
cache_result,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
from .common import build_season_games
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -51,7 +57,7 @@ async def get_fielding_totals(
|
||||
team_id: list = Query(default=None),
|
||||
manager_id: list = Query(default=None),
|
||||
sort: Optional[str] = None,
|
||||
limit: Optional[int] = 200,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
short_output: Optional[bool] = False,
|
||||
page_num: Optional[int] = 1,
|
||||
):
|
||||
@ -237,8 +243,6 @@ async def get_fielding_totals(
|
||||
def_plays = def_plays.order_by(StratPlay.game.asc())
|
||||
# For other group_by values, skip game_id/play_num sorting since they're not in GROUP BY
|
||||
|
||||
if limit < 1:
|
||||
limit = 1
|
||||
def_plays = def_plays.paginate(page_num, limit)
|
||||
|
||||
logger.info(f"def_plays query: {def_plays}")
|
||||
@ -361,5 +365,4 @@ async def get_fielding_totals(
|
||||
"week": this_week,
|
||||
}
|
||||
)
|
||||
db.close()
|
||||
return return_stats
|
||||
|
||||
@ -16,7 +16,13 @@ from ...db_engine import (
|
||||
SQL,
|
||||
complex_data_to_csv,
|
||||
)
|
||||
from ...dependencies import handle_db_errors, add_cache_headers, cache_result
|
||||
from ...dependencies import (
|
||||
handle_db_errors,
|
||||
add_cache_headers,
|
||||
cache_result,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
from .common import build_season_games
|
||||
|
||||
router = APIRouter()
|
||||
@ -51,7 +57,7 @@ async def get_pitching_totals(
|
||||
risp: Optional[bool] = None,
|
||||
inning: list = Query(default=None),
|
||||
sort: Optional[str] = None,
|
||||
limit: Optional[int] = 200,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
short_output: Optional[bool] = False,
|
||||
csv: Optional[bool] = False,
|
||||
page_num: Optional[int] = 1,
|
||||
@ -164,8 +170,6 @@ async def get_pitching_totals(
|
||||
if group_by in ["playergame", "teamgame"]:
|
||||
pitch_plays = pitch_plays.order_by(StratPlay.game.asc())
|
||||
|
||||
if limit < 1:
|
||||
limit = 1
|
||||
pitch_plays = pitch_plays.paginate(page_num, limit)
|
||||
|
||||
# Execute the Peewee query
|
||||
@ -348,7 +352,6 @@ async def get_pitching_totals(
|
||||
)
|
||||
|
||||
return_stats["count"] = len(return_stats["stats"])
|
||||
db.close()
|
||||
if csv:
|
||||
return Response(
|
||||
content=complex_data_to_csv(return_stats["stats"]), media_type="text/csv"
|
||||
|
||||
@ -16,6 +16,8 @@ from ...dependencies import (
|
||||
handle_db_errors,
|
||||
add_cache_headers,
|
||||
cache_result,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -70,7 +72,7 @@ async def get_plays(
|
||||
pitcher_team_id: list = Query(default=None),
|
||||
short_output: Optional[bool] = False,
|
||||
sort: Optional[str] = None,
|
||||
limit: Optional[int] = 200,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
page_num: Optional[int] = 1,
|
||||
s_type: Literal["regular", "post", "total", None] = None,
|
||||
):
|
||||
@ -185,8 +187,6 @@ async def get_plays(
|
||||
season_games = season_games.where(StratGame.week > 18)
|
||||
all_plays = all_plays.where(StratPlay.game << season_games)
|
||||
|
||||
if limit < 1:
|
||||
limit = 1
|
||||
bat_plays = all_plays.paginate(page_num, limit)
|
||||
|
||||
if sort == "wpa-desc":
|
||||
@ -210,5 +210,4 @@ async def get_plays(
|
||||
"count": all_plays.count(),
|
||||
"plays": [model_to_dict(x, recurse=not short_output) for x in all_plays],
|
||||
}
|
||||
db.close()
|
||||
return return_plays
|
||||
|
||||
@ -11,6 +11,8 @@ from ..dependencies import (
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
cache_result,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
from ..services.base import BaseService
|
||||
from ..services.team_service import TeamService
|
||||
|
||||
@ -10,6 +10,8 @@ from ..dependencies import (
|
||||
valid_token,
|
||||
PRIVATE_IN_SCHEMA,
|
||||
handle_db_errors,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -36,7 +38,7 @@ class TransactionList(pydantic.BaseModel):
|
||||
@router.get("")
|
||||
@handle_db_errors
|
||||
async def get_transactions(
|
||||
season,
|
||||
season: int,
|
||||
team_abbrev: list = Query(default=None),
|
||||
week_start: Optional[int] = 0,
|
||||
week_end: Optional[int] = None,
|
||||
@ -45,8 +47,9 @@ async def get_transactions(
|
||||
player_name: list = Query(default=None),
|
||||
player_id: list = Query(default=None),
|
||||
move_id: Optional[str] = None,
|
||||
is_trade: Optional[bool] = None,
|
||||
short_output: Optional[bool] = False,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
):
|
||||
if season:
|
||||
transactions = Transaction.select_season(season)
|
||||
@ -84,21 +87,20 @@ async def get_transactions(
|
||||
else:
|
||||
transactions = transactions.where(Transaction.frozen == 0)
|
||||
|
||||
if is_trade is not None:
|
||||
raise HTTPException(
|
||||
status_code=501, detail="The is_trade parameter is not implemented, yet"
|
||||
)
|
||||
|
||||
transactions = transactions.order_by(-Transaction.week, Transaction.moveid)
|
||||
|
||||
total_count = transactions.count()
|
||||
transactions = transactions.offset(offset).limit(limit)
|
||||
|
||||
return_trans = {
|
||||
"count": transactions.count(),
|
||||
"count": total_count,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
"transactions": [
|
||||
model_to_dict(x, recurse=not short_output) for x in transactions
|
||||
],
|
||||
}
|
||||
|
||||
db.close()
|
||||
return return_trans
|
||||
|
||||
|
||||
@ -116,7 +118,6 @@ async def patch_transactions(
|
||||
|
||||
these_moves = Transaction.select().where(Transaction.moveid == move_id)
|
||||
if these_moves.count() == 0:
|
||||
db.close()
|
||||
raise HTTPException(status_code=404, detail=f"Move ID {move_id} not found")
|
||||
|
||||
if frozen is not None:
|
||||
@ -128,7 +129,6 @@ async def patch_transactions(
|
||||
x.cancelled = cancelled
|
||||
x.save()
|
||||
|
||||
db.close()
|
||||
return f"Updated {these_moves.count()} transactions"
|
||||
|
||||
|
||||
@ -172,13 +172,12 @@ async def post_transactions(
|
||||
status_code=404, detail=f"Player ID {x.player_id} not found"
|
||||
)
|
||||
|
||||
all_moves.append(x.dict())
|
||||
all_moves.append(x.model_dump())
|
||||
|
||||
with db.atomic():
|
||||
for batch in chunked(all_moves, 15):
|
||||
Transaction.insert_many(batch).on_conflict_ignore().execute()
|
||||
|
||||
db.close()
|
||||
return f"{len(all_moves)} transactions have been added"
|
||||
|
||||
|
||||
@ -192,7 +191,6 @@ async def delete_transactions(move_id, token: str = Depends(oauth2_scheme)):
|
||||
delete_query = Transaction.delete().where(Transaction.moveid == move_id)
|
||||
|
||||
count = delete_query.execute()
|
||||
db.close()
|
||||
if count > 0:
|
||||
return f"Removed {count} transactions"
|
||||
else:
|
||||
|
||||
@ -26,6 +26,8 @@ from ..dependencies import (
|
||||
update_season_batting_stats,
|
||||
update_season_pitching_stats,
|
||||
get_cache_stats,
|
||||
MAX_LIMIT,
|
||||
DEFAULT_LIMIT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("discord_app")
|
||||
@ -72,7 +74,7 @@ async def get_season_batting_stats(
|
||||
"cs",
|
||||
] = "woba", # Sort field
|
||||
sort_order: Literal["asc", "desc"] = "desc", # asc or desc
|
||||
limit: Optional[int] = 200,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = 0,
|
||||
csv: Optional[bool] = False,
|
||||
):
|
||||
@ -218,7 +220,7 @@ async def get_season_pitching_stats(
|
||||
"re24",
|
||||
] = "era", # Sort field
|
||||
sort_order: Literal["asc", "desc"] = "asc", # asc or desc (asc default for ERA)
|
||||
limit: Optional[int] = 200,
|
||||
limit: int = Query(default=DEFAULT_LIMIT, ge=1, le=MAX_LIMIT),
|
||||
offset: int = 0,
|
||||
csv: Optional[bool] = False,
|
||||
):
|
||||
|
||||
@ -34,6 +34,7 @@ services:
|
||||
- REDIS_HOST=sba_redis
|
||||
- REDIS_PORT=6379
|
||||
- REDIS_DB=0
|
||||
- DISCORD_WEBHOOK_URL=${DISCORD_WEBHOOK_URL}
|
||||
depends_on:
|
||||
- postgres
|
||||
- redis
|
||||
|
||||
88
migrations.py
Normal file
88
migrations.py
Normal file
@ -0,0 +1,88 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Apply pending SQL migrations and record them in schema_versions.
|
||||
|
||||
Usage:
|
||||
python migrations.py
|
||||
|
||||
Connects to PostgreSQL using the same environment variables as the API:
|
||||
POSTGRES_DB (default: sba_master)
|
||||
POSTGRES_USER (default: sba_admin)
|
||||
POSTGRES_PASSWORD (required)
|
||||
POSTGRES_HOST (default: sba_postgres)
|
||||
POSTGRES_PORT (default: 5432)
|
||||
|
||||
On first run against an existing database, all migrations will be applied.
|
||||
All migration files use IF NOT EXISTS guards so re-applying is safe.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import psycopg2
|
||||
|
||||
|
||||
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
|
||||
|
||||
_CREATE_SCHEMA_VERSIONS = """
|
||||
CREATE TABLE IF NOT EXISTS schema_versions (
|
||||
filename VARCHAR(255) PRIMARY KEY,
|
||||
applied_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||
);
|
||||
"""
|
||||
|
||||
|
||||
def _get_connection():
|
||||
password = os.environ.get("POSTGRES_PASSWORD")
|
||||
if password is None:
|
||||
raise RuntimeError("POSTGRES_PASSWORD environment variable is not set")
|
||||
return psycopg2.connect(
|
||||
dbname=os.environ.get("POSTGRES_DB", "sba_master"),
|
||||
user=os.environ.get("POSTGRES_USER", "sba_admin"),
|
||||
password=password,
|
||||
host=os.environ.get("POSTGRES_HOST", "sba_postgres"),
|
||||
port=int(os.environ.get("POSTGRES_PORT", "5432")),
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
conn = _get_connection()
|
||||
try:
|
||||
with conn:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(_CREATE_SCHEMA_VERSIONS)
|
||||
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("SELECT filename FROM schema_versions")
|
||||
applied = {row[0] for row in cur.fetchall()}
|
||||
|
||||
migration_files = sorted(MIGRATIONS_DIR.glob("*.sql"))
|
||||
pending = [f for f in migration_files if f.name not in applied]
|
||||
|
||||
if not pending:
|
||||
print("No pending migrations.")
|
||||
return
|
||||
|
||||
for migration_file in pending:
|
||||
print(f"Applying {migration_file.name} ...", end=" ", flush=True)
|
||||
sql = migration_file.read_text()
|
||||
with conn:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql)
|
||||
cur.execute(
|
||||
"INSERT INTO schema_versions (filename) VALUES (%s)",
|
||||
(migration_file.name,),
|
||||
)
|
||||
print("done")
|
||||
|
||||
print(f"\nApplied {len(pending)} migration(s).")
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except Exception as e:
|
||||
print(f"Error: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
9
migrations/2026-03-27_add_schema_versions_table.sql
Normal file
9
migrations/2026-03-27_add_schema_versions_table.sql
Normal file
@ -0,0 +1,9 @@
|
||||
-- Migration: Add schema_versions table for migration tracking
|
||||
-- Date: 2026-03-27
|
||||
-- Description: Creates a table to record which SQL migrations have been applied,
|
||||
-- preventing double-application and missed migrations across environments.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS schema_versions (
|
||||
filename VARCHAR(255) PRIMARY KEY,
|
||||
applied_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||
);
|
||||
@ -81,9 +81,9 @@ class TestRouteRegistration:
|
||||
for route, methods in EXPECTED_PLAY_ROUTES.items():
|
||||
assert route in paths, f"Route {route} missing from OpenAPI schema"
|
||||
for method in methods:
|
||||
assert (
|
||||
method in paths[route]
|
||||
), f"Method {method.upper()} missing for {route}"
|
||||
assert method in paths[route], (
|
||||
f"Method {method.upper()} missing for {route}"
|
||||
)
|
||||
|
||||
def test_play_routes_have_plays_tag(self, api):
|
||||
"""All play routes should be tagged with 'plays'."""
|
||||
@ -96,9 +96,9 @@ class TestRouteRegistration:
|
||||
for method, spec in paths[route].items():
|
||||
if method in ("get", "post", "patch", "delete"):
|
||||
tags = spec.get("tags", [])
|
||||
assert (
|
||||
"plays" in tags
|
||||
), f"{method.upper()} {route} missing 'plays' tag, has {tags}"
|
||||
assert "plays" in tags, (
|
||||
f"{method.upper()} {route} missing 'plays' tag, has {tags}"
|
||||
)
|
||||
|
||||
@pytest.mark.post_deploy
|
||||
@pytest.mark.skip(
|
||||
@ -124,9 +124,9 @@ class TestRouteRegistration:
|
||||
]:
|
||||
params = paths[route]["get"].get("parameters", [])
|
||||
param_names = [p["name"] for p in params]
|
||||
assert (
|
||||
"sbaplayer_id" in param_names
|
||||
), f"sbaplayer_id parameter missing from {route}"
|
||||
assert "sbaplayer_id" in param_names, (
|
||||
f"sbaplayer_id parameter missing from {route}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@ -493,10 +493,9 @@ class TestPlayCrud:
|
||||
assert result["id"] == play_id
|
||||
|
||||
def test_get_nonexistent_play(self, api):
|
||||
"""GET /plays/999999999 returns an error (wrapped by handle_db_errors)."""
|
||||
"""GET /plays/999999999 returns 404 Not Found."""
|
||||
r = requests.get(f"{api}/api/v3/plays/999999999", timeout=10)
|
||||
# handle_db_errors wraps HTTPException as 500 with detail message
|
||||
assert r.status_code == 500
|
||||
assert r.status_code == 404
|
||||
assert "not found" in r.json().get("detail", "").lower()
|
||||
|
||||
|
||||
@ -575,9 +574,9 @@ class TestGroupBySbaPlayer:
|
||||
)
|
||||
assert r_seasons.status_code == 200
|
||||
season_pas = [s["pa"] for s in r_seasons.json()["stats"]]
|
||||
assert career_pa >= max(
|
||||
season_pas
|
||||
), f"Career PA ({career_pa}) should be >= max season PA ({max(season_pas)})"
|
||||
assert career_pa >= max(season_pas), (
|
||||
f"Career PA ({career_pa}) should be >= max season PA ({max(season_pas)})"
|
||||
)
|
||||
|
||||
@pytest.mark.post_deploy
|
||||
def test_batting_sbaplayer_short_output(self, api):
|
||||
|
||||
154
tests/unit/test_query_limits.py
Normal file
154
tests/unit/test_query_limits.py
Normal file
@ -0,0 +1,154 @@
|
||||
"""
|
||||
Tests for query limit/offset parameter validation and middleware behavior.
|
||||
|
||||
Verifies that:
|
||||
- FastAPI enforces MAX_LIMIT cap (returns 422 for limit > 500)
|
||||
- FastAPI enforces ge=1 on limit (returns 422 for limit=0 or limit=-1)
|
||||
- Transactions endpoint returns limit/offset keys in the response
|
||||
- strip_empty_query_params middleware treats ?param= as absent
|
||||
|
||||
These tests exercise FastAPI parameter validation which fires before any
|
||||
handler code runs, so most tests don't require a live DB connection.
|
||||
|
||||
The app imports redis and psycopg2 at module level, so we mock those
|
||||
system-level packages before importing app.main.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stub out C-extension / system packages that aren't installed in the test
|
||||
# environment before any app code is imported.
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_redis_stub = MagicMock()
|
||||
_redis_stub.Redis = MagicMock(return_value=MagicMock(ping=MagicMock(return_value=True)))
|
||||
sys.modules.setdefault("redis", _redis_stub)
|
||||
|
||||
_psycopg2_stub = MagicMock()
|
||||
sys.modules.setdefault("psycopg2", _psycopg2_stub)
|
||||
|
||||
_playhouse_pool_stub = MagicMock()
|
||||
sys.modules.setdefault("playhouse.pool", _playhouse_pool_stub)
|
||||
_playhouse_pool_stub.PooledPostgresqlDatabase = MagicMock()
|
||||
|
||||
_pandas_stub = MagicMock()
|
||||
sys.modules.setdefault("pandas", _pandas_stub)
|
||||
_pandas_stub.DataFrame = MagicMock()
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def client():
|
||||
"""
|
||||
TestClient with the Peewee db object mocked so the app can be imported
|
||||
without a running PostgreSQL instance. FastAPI validates query params
|
||||
before calling handler code, so 422 responses don't need a real DB.
|
||||
"""
|
||||
mock_db = MagicMock()
|
||||
mock_db.is_closed.return_value = False
|
||||
mock_db.connect.return_value = None
|
||||
mock_db.close.return_value = None
|
||||
|
||||
with patch("app.db_engine.db", mock_db):
|
||||
from fastapi.testclient import TestClient
|
||||
from app.main import app
|
||||
|
||||
with TestClient(app, raise_server_exceptions=False) as c:
|
||||
yield c
|
||||
|
||||
|
||||
def test_limit_exceeds_max_returns_422(client):
|
||||
"""
|
||||
GET /api/v3/decisions with limit=1000 should return 422.
|
||||
|
||||
MAX_LIMIT is 500; the decisions endpoint declares
|
||||
limit: int = Query(ge=1, le=MAX_LIMIT), so FastAPI rejects values > 500
|
||||
before any handler code runs.
|
||||
"""
|
||||
response = client.get("/api/v3/decisions?limit=1000")
|
||||
assert response.status_code == 422
|
||||
|
||||
|
||||
def test_limit_zero_returns_422(client):
|
||||
"""
|
||||
GET /api/v3/decisions with limit=0 should return 422.
|
||||
|
||||
Query(ge=1) rejects zero values.
|
||||
"""
|
||||
response = client.get("/api/v3/decisions?limit=0")
|
||||
assert response.status_code == 422
|
||||
|
||||
|
||||
def test_limit_negative_returns_422(client):
|
||||
"""
|
||||
GET /api/v3/decisions with limit=-1 should return 422.
|
||||
|
||||
Query(ge=1) rejects negative values.
|
||||
"""
|
||||
response = client.get("/api/v3/decisions?limit=-1")
|
||||
assert response.status_code == 422
|
||||
|
||||
|
||||
def test_transactions_has_limit_in_response(client):
|
||||
"""
|
||||
GET /api/v3/transactions?season=12 should include 'limit' and 'offset'
|
||||
keys in the JSON response body.
|
||||
|
||||
The transactions endpoint was updated to return pagination metadata
|
||||
alongside results so callers know the applied page size.
|
||||
"""
|
||||
mock_qs = MagicMock()
|
||||
mock_qs.count.return_value = 0
|
||||
mock_qs.where.return_value = mock_qs
|
||||
mock_qs.order_by.return_value = mock_qs
|
||||
mock_qs.offset.return_value = mock_qs
|
||||
mock_qs.limit.return_value = mock_qs
|
||||
mock_qs.__iter__ = MagicMock(return_value=iter([]))
|
||||
|
||||
with (
|
||||
patch("app.routers_v3.transactions.Transaction") as mock_txn,
|
||||
patch("app.routers_v3.transactions.Team") as mock_team,
|
||||
patch("app.routers_v3.transactions.Player") as mock_player,
|
||||
):
|
||||
mock_txn.select_season.return_value = mock_qs
|
||||
mock_txn.select.return_value = mock_qs
|
||||
mock_team.select.return_value = mock_qs
|
||||
mock_player.select.return_value = mock_qs
|
||||
|
||||
response = client.get("/api/v3/transactions?season=12")
|
||||
|
||||
# If the mock is sufficient the response is 200 with pagination keys;
|
||||
# if some DB path still fires we at least confirm limit param is accepted.
|
||||
assert response.status_code != 422
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
assert "limit" in data, "Response missing 'limit' key"
|
||||
assert "offset" in data, "Response missing 'offset' key"
|
||||
|
||||
|
||||
def test_empty_string_param_stripped(client):
|
||||
"""
|
||||
Query params with an empty string value should be treated as absent.
|
||||
|
||||
The strip_empty_query_params middleware rewrites the query string before
|
||||
FastAPI parses it, so ?league_abbrev= is removed entirely rather than
|
||||
forwarded as an empty string to the handler.
|
||||
|
||||
Expected: the request is accepted (not 422) and the empty param is ignored.
|
||||
"""
|
||||
mock_qs = MagicMock()
|
||||
mock_qs.count.return_value = 0
|
||||
mock_qs.where.return_value = mock_qs
|
||||
mock_qs.__iter__ = MagicMock(return_value=iter([]))
|
||||
|
||||
with patch("app.routers_v3.standings.Standings") as mock_standings:
|
||||
mock_standings.select_season.return_value = mock_qs
|
||||
|
||||
# ?league_abbrev= should be stripped → treated as absent (None), not ""
|
||||
response = client.get("/api/v3/standings?season=12&league_abbrev=")
|
||||
|
||||
assert response.status_code != 422, (
|
||||
"Empty string query param caused a 422 — middleware may not be stripping it"
|
||||
)
|
||||
Loading…
Reference in New Issue
Block a user