Merge branch 'main' into ai/paper-dynasty-card-creation-16

This commit is contained in:
Claude 2026-03-23 03:51:01 +00:00
commit b55820eec8
6 changed files with 35 additions and 92 deletions

View File

@ -1,8 +1,14 @@
import asyncio
import sys
from pathlib import Path
import aiohttp
import pandas as pd
AUTH_TOKEN = {"Authorization": "Bearer Tp3aO3jhYve5NJF1IqOmJTmk"}
# Add project root so we can import db_calls
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
from db_calls import AUTH_TOKEN
PROD_URL = "https://pd.manticorum.com/api"

2
.env.example Normal file
View File

@ -0,0 +1,2 @@
# Paper Dynasty API
PD_API_TOKEN=your-bearer-token-here

View File

@ -1,10 +1,18 @@
import os
import aiohttp
import pybaseball as pb
from dotenv import load_dotenv
from typing import Literal
from exceptions import logger
AUTH_TOKEN = {"Authorization": "Bearer Tp3aO3jhYve5NJF1IqOmJTmk"}
load_dotenv()
_token = os.environ.get("PD_API_TOKEN")
if not _token:
raise EnvironmentError("PD_API_TOKEN environment variable is required")
AUTH_TOKEN = {"Authorization": f"Bearer {_token}"}
DB_URL = "https://pd.manticorum.com/api"
master_debug = True
alt_database = None
@ -25,7 +33,7 @@ def param_char(other_params):
def get_req_url(
endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None
):
req_url = f'{DB_URL}/v{api_ver}/{endpoint}{"/" if object_id is not None else ""}{object_id if object_id is not None else ""}'
req_url = f"{DB_URL}/v{api_ver}/{endpoint}{'/' if object_id is not None else ''}{object_id if object_id is not None else ''}"
if params:
other_params = False
@ -39,11 +47,11 @@ def get_req_url(
def log_return_value(log_string: str):
if master_debug:
logger.info(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n'
f"return: {log_string[:1200]}{' [ S N I P P E D ]' if len(log_string) > 1200 else ''}\n"
)
else:
logger.debug(
f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n'
f"return: {log_string[:1200]}{' [ S N I P P E D ]' if len(log_string) > 1200 else ''}\n"
)
@ -183,4 +191,4 @@ def get_player_data(
def player_desc(this_player) -> str:
if this_player["p_name"] in this_player["description"]:
return this_player["description"]
return f'{this_player["description"]} {this_player["p_name"]}'
return f"{this_player['description']} {this_player['p_name']}"

View File

@ -196,8 +196,8 @@ async def create_new_players(
{
"p_name": f"{f_name} {l_name}",
"cost": NEW_PLAYER_COST,
"image": f'{card_base_url}/{df_data["player_id"]}/'
f'pitchingcard{urllib.parse.quote("?d=")}{release_dir}',
"image": f"{card_base_url}/{df_data['player_id']}/"
f"pitchingcard{urllib.parse.quote('?d=')}{release_dir}",
"mlbclub": CLUB_LIST[df_data["Tm_vL"]],
"franchise": FRANCHISE_LIST[df_data["Tm_vL"]],
"cardset_id": cardset["id"],
@ -268,7 +268,7 @@ async def calculate_pitching_cards(
def create_pitching_card(df_data):
logger.info(
f'Creating pitching card for {df_data["name_first"]} {df_data["name_last"]} / fg ID: {df_data["key_fangraphs"]}'
f"Creating pitching card for {df_data['name_first']} {df_data['name_last']} / fg ID: {df_data['key_fangraphs']}"
)
pow_data = cde.pow_ratings(
float(df_data["Inn_def"]), df_data["GS"], df_data["G"]
@ -298,11 +298,11 @@ async def calculate_pitching_cards(
int(df_data["GF"]), int(df_data["SV"]), int(df_data["G"])
),
"hand": df_data["pitch_hand"],
"batting": f'#1W{df_data["pitch_hand"]}-C',
"batting": f"#1W{df_data['pitch_hand']}-C",
}
)
except Exception as e:
logger.error(f'Skipping fg ID {df_data["key_fangraphs"]} due to: {e}')
logger.error(f"Skipping fg ID {df_data['key_fangraphs']} due to: {e}")
print("Calculating pitching cards...")
pitching_stats.apply(create_pitching_card, axis=1)
@ -333,7 +333,7 @@ async def create_position(
def create_pit_position(df_data):
if df_data["key_bbref"] in df_p.index:
logger.debug(f'Running P stats for {df_data["p_name"]}')
logger.debug(f"Running P stats for {df_data['p_name']}")
pit_positions.append(
{
"player_id": int(df_data["player_id"]),
@ -355,7 +355,7 @@ async def create_position(
try:
pit_positions.append(
{
"player_id": int(df_data["key_bbref"]),
"player_id": int(float(df_data["player_id"])),
"position": "P",
"innings": 1,
"range": 5,
@ -364,7 +364,7 @@ async def create_position(
)
except Exception:
logger.error(
f'Could not create pitcher position for {df_data["key_bbref"]}'
f"Could not create pitcher position for {df_data['key_bbref']}"
)
print("Calculating pitcher fielding lines now...")
@ -386,7 +386,7 @@ async def calculate_pitcher_ratings(pitching_stats: pd.DataFrame, post_pitchers:
pitching_ratings.extend(cpi.get_pitcher_ratings(df_data))
except Exception:
logger.error(
f'Could not create a pitching card for {df_data["key_fangraphs"]}'
f"Could not create a pitching card for {df_data['key_fangraphs']}"
)
print("Calculating card ratings...")
@ -525,8 +525,8 @@ async def post_player_updates(
[
(
"image",
f'{card_base_url}/{df_data["player_id"]}/pitchingcard'
f'{urllib.parse.quote("?d=")}{release_dir}',
f"{card_base_url}/{df_data['player_id']}/pitchingcard"
f"{urllib.parse.quote('?d=')}{release_dir}",
)
]
)

View File

@ -23,6 +23,8 @@ dependencies = [
"pydantic>=2.9.0",
# AWS
"boto3>=1.35.0",
# Environment
"python-dotenv>=1.0.0",
# Scraping
"beautifulsoup4>=4.12.0",
"lxml>=5.0.0",

View File

@ -1,75 +0,0 @@
from typing import Literal
import requests
from exceptions import logger, log_exception
# SECURITY(review): this is a hardcoded Supabase *service-role* JWT (role
# "service_role" is visible in the token payload) committed to the repository.
# A service-role key bypasses row-level security entirely; it must be treated
# as leaked, rotated in the Supabase dashboard, and loaded from an environment
# variable instead of source code.
AUTH_TOKEN = {
    # Bearer token for the Authorization header (same service-role JWT).
    "Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImNucGhwbnV2aGp2cXprY2J3emRrIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImlhdCI6MTc0NTgxMTc4NCwiZXhwIjoyMDYxMzg3Nzg0fQ.7dG_y2zU2PajBwTD8vut5GcWf3CSaZePkYW_hMf0fVg",
    # Supabase also expects the key in a separate "apikey" header.
    "apikey": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImNucGhwbnV2aGp2cXprY2J3emRrIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImlhdCI6MTc0NTgxMTc4NCwiZXhwIjoyMDYxMzg3Nzg0fQ.7dG_y2zU2PajBwTD8vut5GcWf3CSaZePkYW_hMf0fVg",
}
# Base URL of the Supabase PostgREST endpoint for this project.
DB_URL = "https://cnphpnuvhjvqzkcbwzdk.supabase.co/rest/v1"


def get_req_url(endpoint: str, params: list = None):
    """Build the full request URL for a REST *endpoint*.

    Args:
        endpoint: Table/endpoint name appended to ``DB_URL``.
        params: Optional list of ``(key, value)`` pairs to encode as the
            query string.

    Returns:
        The full URL. With no params it still ends in ``"?"``, preserving
        the original function's output for that case.
    """
    req_url = f"{DB_URL}/{endpoint}?"
    if params:
        # BUG FIX: the original appended '?' again for the first parameter
        # (the base URL already ends with '?'), yielding '...endpoint??a=1'.
        # Join the pairs with '&' directly after the existing '?'.
        req_url += "&".join(f"{key}={value}" for key, value in params)
    return req_url
def log_return_value(log_string: str, log_type: Literal["info", "debug"]):
    """Log a return payload at the requested level, truncating long values.

    Payloads longer than 1200 characters are cut and suffixed with a
    ``[ S N I P P E D ]`` marker so huge responses don't flood the log.
    """
    marker = " [ S N I P P E D ]" if len(log_string) > 1200 else ""
    message = f"return: {log_string[:1200]}{marker}\n"
    if log_type == "info":
        logger.info(message)
    else:
        logger.debug(message)
def db_get(
    endpoint: str,
    params: dict = None,
    limit: int = 1000,
    offset: int = 0,
    none_okay: bool = True,
    timeout: int = 3,
):
    """Synchronously GET rows from a Supabase REST endpoint.

    Args:
        endpoint: Table/endpoint name appended to ``DB_URL``.
        params: Optional extra query parameters passed through to requests.
        limit: Page size added to the query string.
        offset: Page offset added to the query string.
        none_okay: When True, an empty list result is returned as ``None``;
            otherwise it is reported via ``log_exception``.
        timeout: Request timeout in seconds.

    Returns:
        The decoded JSON payload, or ``None`` for an empty result when
        ``none_okay`` is set.
    """
    req_url = f"{DB_URL}/{endpoint}?limit={limit}&offset={offset}"
    logger.info(f"HTTP GET: {req_url}, params: {params}")
    # BUG FIX: the `timeout` parameter was accepted but never forwarded, so
    # requests could block indefinitely; pass it through to requests.
    response = requests.request(
        "GET", req_url, params=params, headers=AUTH_TOKEN, timeout=timeout
    )
    logger.info(response)
    if response.status_code != requests.codes.ok:
        # NOTE(review): execution continues past this call unless
        # log_exception raises — confirm that is the intended contract.
        log_exception(Exception, response.text)
    data = response.json()
    if isinstance(data, list) and len(data) == 0:
        if none_okay:
            return None
        else:
            log_exception(Exception, "Query returned no results and none_okay = False")
    return data
# async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
# async with session.get(req_url) as r:
# logger.info(f'session info: {r}')
# if r.status == 200:
# js = await r.json()
# log_return_value(f'{js}')
# return js
# elif none_okay:
# e = await r.text()
# logger.error(e)
# return None
# else:
# e = await r.text()
# logger.error(e)
# raise ValueError(f'DB: {e}')