Merge branch 'main' into ai/paper-dynasty-card-creation-20
This commit is contained in:
commit
de9604364c
@ -1,8 +1,14 @@
|
||||
import asyncio
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import aiohttp
|
||||
import pandas as pd
|
||||
|
||||
AUTH_TOKEN = {"Authorization": "Bearer Tp3aO3jhYve5NJF1IqOmJTmk"}
|
||||
# Add project root so we can import db_calls
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
|
||||
from db_calls import AUTH_TOKEN
|
||||
|
||||
PROD_URL = "https://pd.manticorum.com/api"
|
||||
|
||||
|
||||
|
||||
2
.env.example
Normal file
2
.env.example
Normal file
@ -0,0 +1,2 @@
|
||||
# Paper Dynasty API
|
||||
PD_API_TOKEN=your-bearer-token-here
|
||||
@ -573,7 +573,7 @@ def stealing_line(steal_data: dict):
|
||||
else:
|
||||
good_jump = "2-12"
|
||||
|
||||
return f'{"*" if sd[2] else ""}{good_jump}/- ({sd[1] if sd[1] else "-"}-{sd[0] if sd[0] else "-"})'
|
||||
return f"{'*' if sd[2] else ''}{good_jump}/- ({sd[1] if sd[1] else '-'}-{sd[0] if sd[0] else '-'})"
|
||||
|
||||
|
||||
def running(extra_base_pct: str):
|
||||
@ -583,7 +583,7 @@ def running(extra_base_pct: str):
|
||||
xb_pct = float(extra_base_pct.strip("%")) / 80
|
||||
except Exception as e:
|
||||
logger.error(f"calcs_batter running - {e}")
|
||||
xb_pct = 20
|
||||
return 8
|
||||
|
||||
return max(min(round(6 + (10 * xb_pct)), 17), 8)
|
||||
|
||||
@ -693,11 +693,11 @@ def get_batter_ratings(df_data) -> List[dict]:
|
||||
|
||||
logger.debug(
|
||||
f"all on base: {vl.hbp + vl.walk + vl.total_hits()} / all chances: {vl.total_chances()}"
|
||||
f'{"*******ERROR ABOVE*******" if vl.hbp + vl.walk + vl.total_hits() != vl.total_chances() else ""}'
|
||||
f"{'*******ERROR ABOVE*******' if vl.hbp + vl.walk + vl.total_hits() != vl.total_chances() else ''}"
|
||||
)
|
||||
logger.debug(
|
||||
f"all on base: {vr.hbp + vr.walk + vr.total_hits()} / all chances: {vr.total_chances()}"
|
||||
f'{"*******ERROR ABOVE*******" if vr.hbp + vr.walk + vr.total_hits() != vr.total_chances() else ""}'
|
||||
f"{'*******ERROR ABOVE*******' if vr.hbp + vr.walk + vr.total_hits() != vr.total_chances() else ''}"
|
||||
)
|
||||
|
||||
vl.calculate_strikeouts(df_data["SO_vL"], df_data["AB_vL"], df_data["H_vL"])
|
||||
|
||||
@ -3,7 +3,7 @@ import urllib.parse
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
|
||||
from typing import Dict
|
||||
from typing import Any, Dict
|
||||
from creation_helpers import (
|
||||
get_all_pybaseball_ids,
|
||||
sanitize_name,
|
||||
@ -158,8 +158,8 @@ async def create_new_players(
|
||||
{
|
||||
"p_name": f"{f_name} {l_name}",
|
||||
"cost": NEW_PLAYER_COST,
|
||||
"image": f'{card_base_url}/{df_data["player_id"]}/battingcard'
|
||||
f'{urllib.parse.quote("?d=")}{release_dir}',
|
||||
"image": f"{card_base_url}/{df_data['player_id']}/battingcard"
|
||||
f"{urllib.parse.quote('?d=')}{release_dir}",
|
||||
"mlbclub": CLUB_LIST[df_data["Tm_vL"]],
|
||||
"franchise": FRANCHISE_LIST[df_data["Tm_vL"]],
|
||||
"cardset_id": cardset["id"],
|
||||
@ -302,7 +302,7 @@ async def calculate_batting_ratings(offense_stats: pd.DataFrame, to_post: bool):
|
||||
|
||||
|
||||
async def post_player_updates(
|
||||
cardset: Dict[str, any],
|
||||
cardset: Dict[str, Any],
|
||||
card_base_url: str,
|
||||
release_dir: str,
|
||||
player_desc: str,
|
||||
@ -432,8 +432,8 @@ async def post_player_updates(
|
||||
[
|
||||
(
|
||||
"image",
|
||||
f'{card_base_url}/{df_data["player_id"]}/battingcard'
|
||||
f'{urllib.parse.quote("?d=")}{release_dir}',
|
||||
f"{card_base_url}/{df_data['player_id']}/battingcard"
|
||||
f"{urllib.parse.quote('?d=')}{release_dir}",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
@ -10,7 +10,7 @@ import requests
|
||||
import time
|
||||
|
||||
from db_calls import db_get
|
||||
from db_calls_card_creation import *
|
||||
from db_calls_card_creation import PitcherData
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# Card Creation Constants
|
||||
@ -533,7 +533,7 @@ def get_pitching_peripherals(season: int):
|
||||
row_data.append(player_id)
|
||||
if len(headers) == 0:
|
||||
col_names.append("key_bbref")
|
||||
except Exception:
|
||||
except KeyError:
|
||||
pass
|
||||
row_data.append(cell.text)
|
||||
if len(headers) == 0:
|
||||
@ -595,21 +595,21 @@ def legal_splits(tot_chances):
|
||||
|
||||
|
||||
def result_string(tba_data, row_num, split_min=None, split_max=None):
|
||||
bold1 = f'{"<b>" if tba_data["bold"] else ""}'
|
||||
bold2 = f'{"</b>" if tba_data["bold"] else ""}'
|
||||
row_string = f'{"<b> </b>" if int(row_num) < 10 else ""}{row_num}'
|
||||
bold1 = f"{'<b>' if tba_data['bold'] else ''}"
|
||||
bold2 = f"{'</b>' if tba_data['bold'] else ''}"
|
||||
row_string = f"{'<b> </b>' if int(row_num) < 10 else ''}{row_num}"
|
||||
if TESTING:
|
||||
print(
|
||||
f'adding {tba_data["string"]} to row {row_num} / '
|
||||
f"adding {tba_data['string']} to row {row_num} / "
|
||||
f"split_min: {split_min} / split_max: {split_max}"
|
||||
)
|
||||
|
||||
# No splits; standard result
|
||||
if not split_min:
|
||||
return f'{bold1}{row_string}-{tba_data["string"]}{bold2}'
|
||||
return f"{bold1}{row_string}-{tba_data['string']}{bold2}"
|
||||
|
||||
# With splits
|
||||
split_nums = f'{split_min if split_min != 20 else ""}{"-" if split_min != 20 else ""}{split_max}'
|
||||
split_nums = f"{split_min if split_min != 20 else ''}{'-' if split_min != 20 else ''}{split_max}"
|
||||
data_string = (
|
||||
tba_data["sm-string"] if "sm-string" in tba_data.keys() else tba_data["string"]
|
||||
)
|
||||
@ -618,10 +618,10 @@ def result_string(tba_data, row_num, split_min=None, split_max=None):
|
||||
spaces -= 3
|
||||
elif "SI**" in data_string:
|
||||
spaces += 1
|
||||
elif "DO**" in data_string:
|
||||
spaces -= 2
|
||||
elif "DO*" in data_string:
|
||||
spaces -= 1
|
||||
elif "DO*" in data_string:
|
||||
spaces -= 2
|
||||
elif "so" in data_string:
|
||||
spaces += 3
|
||||
elif "gb" in data_string:
|
||||
@ -638,41 +638,39 @@ def result_string(tba_data, row_num, split_min=None, split_max=None):
|
||||
row_output = "<b> </b>"
|
||||
if TESTING:
|
||||
print(f"row_output: {row_output}")
|
||||
return f'{bold1}{row_output}{data_string}{" " * spaces}{split_nums}{bold2}'
|
||||
return f"{bold1}{row_output}{data_string}{' ' * spaces}{split_nums}{bold2}"
|
||||
|
||||
|
||||
def result_data(
|
||||
tba_data, row_num, tba_data_bottom=None, top_split_max=None, fatigue=False
|
||||
):
|
||||
ret_data = {}
|
||||
top_bold1 = f'{"<b>" if tba_data["bold"] else ""}'
|
||||
top_bold2 = f'{"</b>" if tba_data["bold"] else ""}'
|
||||
top_bold1 = f"{'<b>' if tba_data['bold'] else ''}"
|
||||
top_bold2 = f"{'</b>' if tba_data['bold'] else ''}"
|
||||
bot_bold1 = None
|
||||
bot_bold2 = None
|
||||
if tba_data_bottom:
|
||||
bot_bold1 = f'{"<b>" if tba_data_bottom["bold"] else ""}'
|
||||
bot_bold2 = f'{"</b>" if tba_data_bottom["bold"] else ""}'
|
||||
bot_bold1 = f"{'<b>' if tba_data_bottom['bold'] else ''}"
|
||||
bot_bold2 = f"{'</b>' if tba_data_bottom['bold'] else ''}"
|
||||
|
||||
if tba_data_bottom is None:
|
||||
ret_data["2d6"] = f"{top_bold1}{int(row_num)}-{top_bold2}"
|
||||
ret_data["splits"] = f"{top_bold1}{top_bold2}"
|
||||
ret_data["result"] = (
|
||||
f"{top_bold1}"
|
||||
f'{tba_data["string"]}{" •" if fatigue else ""}'
|
||||
f"{top_bold2}"
|
||||
f"{top_bold1}{tba_data['string']}{' •' if fatigue else ''}{top_bold2}"
|
||||
)
|
||||
else:
|
||||
ret_data["2d6"] = f"{top_bold1}{int(row_num)}-{top_bold2}\n"
|
||||
ret_data["splits"] = (
|
||||
f'{top_bold1}1{"-" if top_split_max != 1 else ""}'
|
||||
f'{top_split_max if top_split_max != 1 else ""}{top_bold2}\n'
|
||||
f'{bot_bold1}{top_split_max+1}{"-20" if top_split_max != 19 else ""}{bot_bold2}'
|
||||
f"{top_bold1}1{'-' if top_split_max != 1 else ''}"
|
||||
f"{top_split_max if top_split_max != 1 else ''}{top_bold2}\n"
|
||||
f"{bot_bold1}{top_split_max + 1}{'-20' if top_split_max != 19 else ''}{bot_bold2}"
|
||||
)
|
||||
ret_data["result"] = (
|
||||
f'{top_bold1}{tba_data["sm-string"] if "sm-string" in tba_data.keys() else tba_data["string"]}'
|
||||
f"{top_bold1}{tba_data['sm-string'] if 'sm-string' in tba_data.keys() else tba_data['string']}"
|
||||
f"{top_bold2}\n"
|
||||
f"{bot_bold1}"
|
||||
f'{tba_data_bottom["sm-string"] if "sm-string" in tba_data_bottom.keys() else tba_data_bottom["string"]}'
|
||||
f"{tba_data_bottom['sm-string'] if 'sm-string' in tba_data_bottom.keys() else tba_data_bottom['string']}"
|
||||
f"{bot_bold2}"
|
||||
)
|
||||
|
||||
@ -688,9 +686,9 @@ def get_of(batter_hand, pitcher_hand, pull_side=True):
|
||||
|
||||
if batter_hand == "S":
|
||||
if pitcher_hand == "L":
|
||||
return "rf" if pull_side else "rf"
|
||||
return "lf" if pull_side else "rf"
|
||||
else:
|
||||
return "lf" if pull_side else "lf"
|
||||
return "rf" if pull_side else "lf"
|
||||
|
||||
|
||||
def get_col(col_num):
|
||||
@ -729,7 +727,7 @@ def get_position_string(all_pos: list, inc_p: bool):
|
||||
|
||||
for x in all_pos:
|
||||
if x.position == "OF":
|
||||
of_arm = f'{"+" if "-" not in x.arm else ""}{x.arm}'
|
||||
of_arm = f"{'+' if '-' not in x.arm else ''}{x.arm}"
|
||||
of_error = x.error
|
||||
of_innings = x.innings
|
||||
elif x.position == "CF":
|
||||
@ -744,7 +742,7 @@ def get_position_string(all_pos: list, inc_p: bool):
|
||||
elif x.position == "C":
|
||||
all_def.append(
|
||||
(
|
||||
f'c-{x.range}({"+" if int(x.arm) >= 0 else ""}{x.arm}) e{x.error} T-{x.overthrow}(pb-{x.pb})',
|
||||
f"c-{x.range}({'+' if int(x.arm) >= 0 else ''}{x.arm}) e{x.error} T-{x.overthrow}(pb-{x.pb})",
|
||||
x.innings,
|
||||
)
|
||||
)
|
||||
@ -1079,7 +1077,7 @@ def mlbteam_and_franchise(mlbam_playerid):
|
||||
p_data["franchise"] = normalize_franchise(data["currentTeam"]["name"])
|
||||
else:
|
||||
logger.error(
|
||||
f'Could not set team for {mlbam_playerid}; received {data["currentTeam"]["name"]}'
|
||||
f"Could not set team for {mlbam_playerid}; received {data['currentTeam']['name']}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
@ -1222,5 +1220,5 @@ def get_hand(df_data):
|
||||
else:
|
||||
return "R"
|
||||
except Exception:
|
||||
logger.error(f'Error in get_hand for {df_data["Name"]}')
|
||||
logger.error(f"Error in get_hand for {df_data['Name']}")
|
||||
return "R"
|
||||
|
||||
@ -6,6 +6,7 @@ baseball archetypes with iterative review and refinement.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import copy
|
||||
import sys
|
||||
from typing import Literal
|
||||
from datetime import datetime
|
||||
@ -179,7 +180,12 @@ class CustomCardCreator:
|
||||
else:
|
||||
calc = PitcherRatingCalculator(archetype)
|
||||
ratings = calc.calculate_ratings(pitchingcard_id=0) # Temp ID
|
||||
card_data = {"ratings": ratings}
|
||||
card_data = {
|
||||
"ratings": ratings,
|
||||
"starter_rating": archetype.starter_rating,
|
||||
"relief_rating": archetype.relief_rating,
|
||||
"closer_rating": archetype.closer_rating,
|
||||
}
|
||||
|
||||
# Step 4: Review and tweak loop
|
||||
final_data = await self.review_and_tweak(
|
||||
@ -347,7 +353,7 @@ class CustomCardCreator:
|
||||
vs_hand = rating["vs_hand"]
|
||||
print(f"\nVS {vs_hand}{'HP' if player_type == 'batter' else 'HB'}:")
|
||||
print(
|
||||
f" AVG: {rating['avg']:.3f} OBP: {rating['obp']:.3f} SLG: {rating['slg']:.3f} OPS: {rating['obp']+rating['slg']:.3f}"
|
||||
f" AVG: {rating['avg']:.3f} OBP: {rating['obp']:.3f} SLG: {rating['slg']:.3f} OPS: {rating['obp'] + rating['slg']:.3f}"
|
||||
)
|
||||
|
||||
# Show hit distribution
|
||||
@ -364,7 +370,7 @@ class CustomCardCreator:
|
||||
+ rating["bp_single"]
|
||||
)
|
||||
print(
|
||||
f" Hits: {total_hits:.1f} (HR: {rating['homerun']:.1f} 3B: {rating['triple']:.1f} 2B: {rating['double_pull']+rating['double_two']+rating['double_three']:.1f} 1B: {total_hits - rating['homerun'] - rating['bp_homerun'] - rating['triple'] - rating['double_pull'] - rating['double_two'] - rating['double_three']:.1f})"
|
||||
f" Hits: {total_hits:.1f} (HR: {rating['homerun']:.1f} 3B: {rating['triple']:.1f} 2B: {rating['double_pull'] + rating['double_two'] + rating['double_three']:.1f} 1B: {total_hits - rating['homerun'] - rating['bp_homerun'] - rating['triple'] - rating['double_pull'] - rating['double_two'] - rating['double_three']:.1f})"
|
||||
)
|
||||
|
||||
# Show walk/strikeout
|
||||
@ -389,7 +395,7 @@ class CustomCardCreator:
|
||||
)
|
||||
)
|
||||
print(
|
||||
f" Outs: {outs:.1f} (K: {rating['strikeout']:.1f} LD: {rating['lineout']:.1f} FB: {rating['flyout_a']+rating['flyout_bq']+rating['flyout_lf_b']+rating['flyout_rf_b']:.1f} GB: {rating['groundout_a']+rating['groundout_b']+rating['groundout_c']:.1f})"
|
||||
f" Outs: {outs:.1f} (K: {rating['strikeout']:.1f} LD: {rating['lineout']:.1f} FB: {rating['flyout_a'] + rating['flyout_bq'] + rating['flyout_lf_b'] + rating['flyout_rf_b']:.1f} GB: {rating['groundout_a'] + rating['groundout_b'] + rating['groundout_c']:.1f})"
|
||||
)
|
||||
|
||||
# Calculate and display total OPS
|
||||
@ -420,10 +426,68 @@ class CustomCardCreator:
|
||||
print("-" * 70)
|
||||
print("\nAdjust key percentages (press Enter to keep current value):\n")
|
||||
|
||||
# TODO: Implement percentage tweaking
|
||||
# For now, return unchanged
|
||||
print("(Feature coming soon - manual adjustments available in option 3)")
|
||||
return card_data
|
||||
def prompt_float(label: str, current: float) -> float:
|
||||
val = input(f" {label} [{current:.3f}]: ").strip()
|
||||
if not val:
|
||||
return current
|
||||
try:
|
||||
return float(val)
|
||||
except ValueError:
|
||||
print(" Invalid value, keeping current.")
|
||||
return current
|
||||
|
||||
def prompt_int(label: str, current: int) -> int:
|
||||
val = input(f" {label} [{current}]: ").strip()
|
||||
if not val:
|
||||
return current
|
||||
try:
|
||||
return int(val)
|
||||
except ValueError:
|
||||
print(" Invalid value, keeping current.")
|
||||
return current
|
||||
|
||||
arch = copy.copy(archetype)
|
||||
|
||||
print("--- vs RHP/RHB ---")
|
||||
arch.avg_vs_r = prompt_float("AVG vs R", arch.avg_vs_r)
|
||||
arch.obp_vs_r = prompt_float("OBP vs R", arch.obp_vs_r)
|
||||
arch.slg_vs_r = prompt_float("SLG vs R", arch.slg_vs_r)
|
||||
arch.bb_pct_vs_r = prompt_float("BB% vs R", arch.bb_pct_vs_r)
|
||||
arch.k_pct_vs_r = prompt_float("K% vs R", arch.k_pct_vs_r)
|
||||
|
||||
print("\n--- vs LHP/LHB ---")
|
||||
arch.avg_vs_l = prompt_float("AVG vs L", arch.avg_vs_l)
|
||||
arch.obp_vs_l = prompt_float("OBP vs L", arch.obp_vs_l)
|
||||
arch.slg_vs_l = prompt_float("SLG vs L", arch.slg_vs_l)
|
||||
arch.bb_pct_vs_l = prompt_float("BB% vs L", arch.bb_pct_vs_l)
|
||||
arch.k_pct_vs_l = prompt_float("K% vs L", arch.k_pct_vs_l)
|
||||
|
||||
print("\n--- Power Profile ---")
|
||||
arch.hr_per_hit = prompt_float("HR/Hit", arch.hr_per_hit)
|
||||
arch.triple_per_hit = prompt_float("3B/Hit", arch.triple_per_hit)
|
||||
arch.double_per_hit = prompt_float("2B/Hit", arch.double_per_hit)
|
||||
|
||||
print("\n--- Batted Ball Profile ---")
|
||||
arch.gb_pct = prompt_float("GB%", arch.gb_pct)
|
||||
arch.fb_pct = prompt_float("FB%", arch.fb_pct)
|
||||
arch.ld_pct = prompt_float("LD%", arch.ld_pct)
|
||||
|
||||
if player_type == "batter":
|
||||
print("\n--- Baserunning ---")
|
||||
arch.speed_rating = prompt_int("Speed (1-10)", arch.speed_rating) # type: ignore[arg-type]
|
||||
arch.steal_jump = prompt_int("Jump (1-10)", arch.steal_jump) # type: ignore[arg-type]
|
||||
arch.xbt_pct = prompt_float("XBT%", arch.xbt_pct) # type: ignore[union-attr]
|
||||
|
||||
# Recalculate card ratings with the modified archetype
|
||||
if player_type == "batter":
|
||||
calc = BatterRatingCalculator(arch) # type: ignore[arg-type]
|
||||
ratings = calc.calculate_ratings(battingcard_id=0)
|
||||
baserunning = calc.calculate_baserunning()
|
||||
return {"ratings": ratings, "baserunning": baserunning}
|
||||
else:
|
||||
calc_p = PitcherRatingCalculator(arch) # type: ignore[arg-type]
|
||||
ratings = calc_p.calculate_ratings(pitchingcard_id=0)
|
||||
return {"ratings": ratings}
|
||||
|
||||
async def manual_adjustments(
|
||||
self, player_type: Literal["batter", "pitcher"], card_data: dict
|
||||
@ -434,10 +498,99 @@ class CustomCardCreator:
|
||||
print("-" * 70)
|
||||
print("\nDirectly edit D20 chances (must sum to 108):\n")
|
||||
|
||||
# TODO: Implement manual adjustments
|
||||
# For now, return unchanged
|
||||
print("(Feature coming soon)")
|
||||
return card_data
|
||||
D20_FIELDS = [
|
||||
"homerun",
|
||||
"bp_homerun",
|
||||
"triple",
|
||||
"double_three",
|
||||
"double_two",
|
||||
"double_pull",
|
||||
"single_two",
|
||||
"single_one",
|
||||
"single_center",
|
||||
"bp_single",
|
||||
"hbp",
|
||||
"walk",
|
||||
"strikeout",
|
||||
"lineout",
|
||||
"popout",
|
||||
"flyout_a",
|
||||
"flyout_bq",
|
||||
"flyout_lf_b",
|
||||
"flyout_rf_b",
|
||||
"groundout_a",
|
||||
"groundout_b",
|
||||
"groundout_c",
|
||||
]
|
||||
|
||||
# Choose which split to edit
|
||||
print("Which split to edit?")
|
||||
for i, rating in enumerate(card_data["ratings"]):
|
||||
vs = rating["vs_hand"]
|
||||
print(f" {i + 1}. vs {vs}{'HP' if player_type == 'batter' else 'HB'}")
|
||||
|
||||
while True:
|
||||
choice = input("\nSelect split (1-2): ").strip()
|
||||
try:
|
||||
idx = int(choice) - 1
|
||||
if 0 <= idx < len(card_data["ratings"]):
|
||||
break
|
||||
else:
|
||||
print("Invalid choice.")
|
||||
except ValueError:
|
||||
print("Invalid input.")
|
||||
|
||||
result = copy.deepcopy(card_data)
|
||||
rating = result["ratings"][idx]
|
||||
|
||||
while True:
|
||||
vs = rating["vs_hand"]
|
||||
print(
|
||||
f"\n--- VS {vs}{'HP' if player_type == 'batter' else 'HB'} D20 Chances ---"
|
||||
)
|
||||
total = 0.0
|
||||
for i, field in enumerate(D20_FIELDS, 1):
|
||||
val = rating[field]
|
||||
print(f" {i:2d}. {field:<20s}: {val:.2f}")
|
||||
total += val
|
||||
print(f"\n Total: {total:.2f} (target: 108.00)")
|
||||
|
||||
user_input = input(
|
||||
"\nEnter field number and new value (e.g. '1 3.5'), or 'done': "
|
||||
).strip()
|
||||
if user_input.lower() in ("done", "q", ""):
|
||||
break
|
||||
|
||||
parts = user_input.split()
|
||||
if len(parts) != 2:
|
||||
print(" Enter a field number and a value separated by a space.")
|
||||
continue
|
||||
|
||||
try:
|
||||
field_idx = int(parts[0]) - 1
|
||||
new_val = float(parts[1])
|
||||
except ValueError:
|
||||
print(" Invalid input.")
|
||||
continue
|
||||
|
||||
if not (0 <= field_idx < len(D20_FIELDS)):
|
||||
print(f" Field number must be between 1 and {len(D20_FIELDS)}.")
|
||||
continue
|
||||
|
||||
if new_val < 0:
|
||||
print(" Value cannot be negative.")
|
||||
continue
|
||||
|
||||
rating[D20_FIELDS[field_idx]] = new_val
|
||||
|
||||
total = sum(rating[f] for f in D20_FIELDS)
|
||||
if abs(total - 108.0) > 0.01:
|
||||
print(
|
||||
f"\nWarning: Total is {total:.2f} (expected 108.00). "
|
||||
"Ratings saved but card probabilities may be incorrect."
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
async def create_database_records(
|
||||
self,
|
||||
@ -580,9 +733,9 @@ class CustomCardCreator:
|
||||
"name_first": player_info["name_first"],
|
||||
"name_last": player_info["name_last"],
|
||||
"hand": player_info["hand"],
|
||||
"starter_rating": 5, # TODO: Get from archetype
|
||||
"relief_rating": 5, # TODO: Get from archetype
|
||||
"closer_rating": None, # TODO: Get from archetype
|
||||
"starter_rating": card_data["starter_rating"],
|
||||
"relief_rating": card_data["relief_rating"],
|
||||
"closer_rating": card_data["closer_rating"],
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
34
db_calls.py
34
db_calls.py
@ -1,10 +1,18 @@
|
||||
import os
|
||||
|
||||
import aiohttp
|
||||
import pybaseball as pb
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from typing import Literal, Optional
|
||||
from exceptions import logger
|
||||
|
||||
AUTH_TOKEN = {"Authorization": "Bearer Tp3aO3jhYve5NJF1IqOmJTmk"}
|
||||
load_dotenv()
|
||||
|
||||
_token = os.environ.get("PD_API_TOKEN")
|
||||
if not _token:
|
||||
raise EnvironmentError("PD_API_TOKEN environment variable is required")
|
||||
AUTH_TOKEN = {"Authorization": f"Bearer {_token}"}
|
||||
DB_URL = "https://pd.manticorum.com/api"
|
||||
master_debug = True
|
||||
alt_database = None
|
||||
@ -59,7 +67,9 @@ async def db_get(
|
||||
log_string = f"get:\n{endpoint} id: {object_id} params: {params}"
|
||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||
|
||||
async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
|
||||
async with aiohttp.ClientSession(
|
||||
headers=AUTH_TOKEN, timeout=aiohttp.ClientTimeout(total=timeout)
|
||||
) as session:
|
||||
async with session.get(req_url) as r:
|
||||
logger.info(f"session info: {r}")
|
||||
if r.status == 200:
|
||||
@ -80,7 +90,9 @@ async def url_get(url: str, timeout: int = 3) -> dict:
|
||||
log_string = f"get:\n{url}"
|
||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with aiohttp.ClientSession(
|
||||
timeout=aiohttp.ClientTimeout(total=timeout)
|
||||
) as session:
|
||||
async with session.get(url) as r:
|
||||
if r.status == 200:
|
||||
log_string = "200 received"
|
||||
@ -99,7 +111,9 @@ async def db_patch(
|
||||
log_string = f"patch:\n{endpoint} {params}"
|
||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||
|
||||
async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
|
||||
async with aiohttp.ClientSession(
|
||||
headers=AUTH_TOKEN, timeout=aiohttp.ClientTimeout(total=timeout)
|
||||
) as session:
|
||||
async with session.patch(req_url) as r:
|
||||
if r.status == 200:
|
||||
js = await r.json()
|
||||
@ -118,7 +132,9 @@ async def db_post(
|
||||
log_string = f"post:\n{endpoint} payload: {payload}\ntype: {type(payload)}"
|
||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||
|
||||
async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
|
||||
async with aiohttp.ClientSession(
|
||||
headers=AUTH_TOKEN, timeout=aiohttp.ClientTimeout(total=timeout)
|
||||
) as session:
|
||||
async with session.post(req_url, json=payload) as r:
|
||||
if r.status == 200:
|
||||
js = await r.json()
|
||||
@ -137,7 +153,9 @@ async def db_put(
|
||||
log_string = f"put:\n{endpoint} payload: {payload}\ntype: {type(payload)}"
|
||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||
|
||||
async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
|
||||
async with aiohttp.ClientSession(
|
||||
headers=AUTH_TOKEN, timeout=aiohttp.ClientTimeout(total=timeout)
|
||||
) as session:
|
||||
async with session.put(req_url, json=payload) as r:
|
||||
if r.status == 200:
|
||||
js = await r.json()
|
||||
@ -154,7 +172,9 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3)
|
||||
log_string = f"delete:\n{endpoint} {object_id}"
|
||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||
|
||||
async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
|
||||
async with aiohttp.ClientSession(
|
||||
headers=AUTH_TOKEN, timeout=aiohttp.ClientTimeout(total=timeout)
|
||||
) as session:
|
||||
async with session.delete(req_url) as r:
|
||||
if r.status == 200:
|
||||
js = await r.json()
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import datetime
|
||||
import urllib.parse
|
||||
import pandas as pd
|
||||
from typing import Dict
|
||||
from typing import Any, Dict
|
||||
|
||||
from creation_helpers import (
|
||||
get_all_pybaseball_ids,
|
||||
@ -196,8 +196,8 @@ async def create_new_players(
|
||||
{
|
||||
"p_name": f"{f_name} {l_name}",
|
||||
"cost": NEW_PLAYER_COST,
|
||||
"image": f'{card_base_url}/{df_data["player_id"]}/'
|
||||
f'pitchingcard{urllib.parse.quote("?d=")}{release_dir}',
|
||||
"image": f"{card_base_url}/{df_data['player_id']}/"
|
||||
f"pitchingcard{urllib.parse.quote('?d=')}{release_dir}",
|
||||
"mlbclub": CLUB_LIST[df_data["Tm_vL"]],
|
||||
"franchise": FRANCHISE_LIST[df_data["Tm_vL"]],
|
||||
"cardset_id": cardset["id"],
|
||||
@ -268,7 +268,7 @@ async def calculate_pitching_cards(
|
||||
|
||||
def create_pitching_card(df_data):
|
||||
logger.info(
|
||||
f'Creating pitching card for {df_data["name_first"]} {df_data["name_last"]} / fg ID: {df_data["key_fangraphs"]}'
|
||||
f"Creating pitching card for {df_data['name_first']} {df_data['name_last']} / fg ID: {df_data['key_fangraphs']}"
|
||||
)
|
||||
pow_data = cde.pow_ratings(
|
||||
float(df_data["Inn_def"]), df_data["GS"], df_data["G"]
|
||||
@ -298,11 +298,13 @@ async def calculate_pitching_cards(
|
||||
int(df_data["GF"]), int(df_data["SV"]), int(df_data["G"])
|
||||
),
|
||||
"hand": df_data["pitch_hand"],
|
||||
"batting": f'#1W{df_data["pitch_hand"]}-C',
|
||||
"batting": f"#1W{df_data['pitch_hand']}-C",
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f'Skipping fg ID {df_data["key_fangraphs"]} due to: {e}')
|
||||
except Exception:
|
||||
logger.exception(
|
||||
f"Skipping fg ID {df_data['key_fangraphs']} due to exception"
|
||||
)
|
||||
|
||||
print("Calculating pitching cards...")
|
||||
pitching_stats.apply(create_pitching_card, axis=1)
|
||||
@ -333,7 +335,7 @@ async def create_position(
|
||||
|
||||
def create_pit_position(df_data):
|
||||
if df_data["key_bbref"] in df_p.index:
|
||||
logger.debug(f'Running P stats for {df_data["p_name"]}')
|
||||
logger.debug(f"Running P stats for {df_data['p_name']}")
|
||||
pit_positions.append(
|
||||
{
|
||||
"player_id": int(df_data["player_id"]),
|
||||
@ -355,7 +357,7 @@ async def create_position(
|
||||
try:
|
||||
pit_positions.append(
|
||||
{
|
||||
"player_id": int(df_data["key_bbref"]),
|
||||
"player_id": int(float(df_data["player_id"])),
|
||||
"position": "P",
|
||||
"innings": 1,
|
||||
"range": 5,
|
||||
@ -364,7 +366,7 @@ async def create_position(
|
||||
)
|
||||
except Exception:
|
||||
logger.error(
|
||||
f'Could not create pitcher position for {df_data["key_bbref"]}'
|
||||
f"Could not create pitcher position for {df_data['key_bbref']}"
|
||||
)
|
||||
|
||||
print("Calculating pitcher fielding lines now...")
|
||||
@ -386,7 +388,7 @@ async def calculate_pitcher_ratings(pitching_stats: pd.DataFrame, post_pitchers:
|
||||
pitching_ratings.extend(cpi.get_pitcher_ratings(df_data))
|
||||
except Exception:
|
||||
logger.error(
|
||||
f'Could not create a pitching card for {df_data["key_fangraphs"]}'
|
||||
f"Could not create a pitching card for {df_data['key_fangraphs']}"
|
||||
)
|
||||
|
||||
print("Calculating card ratings...")
|
||||
@ -400,7 +402,7 @@ async def calculate_pitcher_ratings(pitching_stats: pd.DataFrame, post_pitchers:
|
||||
|
||||
|
||||
async def post_player_updates(
|
||||
cardset: Dict[str, any],
|
||||
cardset: Dict[str, Any],
|
||||
player_description: str,
|
||||
card_base_url: str,
|
||||
release_dir: str,
|
||||
@ -525,8 +527,8 @@ async def post_player_updates(
|
||||
[
|
||||
(
|
||||
"image",
|
||||
f'{card_base_url}/{df_data["player_id"]}/pitchingcard'
|
||||
f'{urllib.parse.quote("?d=")}{release_dir}',
|
||||
f"{card_base_url}/{df_data['player_id']}/pitchingcard"
|
||||
f"{urllib.parse.quote('?d=')}{release_dir}",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
@ -23,6 +23,8 @@ dependencies = [
|
||||
"pydantic>=2.9.0",
|
||||
# AWS
|
||||
"boto3>=1.35.0",
|
||||
# Environment
|
||||
"python-dotenv>=1.0.0",
|
||||
# Scraping
|
||||
"beautifulsoup4>=4.12.0",
|
||||
"lxml>=5.0.0",
|
||||
|
||||
@ -23,9 +23,9 @@ multidict==6.1.0
|
||||
numpy==2.1.2
|
||||
packaging==24.1
|
||||
pandas==2.2.3
|
||||
peewee
|
||||
peewee==3.19.0
|
||||
pillow==11.0.0
|
||||
polars
|
||||
polars==1.36.1
|
||||
pluggy==1.5.0
|
||||
propcache==0.2.0
|
||||
# pyarrow==17.0.0
|
||||
|
||||
@ -53,21 +53,30 @@ PROMO_INCLUSION_RETRO_IDS = [
|
||||
# 'haraa001', # Aaron Harang (SP)
|
||||
# 'hofft001', # Trevor Hoffman (RP)
|
||||
]
|
||||
MIN_PA_VL = 20 if "live" in PLAYER_DESCRIPTION.lower() else 1 # 1 for PotM
|
||||
MIN_PA_VR = 40 if "live" in PLAYER_DESCRIPTION.lower() else 1 # 1 for PotM
|
||||
MIN_TBF_VL = MIN_PA_VL
|
||||
MIN_TBF_VR = MIN_PA_VR
|
||||
CARDSET_ID = (
|
||||
27 if "live" in PLAYER_DESCRIPTION.lower() else 28
|
||||
) # 27: 2005 Live, 28: 2005 Promos
|
||||
MIN_PA_VL = 20 # 1 for PotM
|
||||
MIN_PA_VR = 40 # 1 for PotM
|
||||
MIN_TBF_VL = 20
|
||||
MIN_TBF_VR = 40
|
||||
CARDSET_ID = 27 # 27: 2005 Live, 28: 2005 Promos
|
||||
|
||||
# Per-Update Parameters
|
||||
SEASON_PCT = 81 / 162 # Through end of July (~half season)
|
||||
START_DATE = 20050403 # YYYYMMDD format - 2005 Opening Day
|
||||
# END_DATE = 20050531 # YYYYMMDD format - May PotM
|
||||
END_DATE = 20050731 # End of July 2005
|
||||
SEASON_END_DATE = 20051002 # 2005 regular season end date (used to derive SEASON_PCT)
|
||||
SEASON_PCT = min(
|
||||
(
|
||||
datetime.datetime.strptime(str(END_DATE), "%Y%m%d")
|
||||
- datetime.datetime.strptime(str(START_DATE), "%Y%m%d")
|
||||
).days
|
||||
/ (
|
||||
datetime.datetime.strptime(str(SEASON_END_DATE), "%Y%m%d")
|
||||
- datetime.datetime.strptime(str(START_DATE), "%Y%m%d")
|
||||
).days,
|
||||
1.0,
|
||||
)
|
||||
POST_DATA = True
|
||||
LAST_WEEK_RATIO = 0.0 if PLAYER_DESCRIPTION == "Live" else 0.0
|
||||
LAST_WEEK_RATIO = 0.0
|
||||
LAST_TWOWEEKS_RATIO = 0.0
|
||||
LAST_MONTH_RATIO = 0.0
|
||||
|
||||
@ -1429,7 +1438,7 @@ def calc_pitching_cards(ps: pd.DataFrame, season_pct: float) -> pd.DataFrame:
|
||||
"closer_rating": [
|
||||
cpi.closer_rating(int(row["GF"]), int(row["SV"]), int(row["G"]))
|
||||
],
|
||||
"batting": [f'#1W{row["pitch_hand"].upper()}-C'],
|
||||
"batting": [f"#1W{row['pitch_hand'].upper()}-C"],
|
||||
}
|
||||
)
|
||||
return y.loc[0]
|
||||
@ -1598,7 +1607,7 @@ def calc_positions(bs: pd.DataFrame) -> pd.DataFrame:
|
||||
]:
|
||||
if row["key_bbref"] in pos_df.index:
|
||||
logger.info(
|
||||
f'Running {position} stats for {row["use_name"]} {row["last_name"]}'
|
||||
f"Running {position} stats for {row['use_name']} {row['last_name']}"
|
||||
)
|
||||
try:
|
||||
if "bis_runs_total" in pos_df.columns:
|
||||
@ -1865,8 +1874,8 @@ async def get_or_post_players(
|
||||
|
||||
def new_player_payload(row, ratings_df: pd.DataFrame):
|
||||
return {
|
||||
"p_name": f'{row["use_name"]} {row["last_name"]}',
|
||||
"cost": f'{ratings_df.loc[row['key_bbref']]["cost"]}',
|
||||
"p_name": f"{row['use_name']} {row['last_name']}",
|
||||
"cost": f"{ratings_df.loc[row['key_bbref']]['cost']}",
|
||||
"image": "change-me",
|
||||
"mlbclub": CLUB_LIST[row["Tm"]],
|
||||
"franchise": FRANCHISE_LIST[row["Tm"]],
|
||||
@ -1916,11 +1925,11 @@ async def get_or_post_players(
|
||||
# Update positions for existing players too
|
||||
all_pos = get_player_record_pos(def_rat_df, row)
|
||||
patch_params = [
|
||||
("cost", f'{bat_rat_df.loc[row['key_bbref']]["cost"]}'),
|
||||
("cost", f"{bat_rat_df.loc[row['key_bbref']]['cost']}"),
|
||||
("rarity_id", int(bat_rat_df.loc[row["key_bbref"]]["rarity_id"])),
|
||||
(
|
||||
"image",
|
||||
f'{CARD_BASE_URL}{player_id}/battingcard{urllib.parse.quote("?d=")}{RELEASE_DIRECTORY}',
|
||||
f"{CARD_BASE_URL}{player_id}/battingcard{urllib.parse.quote('?d=')}{RELEASE_DIRECTORY}",
|
||||
),
|
||||
]
|
||||
# Add position updates - set all 8 slots to clear any old positions
|
||||
@ -1964,7 +1973,7 @@ async def get_or_post_players(
|
||||
params=[
|
||||
(
|
||||
"image",
|
||||
f'{CARD_BASE_URL}{player_id}/battingcard{urllib.parse.quote("?d=")}{RELEASE_DIRECTORY}',
|
||||
f"{CARD_BASE_URL}{player_id}/battingcard{urllib.parse.quote('?d=')}{RELEASE_DIRECTORY}",
|
||||
)
|
||||
],
|
||||
)
|
||||
@ -2003,11 +2012,11 @@ async def get_or_post_players(
|
||||
|
||||
# Determine pitcher positions based on ratings
|
||||
patch_params = [
|
||||
("cost", f'{pit_rat_df.loc[row['key_bbref']]["cost"]}'),
|
||||
("cost", f"{pit_rat_df.loc[row['key_bbref']]['cost']}"),
|
||||
("rarity_id", int(pit_rat_df.loc[row["key_bbref"]]["rarity_id"])),
|
||||
(
|
||||
"image",
|
||||
f'{CARD_BASE_URL}{player_id}/pitchingcard{urllib.parse.quote("?d=")}{RELEASE_DIRECTORY}',
|
||||
f"{CARD_BASE_URL}{player_id}/pitchingcard{urllib.parse.quote('?d=')}{RELEASE_DIRECTORY}",
|
||||
),
|
||||
]
|
||||
|
||||
@ -2081,7 +2090,7 @@ async def get_or_post_players(
|
||||
params=[
|
||||
(
|
||||
"image",
|
||||
f'{CARD_BASE_URL}{player_id}/pitchingcard{urllib.parse.quote("?d=")}{RELEASE_DIRECTORY}',
|
||||
f"{CARD_BASE_URL}{player_id}/pitchingcard{urllib.parse.quote('?d=')}{RELEASE_DIRECTORY}",
|
||||
)
|
||||
],
|
||||
)
|
||||
@ -2105,10 +2114,10 @@ async def get_or_post_players(
|
||||
raise KeyError("Could not get players - not enough stat DFs were supplied")
|
||||
|
||||
pd.DataFrame(player_deltas[1:], columns=player_deltas[0]).to_csv(
|
||||
f'{"batter" if bstat_df is not None else "pitcher"}-deltas.csv'
|
||||
f"{'batter' if bstat_df is not None else 'pitcher'}-deltas.csv"
|
||||
)
|
||||
pd.DataFrame(new_players[1:], columns=new_players[0]).to_csv(
|
||||
f'new-{"batter" if bstat_df is not None else "pitcher"}s.csv'
|
||||
f"new-{'batter' if bstat_df is not None else 'pitcher'}s.csv"
|
||||
)
|
||||
|
||||
players_df = pd.DataFrame(all_players).set_index("bbref_id")
|
||||
@ -2280,7 +2289,7 @@ async def post_positions(pos_df: pd.DataFrame, delete_existing: bool = False):
|
||||
deleted_count += 1
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f'Failed to delete cardposition {pos["id"]}: {e}'
|
||||
f"Failed to delete cardposition {pos['id']}: {e}"
|
||||
)
|
||||
logger.info(f"Deleted {deleted_count} positions for players in current run")
|
||||
|
||||
|
||||
@ -1,75 +0,0 @@
|
||||
from typing import Literal
|
||||
import requests
|
||||
from exceptions import logger, log_exception
|
||||
|
||||
# NOTE(review): these are hardcoded Supabase service_role JWTs committed to
# source — a credential leak. They should be loaded from an environment
# variable or secrets store, and the exposed keys rotated. (Cannot verify
# from here whether the keys are still live — confirm and revoke.)
AUTH_TOKEN = {
    "Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImNucGhwbnV2aGp2cXprY2J3emRrIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImlhdCI6MTc0NTgxMTc4NCwiZXhwIjoyMDYxMzg3Nzg0fQ.7dG_y2zU2PajBwTD8vut5GcWf3CSaZePkYW_hMf0fVg",
    "apikey": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImNucGhwbnV2aGp2cXprY2J3emRrIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImlhdCI6MTc0NTgxMTc4NCwiZXhwIjoyMDYxMzg3Nzg0fQ.7dG_y2zU2PajBwTD8vut5GcWf3CSaZePkYW_hMf0fVg",
}
# Base URL of the Supabase REST (PostgREST) API used by the db_* helpers below.
DB_URL = "https://cnphpnuvhjvqzkcbwzdk.supabase.co/rest/v1"
|
||||
|
||||
|
||||
def get_req_url(endpoint: str, params: list = None):
    """Build a request URL for *endpoint*, appending optional query params.

    Args:
        endpoint: REST endpoint path appended to ``DB_URL``.
        params: optional list of ``(name, value)`` pairs to encode as the
            query string.

    Returns:
        The assembled URL string. With no params the URL ends with a bare
        ``?`` (preserved for backward compatibility with existing callers).
    """
    req_url = f"{DB_URL}/{endpoint}?"

    if params:
        # BUG FIX: the base URL already ends with "?", but the old loop
        # prefixed the FIRST parameter with another "?", producing
        # ".../endpoint??a=1&b=2". Join params with "&" directly instead.
        req_url += "&".join(f"{name}={value}" for name, value in params)

    return req_url
|
||||
|
||||
|
||||
def log_return_value(log_string: str, log_type: Literal["info", "debug"]):
    """Log a (possibly truncated) API return value at the given level.

    Args:
        log_string: raw return payload; only the first 1200 characters are
            logged, with a " [ S N I P P E D ]" marker when truncated.
        log_type: ``"info"`` or ``"debug"`` — selects the logger method
            (anything other than ``"info"`` falls back to debug, as before).
    """
    # Build the message once instead of duplicating the f-string in both
    # branches (the two branches previously differed only in log level).
    snipped = " [ S N I P P E D ]" if len(log_string) > 1200 else ""
    message = f"return: {log_string[:1200]}{snipped}\n"

    if log_type == "info":
        logger.info(message)
    else:
        logger.debug(message)
|
||||
|
||||
|
||||
def db_get(
    endpoint: str,
    params: dict = None,
    limit: int = 1000,
    offset: int = 0,
    none_okay: bool = True,
    timeout: int = 3,
):
    """Fetch rows from a Supabase REST endpoint via HTTP GET.

    Args:
        endpoint: table/endpoint name appended to ``DB_URL``.
        params: optional query parameters forwarded to ``requests``.
        limit: maximum number of rows to request.
        offset: row offset for paging.
        none_okay: when True an empty list result returns ``None``;
            otherwise the empty result is reported via ``log_exception``.
        timeout: request timeout in seconds.

    Returns:
        The decoded JSON payload, or ``None`` for an empty list result
        when ``none_okay`` is True.
    """
    req_url = f"{DB_URL}/{endpoint}?limit={limit}&offset={offset}"
    logger.info(f"HTTP GET: {req_url}, params: {params}")

    # BUG FIX: the `timeout` argument was accepted but never passed to
    # requests, so a stalled server could hang this call indefinitely.
    # Also use requests.get directly instead of requests.request("GET", ...).
    response = requests.get(req_url, params=params, headers=AUTH_TOKEN, timeout=timeout)
    logger.info(response)

    if response.status_code != requests.codes.ok:
        # Non-2xx: route through the project's exception logger.
        log_exception(Exception, response.text)

    data = response.json()
    if isinstance(data, list) and len(data) == 0:
        if none_okay:
            return None
        else:
            log_exception(Exception, "Query returned no results and none_okay = False")

    return data
|
||||
|
||||
# async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
|
||||
# async with session.get(req_url) as r:
|
||||
# logger.info(f'session info: {r}')
|
||||
# if r.status == 200:
|
||||
# js = await r.json()
|
||||
# log_return_value(f'{js}')
|
||||
# return js
|
||||
# elif none_okay:
|
||||
# e = await r.text()
|
||||
# logger.error(e)
|
||||
# return None
|
||||
# else:
|
||||
# e = await r.text()
|
||||
# logger.error(e)
|
||||
# raise ValueError(f'DB: {e}')
|
||||
@ -170,6 +170,7 @@ class TestDataFetcher:
|
||||
|
||||
@patch("automated_data_fetcher.pb.batting_stats_bref")
|
||||
@patch("automated_data_fetcher.pb.pitching_stats_bref")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_baseball_reference_data(
|
||||
self,
|
||||
mock_pitching,
|
||||
@ -206,6 +207,7 @@ class TestDataFetcher:
|
||||
|
||||
@patch("automated_data_fetcher.pb.batting_stats")
|
||||
@patch("automated_data_fetcher.pb.pitching_stats")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_fangraphs_data(
|
||||
self,
|
||||
mock_pitching,
|
||||
@ -231,6 +233,7 @@ class TestDataFetcher:
|
||||
|
||||
@patch("automated_data_fetcher.pb.batting_stats_range")
|
||||
@patch("automated_data_fetcher.pb.pitching_stats_range")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_fangraphs_data_with_dates(
|
||||
self,
|
||||
mock_pitching,
|
||||
@ -253,6 +256,7 @@ class TestDataFetcher:
|
||||
mock_pitching.assert_called_once_with(start_date, end_date)
|
||||
|
||||
@patch("automated_data_fetcher.get_all_pybaseball_ids")
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_active_players_existing_function(self, mock_get_ids, fetcher):
|
||||
"""Test getting player IDs using existing function"""
|
||||
mock_get_ids.return_value = ["12345", "67890", "11111"]
|
||||
@ -264,6 +268,7 @@ class TestDataFetcher:
|
||||
|
||||
@patch("automated_data_fetcher.get_all_pybaseball_ids")
|
||||
@patch("automated_data_fetcher.pb.batting_stats")
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_active_players_fallback(
|
||||
self, mock_batting, mock_get_ids, fetcher, sample_batting_data
|
||||
):
|
||||
@ -279,6 +284,7 @@ class TestDataFetcher:
|
||||
assert result == expected_ids
|
||||
|
||||
@patch("automated_data_fetcher.pb.get_splits")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_player_splits(
|
||||
self, mock_get_splits, fetcher, sample_splits_data
|
||||
):
|
||||
@ -333,6 +339,7 @@ class TestLiveSeriesDataFetcher:
|
||||
|
||||
@patch.object(DataFetcher, "fetch_baseball_reference_data")
|
||||
@patch.object(DataFetcher, "fetch_fangraphs_data")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_live_data(self, mock_fg_data, mock_bref_data, live_fetcher):
|
||||
"""Test fetching live series data"""
|
||||
# Mock return values
|
||||
@ -360,6 +367,7 @@ class TestUtilityFunctions:
|
||||
"""Test cases for utility functions"""
|
||||
|
||||
@patch("automated_data_fetcher.DataFetcher")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_season_data(self, mock_fetcher_class):
|
||||
"""Test fetch_season_data function"""
|
||||
# Create mock fetcher instance
|
||||
@ -389,6 +397,7 @@ class TestUtilityFunctions:
|
||||
assert any("AUTOMATED DOWNLOAD COMPLETE" in call for call in print_calls)
|
||||
|
||||
@patch("automated_data_fetcher.LiveSeriesDataFetcher")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_live_series_data(self, mock_fetcher_class):
|
||||
"""Test fetch_live_series_data function"""
|
||||
# Create mock fetcher instance
|
||||
@ -416,6 +425,7 @@ class TestErrorHandling:
|
||||
return DataFetcher(2023, "Season")
|
||||
|
||||
@patch("automated_data_fetcher.pb.pitching_stats_bref")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_baseball_reference_data_error(self, mock_pitching, fetcher):
|
||||
"""Test error handling in Baseball Reference data fetch"""
|
||||
# Mock function to raise an exception
|
||||
@ -425,6 +435,7 @@ class TestErrorHandling:
|
||||
await fetcher.fetch_baseball_reference_data()
|
||||
|
||||
@patch("automated_data_fetcher.pb.batting_stats")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_fangraphs_data_error(self, mock_batting, fetcher):
|
||||
"""Test error handling in FanGraphs data fetch"""
|
||||
# Mock function to raise an exception
|
||||
@ -435,6 +446,7 @@ class TestErrorHandling:
|
||||
|
||||
@patch("automated_data_fetcher.get_all_pybaseball_ids")
|
||||
@patch("automated_data_fetcher.pb.batting_stats")
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_active_players_complete_failure(
|
||||
self, mock_batting, mock_get_ids, fetcher
|
||||
):
|
||||
@ -449,6 +461,7 @@ class TestErrorHandling:
|
||||
assert result == []
|
||||
|
||||
@patch("automated_data_fetcher.pb.get_splits")
|
||||
@pytest.mark.asyncio
|
||||
async def test_fetch_player_splits_individual_errors(
|
||||
self, mock_get_splits, fetcher
|
||||
):
|
||||
@ -479,6 +492,7 @@ class TestIntegration:
|
||||
"""Integration tests that require network access"""
|
||||
|
||||
@pytest.mark.skip(reason="Requires network access and may be slow")
|
||||
@pytest.mark.asyncio
|
||||
async def test_real_data_fetch(self):
|
||||
"""Test fetching real data from pybaseball (skip by default)"""
|
||||
fetcher = DataFetcher(2022, "Season") # Use a complete season
|
||||
|
||||
@ -1,10 +1,4 @@
|
||||
from creation_helpers import pd_positions_df, mround, sanitize_chance_output
|
||||
|
||||
|
||||
def test_positions_df():
|
||||
cardset_19_pos = pd_positions_df(19)
|
||||
|
||||
assert True == True
|
||||
from creation_helpers import mround, sanitize_chance_output
|
||||
|
||||
|
||||
def test_mround():
|
||||
|
||||
Loading…
Reference in New Issue
Block a user