Scouting tables added to db
This commit is contained in:
parent
d5386f86f8
commit
8a0d094227
@ -144,6 +144,19 @@ class Cardset(BaseModel):
|
||||
db.create_tables([Cardset])
|
||||
|
||||
|
||||
class MlbPlayer(BaseModel):
    # Reference table of real-world MLB players; game Player rows link to
    # it via the Player.mlb_player foreign key.
    first_name = CharField()
    last_name = CharField()
    key_fangraphs = IntegerField(null=True)  # FanGraphs player id
    key_bbref = CharField(null=True)         # Baseball-Reference id
    key_retro = CharField(null=True)         # Retrosheet id
    key_mlbam = IntegerField(null=True)      # MLB Advanced Media id
    offense_col = IntegerField(default=1)    # offense column copied onto new batting cards


db.create_tables([MlbPlayer])
|
||||
|
||||
|
||||
class Player(BaseModel):
|
||||
player_id = IntegerField(primary_key=True)
|
||||
p_name = CharField()
|
||||
@ -170,6 +183,7 @@ class Player(BaseModel):
|
||||
fangr_id = CharField(null=True)
|
||||
description = CharField()
|
||||
quantity = IntegerField(default=999)
|
||||
mlb_player = ForeignKeyField(MlbPlayer, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.cardset} {self.p_name} ({self.rarity.name})'
|
||||
|
||||
@ -2,6 +2,7 @@ import datetime
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
|
||||
date = f'{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}'
|
||||
@ -20,11 +21,175 @@ logging.basicConfig(
|
||||
|
||||
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
|
||||
master_debug = False
|
||||
DB_URL = 'https://pd.manticorum.com/api/'
|
||||
AUTH_TOKEN = f'{os.environ.get("API_TOKEN")}'
|
||||
AUTH_HEADER = {'Authorization': f'Bearer {AUTH_TOKEN}'}
|
||||
|
||||
|
||||
if os.environ.get('TESTING') == 'False':
|
||||
DB_URL = 'https://pddev.manticorum.com/api/'
|
||||
|
||||
|
||||
def valid_token(token):
    """Return True when *token* matches the configured API token.

    NOTE(review): AUTH_TOKEN is built with an f-string, so an unset
    API_TOKEN env var becomes the literal string 'None' — a client
    sending 'None' would then authenticate. Confirm API_TOKEN is always
    set in deployment.
    """
    # The diff rendering carried both the old and new return statements,
    # leaving a dead, unreachable second return; only the comparison
    # against the module-level AUTH_TOKEN constant is kept.
    return token == AUTH_TOKEN
|
||||
|
||||
|
||||
def int_timestamp(datetime_obj: datetime.datetime) -> int:
    """Convert a datetime to an integer Unix timestamp in milliseconds.

    Fix: the original annotation referenced the *module* ``datetime``
    rather than the ``datetime.datetime`` class; only the annotation
    changed — the runtime behavior is identical.
    """
    return int(datetime.datetime.timestamp(datetime_obj) * 1000)
|
||||
|
||||
|
||||
def mround(x, prec=2, base=.05):
    """Round *x* to the nearest multiple of *base*, then to *prec* decimals."""
    nearest_multiple = base * round(float(x) / base)
    return round(nearest_multiple, prec)
|
||||
|
||||
|
||||
def param_char(other_params):
    """Return the URL query separator: '?' before the first parameter,
    '&' once *other_params* indicates parameters are already present."""
    return '&' if other_params else '?'
|
||||
|
||||
|
||||
def get_req_url(endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None):
    """Build the API request URL for *endpoint*, optionally appending an
    object id and a list of (key, value) query parameters."""
    id_part = '' if object_id is None else f'/{object_id}'
    req_url = f'{DB_URL}/v{api_ver}/{endpoint}{id_part}'

    if params:
        first = True
        for key, value in params:
            # '?' before the first parameter, '&' before the rest.
            req_url += f'{param_char(not first)}{key}={value}'
            first = False

    return req_url
|
||||
|
||||
|
||||
async def db_get(endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None, none_okay: bool = True,
                 timeout: int = 3):
    """GET *endpoint* from the API with retry-on-timeout.

    Returns the decoded JSON on HTTP 200. On a non-200 response, returns
    None when *none_okay* is True, otherwise raises ValueError. Raises
    ConnectionError after two retried timeouts.
    """
    def _log_response(data):
        # Shared truncated-response logging (was duplicated verbatim in
        # the 200 and none_okay branches).
        text = f'{data}'
        msg = f'return: {text[:1200]}{" [ S N I P P E D ]" if len(text) > 1200 else ""}'
        logging.info(msg) if master_debug else logging.debug(msg)

    req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
    log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
    logging.info(log_string) if master_debug else logging.debug(log_string)

    retries = 0
    while True:
        try:
            resp = requests.get(req_url, timeout=timeout)
            break
        # Fix: catch requests.Timeout (covers ConnectTimeout as well as
        # ReadTimeout), consistent with db_patch/db_post — the original
        # caught only ReadTimeout, so connect timeouts escaped the loop.
        except requests.Timeout:
            logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
            if retries > 1:
                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
                                      f'hang on a few extra seconds and try again.')
            # Backoff: +2s on the first retry, +5s on the second.
            timeout += [2, 5][retries]
            retries += 1

    if resp.status_code == 200:
        data = resp.json()
        _log_response(data)
        return data
    elif none_okay:
        _log_response(resp.json())
        return None
    else:
        logging.warning(resp.text)
        raise ValueError(f'DB: {resp.text}')
|
||||
|
||||
|
||||
async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3):
    """PATCH *endpoint*/*object_id* with *params* as query parameters.

    Returns the decoded JSON on HTTP 200; raises ConnectionError after
    two retried timeouts and ValueError on any other response.
    """
    req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
    msg = f'patch:\n{endpoint} {params}'
    logging.info(msg) if master_debug else logging.debug(msg)

    attempt = 0
    while True:
        try:
            resp = requests.patch(req_url, headers=AUTH_HEADER, timeout=timeout)
            break
        except requests.Timeout:
            logging.error(f'Patch Timeout: {req_url} / retries: {attempt} / timeout: {timeout}')
            if attempt > 1:
                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
                                      f'hang on a few extra seconds and try again.')
            # Grow the timeout budget: +min(3, timeout) first, then +min(5, timeout).
            timeout += min(3 if attempt == 0 else 5, timeout)
            attempt += 1

    if resp.status_code != 200:
        logging.warning(resp.text)
        raise ValueError(f'DB: {resp.text}')

    data = resp.json()
    payload_text = f'{data}'
    truncated = f'return: {payload_text[:1200]}{" [ S N I P P E D ]" if len(payload_text) > 1200 else ""}'
    logging.info(truncated) if master_debug else logging.debug(truncated)
    return data
|
||||
|
||||
|
||||
async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
    """POST *payload* as JSON to *endpoint*.

    Returns the decoded JSON on HTTP 200; raises ConnectionError after
    two retried timeouts and ValueError on any other response.
    """
    req_url = get_req_url(endpoint, api_ver=api_ver)
    msg = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
    logging.info(msg) if master_debug else logging.debug(msg)

    attempt = 0
    while True:
        try:
            resp = requests.post(req_url, json=payload, headers=AUTH_HEADER, timeout=timeout)
            break
        except requests.Timeout:
            logging.error(f'Post Timeout: {req_url} / retries: {attempt} / timeout: {timeout}')
            if attempt > 1:
                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
                                      f'hang on a few extra seconds and try again.')
            # Grow the timeout budget: +min(3, timeout) first, then +min(5, timeout).
            timeout += min(3 if attempt == 0 else 5, timeout)
            attempt += 1

    if resp.status_code != 200:
        logging.warning(resp.text)
        raise ValueError(f'DB: {resp.text}')

    data = resp.json()
    payload_text = f'{data}'
    truncated = f'return: {payload_text[:1200]}{" [ S N I P P E D ]" if len(payload_text) > 1200 else ""}'
    logging.info(truncated) if master_debug else logging.debug(truncated)
    return data
|
||||
|
||||
|
||||
async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
    """DELETE *endpoint*/*object_id*.

    Returns True on HTTP 200; raises ConnectionError after two retried
    timeouts and ValueError on any other response.
    """
    req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id)
    log_string = f'delete:\n{endpoint} {object_id}'
    logging.info(log_string) if master_debug else logging.debug(log_string)

    retries = 0
    while True:
        try:
            resp = requests.delete(req_url, headers=AUTH_HEADER, timeout=timeout)
            break
        # Fix: catch requests.Timeout (covers ConnectTimeout as well as
        # ReadTimeout), consistent with db_patch/db_post — the original
        # caught only ReadTimeout, so connect timeouts escaped the loop.
        except requests.Timeout:
            logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
            if retries > 1:
                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
                                      f'hang on a few extra seconds and try again.')
            timeout += [min(3, timeout), min(5, timeout)][retries]
            retries += 1

    if resp.status_code == 200:
        data = resp.json()
        log_string = f'{data}'
        snipped = f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}'
        logging.info(snipped) if master_debug else logging.debug(snipped)
        return True
    else:
        logging.warning(resp.text)
        raise ValueError(f'DB: {resp.text}')
|
||||
|
||||
@ -3,7 +3,7 @@ from fastapi import FastAPI
|
||||
from .routers_v2 import (
|
||||
current, teams, rarity, cardsets, players, packtypes, packs, cards, events, results, rewards,
|
||||
batstats, pitstats, notifications, paperdex, gamerewards, gauntletrewards, gauntletruns, battingcards,
|
||||
battingcardratings, pitchingcards, pitchingcardratings, cardpositions, scouting)
|
||||
battingcardratings, pitchingcards, pitchingcardratings, cardpositions, scouting, mlbplayers)
|
||||
|
||||
app = FastAPI(
|
||||
responses={404: {'description': 'Not found'}}
|
||||
@ -33,3 +33,4 @@ app.include_router(pitchingcards.router)
|
||||
app.include_router(pitchingcardratings.router)
|
||||
app.include_router(cardpositions.router)
|
||||
app.include_router(scouting.router)
|
||||
app.include_router(mlbplayers.router)
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
from typing import Literal, Optional
|
||||
from pybaseball import playerid_reverse_lookup
|
||||
from .card_creation import batter_calcs, pitcher_calcs, defense_calcs
|
||||
|
||||
import pydantic
|
||||
|
||||
@ -22,3 +23,6 @@ def get_player_ids(player_id: str, id_type: Literal['bbref', 'fangraphs']) -> Pl
|
||||
retro=q.loc[0].key_retro,
|
||||
mlbam=q.loc[0].key_mlbam
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
import random
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import Literal, Optional, List
|
||||
import logging
|
||||
import pydantic
|
||||
|
||||
from ..db_engine import db, BattingCard, model_to_dict, chunked, Player
|
||||
from ..db_engine import db, BattingCard, model_to_dict, fn, chunked, Player, MlbPlayer
|
||||
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA
|
||||
|
||||
logging.basicConfig(
|
||||
@ -28,7 +30,7 @@ class BattingCardModel(pydantic.BaseModel):
|
||||
bunting: str = 'C'
|
||||
hit_and_run: str = 'C'
|
||||
running: int = 10
|
||||
offense_col: int
|
||||
offense_col: int = None
|
||||
hand: Literal['R', 'L', 'S'] = 'R'
|
||||
|
||||
|
||||
@ -38,13 +40,21 @@ class BattingCardList(pydantic.BaseModel):
|
||||
|
||||
@router.get('')
|
||||
async def get_batting_cards(
|
||||
player_id: list = Query(default=None), cardset_id: list = Query(default=None), short_output: bool = False):
|
||||
player_id: list = Query(default=None), player_name: list = Query(default=None),
|
||||
cardset_id: list = Query(default=None), short_output: bool = False, limit: Optional[int] = None):
|
||||
all_cards = BattingCard.select()
|
||||
if player_id is not None:
|
||||
all_cards = all_cards.where(BattingCard.player_id << player_id)
|
||||
if cardset_id is not None:
|
||||
all_players = Player.select().where(Player.cardset_id << cardset_id)
|
||||
all_cards = all_cards.where(BattingCard.player << all_players)
|
||||
if player_name is not None:
|
||||
name_list = [x.lower() for x in player_name]
|
||||
all_players = Player.select().where(fn.lower(Player.p_name) << name_list)
|
||||
all_cards = all_cards.where(BattingCard.player << all_players)
|
||||
|
||||
if limit is not None:
|
||||
all_cards = all_cards.limit(limit)
|
||||
|
||||
return_val = {'count': all_cards.count(), 'cards': [
|
||||
model_to_dict(x, recurse=not short_output) for x in all_cards
|
||||
@ -90,16 +100,30 @@ async def put_cards(cards: BattingCardList, token: str = Depends(oauth2_scheme))
|
||||
|
||||
new_cards = []
|
||||
updates = 0
|
||||
logging.info(f'here!')
|
||||
|
||||
for x in cards.cards:
|
||||
try:
|
||||
BattingCard.get(
|
||||
old = BattingCard.get(
|
||||
(BattingCard.player_id == x.player_id) & (BattingCard.variant == x.variant)
|
||||
)
|
||||
|
||||
if x.offense_col is None:
|
||||
x.offense_col = old.offense_col
|
||||
updates += BattingCard.update(x.dict()).where(
|
||||
(BattingCard.player_id == x.player_id) & (BattingCard.variant == x.variant)
|
||||
).execute()
|
||||
except BattingCard.DoesNotExist:
|
||||
if x.offense_col is None:
|
||||
this_player = Player.get_or_none(Player.player_id == x.player_id)
|
||||
mlb_player = MlbPlayer.get_or_none(MlbPlayer.key_bbref == this_player.bbref_id)
|
||||
if mlb_player is not None:
|
||||
logging.info(f'setting offense_col to {mlb_player.offense_col} for {this_player.p_name}')
|
||||
x.offense_col = mlb_player.offense_col
|
||||
else:
|
||||
logging.info(f'randomly setting offense_col for {this_player.p_name}')
|
||||
x.offense_col = random.randint(1, 3)
|
||||
logging.info(f'x.dict(): {x.dict()}')
|
||||
new_cards.append(x.dict())
|
||||
|
||||
with db.atomic():
|
||||
@ -167,7 +191,7 @@ async def delete_card(card_id: int, token: str = Depends(oauth2_scheme)):
|
||||
db.close()
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail='You are not authorized to patch batting cards. This event has been logged.'
|
||||
detail='You are not authorized to delete batting cards. This event has been logged.'
|
||||
)
|
||||
|
||||
this_card = BattingCard.get_or_none(BattingCard.id == card_id)
|
||||
@ -182,3 +206,19 @@ async def delete_card(card_id: int, token: str = Depends(oauth2_scheme)):
|
||||
return f'Card {this_card} has been deleted'
|
||||
else:
|
||||
raise HTTPException(status_code=500, detail=f'Card {this_card} could not be deleted')
|
||||
|
||||
|
||||
@router.delete('')
async def delete_all_cards(token: str = Depends(oauth2_scheme)):
    """Delete every BattingCard row. Requires a valid API token."""
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to delete batting cards. This event has been logged.'
        )

    # Bug fix: peewee's ModelDelete.execute() returns the number of rows
    # removed. The original called d_query.count() *after* executing the
    # delete, which re-runs a count over the now-empty table and always
    # reports 0 deleted cards.
    deleted = BattingCard.delete().execute()
    db.close()

    return f'Deleted {deleted} batting cards'
|
||||
|
||||
189
app/routers_v2/mlbplayers.py
Normal file
189
app/routers_v2/mlbplayers.py
Normal file
@ -0,0 +1,189 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Response, Query
|
||||
from typing import Optional, List
|
||||
import logging
|
||||
import pydantic
|
||||
from pandas import DataFrame
|
||||
|
||||
from ..db_engine import db, MlbPlayer, model_to_dict, fn, chunked, query_to_csv
|
||||
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA
|
||||
|
||||
logging.basicConfig(
|
||||
filename=LOG_DATA['filename'],
|
||||
format=LOG_DATA['format'],
|
||||
level=LOG_DATA['log_level']
|
||||
)
|
||||
|
||||
router = APIRouter(
|
||||
prefix='/api/v2/mlbplayers',
|
||||
tags=['mlbplayers']
|
||||
)
|
||||
|
||||
|
||||
class PlayerModel(pydantic.BaseModel):
    # Request schema for posting MLB players; mirrors the MlbPlayer peewee
    # model, with every external stat-provider key optional.
    # NOTE(review): offense_col defaults to None here but to 1 on the
    # MlbPlayer model — confirm which default should win on insert.
    first_name: str
    last_name: str
    key_fangraphs: int = None
    key_bbref: str = None
    key_retro: str = None
    key_mlbam: int = None
    offense_col: int = None
|
||||
|
||||
|
||||
class PlayerList(pydantic.BaseModel):
    # Bulk-post wrapper: a list of PlayerModel payloads.
    players: List[PlayerModel]
|
||||
|
||||
|
||||
@router.get('')
async def get_players(
        full_name: list = Query(default=None), first_name: list = Query(default=None),
        last_name: list = Query(default=None), key_fangraphs: list = Query(default=None),
        key_bbref: list = Query(default=None), key_retro: list = Query(default=None),
        key_mlbam: list = Query(default=None), offense_col: list = Query(default=None), csv: Optional[bool] = False):
    """List MLB players, filterable by name or any external key.

    With csv=True the result is returned as text/csv; otherwise a JSON
    object with a count and the matching player dicts.
    """
    all_players = MlbPlayer.select()

    if full_name is not None:
        # Case-insensitive match on "first last".
        name_list = [x.lower() for x in full_name]
        all_players = all_players.where(
            fn.lower(MlbPlayer.first_name) + ' ' + fn.lower(MlbPlayer.last_name) << name_list
        )
    # Bug fix: the original applied this first_name filter twice in a row
    # (copy-paste duplicate); the redundant block is removed.
    if first_name is not None:
        all_players = all_players.where(MlbPlayer.first_name << first_name)
    if last_name is not None:
        all_players = all_players.where(MlbPlayer.last_name << last_name)
    if key_fangraphs is not None:
        all_players = all_players.where(MlbPlayer.key_fangraphs << key_fangraphs)
    if key_bbref is not None:
        all_players = all_players.where(MlbPlayer.key_bbref << key_bbref)
    if key_retro is not None:
        all_players = all_players.where(MlbPlayer.key_retro << key_retro)
    if key_mlbam is not None:
        all_players = all_players.where(MlbPlayer.key_mlbam << key_mlbam)
    if offense_col is not None:
        all_players = all_players.where(MlbPlayer.offense_col << offense_col)

    if csv:
        return_val = query_to_csv(all_players)
        db.close()
        return Response(content=return_val, media_type='text/csv')

    return_val = {'count': all_players.count(), 'players': [
        model_to_dict(x) for x in all_players
    ]}
    db.close()
    return return_val
|
||||
|
||||
|
||||
@router.get('/{player_id}')
async def get_one_player(player_id: int):
    """Fetch a single MlbPlayer by primary key; 404 when absent."""
    player = MlbPlayer.get_or_none(MlbPlayer.id == player_id)
    if player is None:
        db.close()
        raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found')

    payload = model_to_dict(player)
    db.close()
    return payload
|
||||
|
||||
|
||||
@router.patch('/{player_id}')
async def patch_player(
        player_id: int, first_name: Optional[str] = None, last_name: Optional[str] = None,
        key_fangraphs: Optional[str] = None, key_bbref: Optional[str] = None, key_retro: Optional[str] = None,
        key_mlbam: Optional[str] = None, offense_col: Optional[str] = None, token: str = Depends(oauth2_scheme)):
    # Partially update an MlbPlayer: every query parameter that is provided
    # overwrites the stored field; omitted parameters are left untouched.
    # NOTE(review): key_fangraphs, key_mlbam and offense_col are declared
    # Optional[str] but map to IntegerFields on the MlbPlayer model —
    # presumably they should be Optional[int]; confirm before tightening.
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to patch mlb players. This event has been logged.'
        )

    this_player = MlbPlayer.get_or_none(MlbPlayer.id == player_id)
    if this_player is None:
        db.close()
        raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found')

    if first_name is not None:
        this_player.first_name = first_name
    if last_name is not None:
        this_player.last_name = last_name
    if key_fangraphs is not None:
        this_player.key_fangraphs = key_fangraphs
    if key_bbref is not None:
        this_player.key_bbref = key_bbref
    if key_retro is not None:
        this_player.key_retro = key_retro
    if key_mlbam is not None:
        this_player.key_mlbam = key_mlbam
    if offense_col is not None:
        this_player.offense_col = offense_col

    # save() returns the number of modified rows; exactly 1 means success.
    if this_player.save() == 1:
        return_val = model_to_dict(this_player)
        db.close()
        return return_val
    else:
        db.close()
        raise HTTPException(
            status_code=418,
            detail='Well slap my ass and call me a teapot; I could not save that player'
        )
|
||||
|
||||
|
||||
@router.post('')
async def post_players(players: PlayerList, token: str = Depends(oauth2_scheme)):
    """Bulk-insert MLB players, rejecting any whose external keys already
    exist in the table. Requires a valid API token."""
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to post mlb players. This event has been logged.'
        )

    new_players = []
    for x in players.players:
        # Bug fix: peewee renders `field == None` as `field IS NULL`, so the
        # original duplicate check flagged a "dupe" whenever an incoming key
        # was None and ANY stored row had that key NULL. Only compare keys
        # that were actually supplied.
        key_checks = [
            cond for cond, val in (
                (MlbPlayer.key_fangraphs == x.key_fangraphs, x.key_fangraphs),
                (MlbPlayer.key_mlbam == x.key_mlbam, x.key_mlbam),
                (MlbPlayer.key_retro == x.key_retro, x.key_retro),
                (MlbPlayer.key_bbref == x.key_bbref, x.key_bbref),
            ) if val is not None
        ]
        if key_checks:
            dupe_cond = key_checks[0]
            for cond in key_checks[1:]:
                dupe_cond = dupe_cond | cond
            if MlbPlayer.select().where(dupe_cond).count() > 0:
                db.close()
                raise HTTPException(
                    status_code=400,
                    detail=f'{x.first_name} {x.last_name} has a key already in the database'
                )

        new_players.append(x.dict())

    # Insert in small batches inside one transaction.
    with db.atomic():
        for batch in chunked(new_players, 15):
            MlbPlayer.insert_many(batch).on_conflict_replace().execute()
    db.close()

    return f'Inserted {len(new_players)} new MLB players'
|
||||
|
||||
|
||||
@router.delete('/{player_id}')
async def delete_player(player_id: int, token: str = Depends(oauth2_scheme)):
    """Delete one MlbPlayer by id; 200 on success, 404 if missing, 500 otherwise."""
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to delete mlb players. This event has been logged.'
        )

    target = MlbPlayer.get_or_none(MlbPlayer.id == player_id)
    if target is None:
        db.close()
        raise HTTPException(status_code=404, detail=f'MlbPlayer id {player_id} not found')

    removed = target.delete_instance()
    db.close()

    if removed == 1:
        raise HTTPException(status_code=200, detail=f'Player {player_id} has been deleted')
    raise HTTPException(status_code=500, detail=f'Player {player_id} was not deleted')
|
||||
@ -20,7 +20,7 @@ router = APIRouter(
|
||||
|
||||
|
||||
class PlayerPydantic(pydantic.BaseModel):
|
||||
player_id: int
|
||||
player_id: int = None
|
||||
p_name: str
|
||||
cost: int
|
||||
image: str
|
||||
@ -437,7 +437,7 @@ async def v1_players_patch(
|
||||
|
||||
|
||||
@router.put('')
|
||||
async def v1_players_put(players: PlayerModel, token: str = Depends(oauth2_scheme)):
|
||||
async def put_players(players: PlayerModel, token: str = Depends(oauth2_scheme)):
|
||||
if not valid_token(token):
|
||||
logging.warning(f'Bad Token: {token}')
|
||||
db.close()
|
||||
@ -515,8 +515,39 @@ async def v1_players_put(players: PlayerModel, token: str = Depends(oauth2_schem
|
||||
raise HTTPException(status_code=200, detail=f'{len(new_players)} players have been added')
|
||||
|
||||
|
||||
@router.post('')
async def post_players(new_player: PlayerPydantic, token: str = Depends(oauth2_scheme)):
    # Create a single Player record; rejects duplicates (same name within
    # the same cardset) and allocates the next player_id manually.
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to post players. This event has been logged.'
        )

    # Case-insensitive duplicate check within the target cardset.
    dupe_query = Player.select().where(
        (fn.Lower(Player.p_name) == new_player.p_name.lower()) & (Player.cardset_id == new_player.cardset_id)
    )
    if dupe_query.count() != 0:
        db.close()
        raise HTTPException(
            status_code=400,
            detail=f'This appears to be a duplicate with player {dupe_query[0].player_id}'
        )

    # NOTE(review): max(player_id)+1 is not atomic — two concurrent posts
    # could allocate the same id; confirm whether concurrent writes are
    # possible here (an autoincrement key or transaction would avoid it).
    p_query = Player.select(Player.player_id).order_by(-Player.player_id).limit(1)
    new_id = p_query[0].player_id + 1

    new_player.player_id = new_id
    p_id = Player.insert(new_player.dict()).execute()

    return_val = model_to_dict(Player.get_by_id(p_id))
    db.close()
    return return_val
|
||||
|
||||
|
||||
@router.delete('/{player_id}')
|
||||
async def v1_players_delete(player_id, token: str = Depends(oauth2_scheme)):
|
||||
async def delete_player(player_id, token: str = Depends(oauth2_scheme)):
|
||||
if not valid_token(token):
|
||||
logging.warning(f'Bad Token: {token}')
|
||||
db.close()
|
||||
|
||||
@ -1,11 +1,12 @@
|
||||
import csv
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, Depends, HTTPException, Response, Query
|
||||
from typing import Optional
|
||||
import logging
|
||||
import pydantic
|
||||
from pandas import DataFrame
|
||||
import pandas as pd
|
||||
|
||||
from ..db_engine import db, model_to_dict, fn, query_to_csv, complex_data_to_csv, Player
|
||||
from ..db_engine import db, model_to_dict, fn, query_to_csv, complex_data_to_csv, Player, BattingCardRatings
|
||||
from ..dependencies import oauth2_scheme, valid_token, LOG_DATA, int_timestamp
|
||||
from ..player_scouting import get_player_ids
|
||||
|
||||
@ -21,6 +22,21 @@ router = APIRouter(
|
||||
)
|
||||
|
||||
|
||||
class BattingFiles(pydantic.BaseModel):
    # Default CSV filenames for a live batting update: vs-LHP and vs-RHP
    # basic + rate splits, plus baserunning stats.
    vl_basic: str = 'vl-basic.csv'
    vl_rate: str = 'vl-rate.csv'
    vr_basic: str = 'vr-basic.csv'
    vr_rate: str = 'vr-rate.csv'
    running: str = 'running.csv'
|
||||
|
||||
|
||||
# def csv_file_to_dataframe(filename: str) -> pd.DataFrame | None:
|
||||
# with open(filename, 'r', encoding='utf8') as file:
|
||||
# reader = csv.reader(file)
|
||||
#
|
||||
# for row in reader:
|
||||
|
||||
|
||||
@router.get('/playerkeys')
|
||||
async def get_player_keys(player_id: list = Query(default=None)):
|
||||
all_keys = []
|
||||
@ -37,3 +53,50 @@ async def get_player_keys(player_id: list = Query(default=None)):
|
||||
db.close()
|
||||
return return_val
|
||||
|
||||
|
||||
@router.post('/live-update/batting')
def live_update_batting(files: BattingFiles, cardset_id: int, token: str = Depends(oauth2_scheme)):
    # WIP endpoint: merge vL/vR stat rows into a per-player dict keyed by
    # FanGraphs id, eventually producing BattingCardRating/BattingCard rows.
    if not valid_token(token):
        logging.warning(f'Bad Token: {token}')
        db.close()
        raise HTTPException(
            status_code=401,
            detail='You are not authorized to initiate live updates.'
        )

    data = {}  # <fg id>: { 'vL': [combined vl stat data], 'vR': [combined vr stat data] }
    # NOTE(review): BattingFiles fields are filename *strings*, so iterating
    # files.vl_basic yields characters and row['pa'] will raise TypeError —
    # presumably the CSVs need to be read into row dicts first; confirm the
    # intended input shape before wiring this up.
    for row in files.vl_basic:
        # NOTE(review): data[row['fgid']] is never initialized before this
        # subscript assignment — a defaultdict(dict)/setdefault looks intended.
        if row['pa'] >= 20:
            data[row['fgid']]['vL'] = row
    for row in files.vl_rate:
        if row['fgid'] in data.keys():
            data[row['fgid']]['vL'].extend(row)

    for row in files.vr_basic:
        # vR rows require more PA (40 vs 20) and an existing vL entry.
        if row['pa'] >= 40 and row['fgid'] in data.keys():
            data[row['fgid']]['vR'] = row
    for row in files.vr_rate:
        if row['fgid'] in data.keys():
            data[row['fgid']]['vR'].extend(row)

    for x in data.items():
        pass
        # Create BattingCardRating object for vL
        # Create BattingCardRating object for vR

    # Read running stats and create/update BattingCard object

    return files.dict()
|
||||
|
||||
|
||||
@router.post('/live-update/pitching')
def live_update_pitching(files: BattingFiles, token: str = Depends(oauth2_scheme)):
    """Stub endpoint for pitching live updates: validates the token and
    echoes the file configuration back."""
    if valid_token(token):
        return files.dict()

    logging.warning(f'Bad Token: {token}')
    db.close()
    raise HTTPException(
        status_code=401,
        detail='You are not authorized to initiate live updates.'
    )
|
||||
|
||||
14
db_engine.py
14
db_engine.py
@ -84,6 +84,19 @@ class Cardset(BaseModel):
|
||||
db.create_tables([Cardset])
|
||||
|
||||
|
||||
class MlbPlayer(BaseModel):
|
||||
first_name = CharField()
|
||||
last_name = CharField()
|
||||
key_fangraphs = IntegerField(null=True)
|
||||
key_bbref = CharField(null=True)
|
||||
key_retro = CharField(null=True)
|
||||
key_mlbam = IntegerField(null=True)
|
||||
offense_col = IntegerField(default=1)
|
||||
|
||||
|
||||
db.create_tables([MlbPlayer])
|
||||
|
||||
|
||||
class Player(BaseModel):
|
||||
player_id = IntegerField(primary_key=True)
|
||||
p_name = CharField()
|
||||
@ -110,6 +123,7 @@ class Player(BaseModel):
|
||||
fangr_id = CharField(null=True)
|
||||
description = CharField()
|
||||
quantity = IntegerField(default=999)
|
||||
mlb_player = ForeignKeyField(MlbPlayer, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.cardset} {self.p_name} ({self.rarity.name})'
|
||||
|
||||
@ -11,23 +11,23 @@ migrator = SqliteMigrator(db_engine.db)
|
||||
|
||||
|
||||
# pitcher_injury = IntegerField(null=True)
|
||||
# pos_1 = CharField(default='None')
|
||||
# offense_col = IntegerField(null=True)
|
||||
# pos_2 = CharField(null=True)
|
||||
# last_game = CharField(null=True)
|
||||
# game_type = CharField(null=True)
|
||||
# pack_type = ForeignKeyField(PackType, default=1, to_field='id', field_type=int)
|
||||
mlb_player = ForeignKeyField(db_engine.MlbPlayer, field=db_engine.MlbPlayer.id, null=True)
|
||||
# active_theme = ForeignKeyField(PackTheme, to_field='id', field_type=int, null=True)
|
||||
# active_theme = ForeignKeyField(db_engine.PackTheme, field=db_engine.PackTheme.id, null=True) # for careers
|
||||
# game_type = CharField(null=True)
|
||||
# pack_team = ForeignKeyField(db_engine.Team, field=db_engine.Team.id, null=True)
|
||||
pack_cardset = ForeignKeyField(db_engine.Cardset, field=db_engine.Cardset.id, null=True)
|
||||
# pack_cardset = ForeignKeyField(db_engine.Cardset, field=db_engine.Cardset.id, null=True)
|
||||
|
||||
migrate(
|
||||
# migrator.add_column('current', 'active_theme_id', active_theme),
|
||||
# migrator.add_column('pack', 'pack_team_id', pack_team),
|
||||
migrator.add_column('pack', 'pack_cardset_id', pack_cardset),
|
||||
migrator.add_column('player', 'mlb_player_id', mlb_player),
|
||||
# migrator.rename_column('cardset', 'available', 'for_purchase')
|
||||
# migrator.add_column('player', 'pos_1', pos_1),
|
||||
# migrator.add_column('player', 'offense_col', offense_col),
|
||||
# migrator.add_column('comment_tbl', 'comment', comment_field),
|
||||
# migrator.rename_column('story', 'pub_date', 'publish_date'),
|
||||
# migrator.drop_column('story', 'some_old_field'),
|
||||
|
||||
@ -5,3 +5,5 @@ python-multipart
|
||||
pandas
|
||||
pygsheets
|
||||
pybaseball
|
||||
python-multipart
|
||||
requests
|
||||
|
||||
Loading…
Reference in New Issue
Block a user