Clean up db calls, add_second_card script

Cal Corum 2023-11-20 22:00:13 -06:00
parent 8feaf3f44d
commit 6539cdc37d
4 changed files with 51 additions and 159 deletions

add_second_cards.py Normal file
View File

@@ -0,0 +1,50 @@
import asyncio
import datetime
import logging
import sys

from creation_helpers import get_args
from db_calls import db_get, DB_URL, player_desc, db_patch

date = f'{datetime.datetime.now().year}-{datetime.datetime.now().month}-{datetime.datetime.now().day}'
log_level = logging.INFO
logging.basicConfig(
    filename=f'logs/{date}.log',
    format='%(asctime)s - check-cards - %(levelname)s - %(message)s',
    level=log_level
)


async def main(args):
    arg_data = get_args(args)

    print('Pulling batting cards...')
    bc_query = await db_get('battingcards')
    print('Pulling pitching cards...')
    pc_query = await db_get('pitchingcards')

    b_count, p_count = 0, 0
    now = datetime.datetime.now()
    for x in bc_query['cards']:
        today_url = f'{DB_URL}/v2/players/{x["player"]["player_id"]}/battingcard?d={now.year}-{now.month}-{now.day}'
        if 'batting' not in x['player']['image']:
            await db_patch('players', object_id=x["player"]["player_id"], params=[('image2', today_url)])
            print(f'Adding batting card for {player_desc(x["player"])}')
            b_count += 1

    run_time = datetime.datetime.now() - now
    print(f'\nTotal Batters: {b_count}\nBatter runtime: {round(run_time.total_seconds())} seconds\n\n########\n')

    now = datetime.datetime.now()
    for x in pc_query['cards']:
        today_url = f'{DB_URL}/v2/players/{x["player"]["player_id"]}/pitchingcard?d={now.year}-{now.month}-{now.day}'
        if 'pitching' not in x['player']['image']:
            await db_patch('players', object_id=x["player"]["player_id"], params=[('image2', today_url)])
            print(f'Adding pitching card for {player_desc(x["player"])}')
            p_count += 1

    run_time = datetime.datetime.now() - now
    print(f'\nTotal Pitchers: {p_count}\nPitcher runtime: {round(run_time.total_seconds())} seconds')


if __name__ == '__main__':
    asyncio.run(main(sys.argv[1:]))
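
Note: get_req_url (referenced by db_patch further down) is not shown in this commit, so the exact URL that db_patch('players', object_id=..., params=[('image2', today_url)]) ends up requesting is an assumption. Going only by the call sites visible in this diff (endpoint, api_ver, object_id, and a params list of tuples), a minimal hypothetical sketch of what it might produce:

# Hypothetical sketch only; the real get_req_url lives in db_calls and is not part of this diff.
import urllib.parse

DB_URL = 'https://db.example.invalid'  # placeholder; the real DB_URL is imported from db_calls

def get_req_url(endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None) -> str:
    # e.g. ('players', object_id=123, params=[('image2', url)]) -> <DB_URL>/v2/players/123?image2=<url-encoded>
    req_url = f'{DB_URL}/v{api_ver}/{endpoint}'
    if object_id is not None:
        req_url += f'/{object_id}'
    if params:
        req_url += '?' + urllib.parse.urlencode(params)
    return req_url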

View File

@@ -145,6 +145,7 @@ async def calculate_batting_cards(offense_stats: pd.DataFrame, cardset: dict, se
    batting_cards = []

    def create_batting_card(df_data):
        logging.info(df_data['player_id'])
        s_data = cba.stealing(
            chances=df_data['SBO'],
            sb2s=df_data['SB2'],

View File

@@ -3,8 +3,6 @@ import datetime
import logging
import sys
import requests
from creation_helpers import get_args
from db_calls import db_get, url_get

View File

@@ -61,39 +61,6 @@ async def db_get(
        logging.error(e)
        raise ValueError(f'DB: {e}')
    # retries = 0
    # while True:
    #     try:
    #         resp = requests.get(req_url, timeout=timeout, headers=AUTH_TOKEN)
    #         break
    #     except requests.ReadTimeout as e:
    #         logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
    #         if retries > 1:
    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
    #                                   f'hang on a few extra seconds and try again.')
    #         timeout += [2, 5][retries]
    #         retries += 1
    #
    # if resp.status_code == 200:
    #     data = resp.json()
    #     log_string = f'{data}'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return data
    # elif none_okay:
    #     data = resp.json()
    #     log_string = f'{data}'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return None
    # else:
    #     logging.warning(resp.text)
    #     raise ValueError(f'DB: {resp.text}')


async def url_get(url: str, timeout: int = 3):
    log_string = f'get:\n{url}'
@@ -110,30 +77,6 @@ async def url_get(url: str, timeout: int = 3):
        logging.error(e)
        raise ValueError(f'DB: {r.text()}')
    # retries = 0
    # while True:
    #     try:
    #         resp = requests.get(url, timeout=timeout)
    #         break
    #     except requests.ReadTimeout as e:
    #         logging.error(f'Get Timeout: {url} / retries: {retries} / timeout: {timeout}')
    #         if retries > 1:
    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
    #                                   f'hang on a few extra seconds and try again.')
    #         timeout += [2, 5][retries]
    #         retries += 1
    #
    # if resp.status_code == 200:
    #     log_string = f'200 received'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return resp
    # else:
    #     logging.warning(resp.text)
    #     raise ValueError(f'DB: {resp.text}')


async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3):
    req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
@@ -151,31 +94,6 @@ async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2
        logging.error(e)
        raise ValueError(f'DB: {e}')
    # retries = 0
    # while True:
    #     try:
    #         resp = requests.patch(req_url, headers=AUTH_TOKEN, timeout=timeout)
    #         break
    #     except requests.Timeout as e:
    #         logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
    #         if retries > 1:
    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
    #                                   f'hang on a few extra seconds and try again.')
    #         timeout += [min(3, timeout), min(5, timeout)][retries]
    #         retries += 1
    #
    # if resp.status_code == 200:
    #     data = resp.json()
    #     log_string = f'{data}'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return data
    # else:
    #     logging.warning(resp.text)
    #     raise ValueError(f'DB: {resp.text}')


async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
    req_url = get_req_url(endpoint, api_ver=api_ver)
@@ -193,31 +111,6 @@ async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout
        logging.error(e)
        raise ValueError(f'DB: {e}')
    # retries = 0
    # while True:
    #     try:
    #         resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
    #         break
    #     except requests.Timeout as e:
    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
    #         if retries > 1:
    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
    #                                   f'hang on a few extra seconds and try again.')
    #         timeout += [min(3, timeout), min(5, timeout)][retries]
    #         retries += 1
    #
    # if resp.status_code == 200:
    #     data = resp.json()
    #     log_string = f'{data}'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return data
    # else:
    #     logging.warning(resp.text)
    #     raise ValueError(f'DB: {resp.text}')


async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
    req_url = get_req_url(endpoint, api_ver=api_ver)
@@ -235,31 +128,6 @@ async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout:
        logging.error(e)
        raise ValueError(f'DB: {e}')
    # retries = 0
    # while True:
    #     try:
    #         resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
    #         break
    #     except requests.Timeout as e:
    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
    #         if retries > 1:
    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
    #                                   f'hang on a few extra seconds and try again.')
    #         timeout += [min(3, timeout), min(5, timeout)][retries]
    #         retries += 1
    #
    # if resp.status_code == 200:
    #     data = resp.json()
    #     log_string = f'{data}'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return data
    # else:
    #     logging.warning(resp.text)
    #     raise ValueError(f'DB: {resp.text}')


async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
    req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id)
@@ -277,31 +145,6 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
        logging.error(e)
        raise ValueError(f'DB: {e}')
    # retries = 0
    # while True:
    #     try:
    #         resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
    #         break
    #     except requests.ReadTimeout as e:
    #         logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
    #         if retries > 1:
    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
    #                                   f'hang on a few extra seconds and try again.')
    #         timeout += [min(3, timeout), min(5, timeout)][retries]
    #         retries += 1
    #
    # if resp.status_code == 200:
    #     data = resp.json()
    #     log_string = f'{data}'
    #     if master_debug:
    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     else:
    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
    #     return True
    # else:
    #     logging.warning(resp.text)
    #     raise ValueError(f'DB: {resp.text}')


def get_player_data(
        player_id: str, id_type: Literal['bbref', 'fangraphs'], return_type: Literal['dict', 'Series'] = 'dict'):
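
The blocks removed above all implemented the same pattern: a synchronous requests call retried up to two times with an escalating timeout, then status-code handling with truncated debug logging. For reference only, here is a rough async equivalent of that retry pattern; it is not part of this commit, it assumes aiohttp as the async HTTP client (which this diff does not confirm), and the function name is illustrative:

import asyncio
import logging

import aiohttp


async def get_json_with_retry(url: str, headers: dict = None, timeout: float = 3):
    # Illustrative sketch only: GET a URL, retry twice with a growing timeout, return parsed JSON.
    retries = 0
    while True:
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url, headers=headers,
                                       timeout=aiohttp.ClientTimeout(total=timeout)) as resp:
                    if resp.status == 200:
                        return await resp.json()
                    body = await resp.text()
                    logging.warning(body)
                    raise ValueError(f'DB: {body}')
        except asyncio.TimeoutError:
            logging.error(f'Get Timeout: {url} / retries: {retries} / timeout: {timeout}')
            if retries > 1:
                raise ConnectionError('DB: The internet was a bit too slow for me to grab the data I needed. '
                                      'Please hang on a few extra seconds and try again.')
            timeout += [2, 5][retries]
            retries += 1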