Migrate db calls to aiohttp

parent dae6b7e8df
commit 0955261e7b

README.txt | 66
@@ -1,5 +1,67 @@
 #######
 DATA REQUIREMENTS
+CARD CREATION PROCESS
 #######
+
+1) Download stats
+
+   FanGraphs / https://www.fangraphs.com/leaders/splits-leaderboards
+   - Batting
+     - vL Standard / vlhp-basic.csv
+     - vL Batted Balls / vlhp-rate.csv
+     - vR Standard / vrhp-basic.csv
+     - vR Batted Balls / vrhp-rate.csv
+   - Pitching
+     - vL Standard / vlhh-basic.csv
+     - vL Batted Balls / vlhh-rate.csv
+     - vR Standard / vrhh-basic.csv
+     - vR Batted Balls / vrhh-rate.csv
+
+   Baseball Reference
+   - running.csv
+     - https://www.baseball-reference.com/leagues/majors/2023-baserunning-batting.shtml
+     - Remove header lines
+   - pitching.csv
+     - https://www.baseball-reference.com/leagues/majors/2023-standard-pitching.shtml
+
+2) Run Card Updates (Python Configuration)
+
+3) Check Card Validity (Python Configuration)
+
+#######
+OLD DATA REQUIREMENTS
+#######

 - Add any new players to players.csv for import

@@ -29,7 +91,7 @@ DATA REQUIREMENTS
 - 20 TBF vL / 40 TBF vR for Live || 50 TBF vL / 75 TBF vR for legacy seasons

 #######
-CARD CREATION PROCESS
+OLD CARD CREATION PROCESS
 #######

 1) Import new players for sba_id with `1. Import Players`
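
Editor's note: the step-1 downloads above feed the update scripts as plain CSVs. A minimal sketch of merging the four FanGraphs batting split exports into one table, assuming they sit in a single input directory and share a 'Name' column (the join key and column names are assumptions, not the project's actual schema):

    import pandas as pd

    SPLIT_FILES = {
        'vl_basic': 'vlhp-basic.csv',   # vL Standard
        'vl_rate': 'vlhp-rate.csv',     # vL Batted Balls
        'vr_basic': 'vrhp-basic.csv',   # vR Standard
        'vr_rate': 'vrhp-rate.csv',     # vR Batted Balls
    }

    def load_batting_splits(input_path: str) -> pd.DataFrame:
        # Merge the four split exports into one row per player, suffixing
        # stat columns so vL and vR numbers stay distinguishable.
        merged = None
        for label, filename in SPLIT_FILES.items():
            df = pd.read_csv(f'{input_path}/{filename}')
            df = df.add_suffix(f'_{label}').rename(columns={f'Name_{label}': 'Name'})
            merged = df if merged is None else merged.merge(df, on='Name', how='outer')
        return merged

The same pattern covers the pitching splits (vlhh-*/vrhh-*); per the note above, Baseball Reference's running.csv needs its repeated header lines removed before it will parse cleanly.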
db_calls.py | 364

@@ -1,6 +1,6 @@
-import requests
+import aiohttp
 import logging
 import os
 import pybaseball as pb

 from typing import Literal, Optional
@@ -8,7 +8,7 @@ from typing import Literal, Optional
 AUTH_TOKEN = {'Authorization': f'Bearer Tp3aO3jhYve5NJF1IqOmJTmk'}
 DB_URL = 'https://pd.manticorum.com/api'
 master_debug = True
-alt_database = 'dev'
+alt_database = False

 if alt_database == 'dev':
     DB_URL = 'https://pddev.manticorum.com/api'
@@ -33,6 +33,13 @@ def get_req_url(endpoint: str, api_ver: int = 2, object_id: int = None, params:
     return req_url


+def log_return_value(log_string: str):
+    if master_debug:
+        logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
+    else:
+        logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
+
+
 async def db_get(
         endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None, none_okay: bool = True,
         timeout: int = 3):
@@ -40,67 +47,92 @@ async def db_get(
     log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)

-    retries = 0
-    while True:
-        try:
-            resp = requests.get(req_url, timeout=timeout, headers=AUTH_TOKEN)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [2, 5][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    elif none_okay:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return None
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.get(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            elif none_okay:
+                e = await r.text()
+                logging.error(e)
+                return None
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.get(req_url, timeout=timeout, headers=AUTH_TOKEN)
+    #         break
+    #     except requests.ReadTimeout as e:
+    #         logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [2, 5][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # elif none_okay:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return None
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')


 async def url_get(url: str, timeout: int = 3):
     log_string = f'get:\n{url}'
     logging.info(log_string) if master_debug else logging.debug(log_string)

-    retries = 0
-    while True:
-        try:
-            resp = requests.get(url, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Get Timeout: {url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [2, 5][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        log_string = f'200 received'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return resp
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession() as session:
+        async with session.get(url) as r:
+            if r.status == 200:
+                log_string = f'200 received'
+                log_return_value(log_string)
+                return r
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.get(url, timeout=timeout)
+    #         break
+    #     except requests.ReadTimeout as e:
+    #         logging.error(f'Get Timeout: {url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [2, 5][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     log_string = f'200 received'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return resp
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
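
Editor's note, a caveat on the new url_get (an observation, not part of this commit): it returns the aiohttp response object after both async with blocks have exited, and aiohttp releases the connection when the response context closes, so a caller that then tries `await r.text()` will find the body unavailable. A sketch of a body-safe variant, assuming callers only need the raw payload (the name url_get_bytes is hypothetical):

    async def url_get_bytes(url: str) -> bytes:
        # Read the body inside the context, before the connection is released.
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as r:
                if r.status == 200:
                    return await r.read()
                raise ValueError(f'DB: {await r.text()}')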

@@ -108,30 +140,41 @@ async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3):
     log_string = f'patch:\n{endpoint} {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)

-    retries = 0
-    while True:
-        try:
-            resp = requests.patch(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.patch(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.patch(req_url, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
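
Editor's note: with the retry loops commented out rather than ported, the `timeout` parameters on these helpers are now unused, and requests fall back to aiohttp's default (a 300-second total timeout). If the old bounded-wait behavior is wanted back, aiohttp exposes it via ClientTimeout; a hedged sketch (get_with_timeout is a hypothetical helper, and the old growing-timeout ladder is simplified to a fixed retry count):

    import asyncio

    async def get_with_timeout(req_url: str, timeout: int = 3, retries: int = 2):
        # Apply a per-request total timeout and retry a couple of times,
        # roughly mirroring the old requests-based loop.
        client_timeout = aiohttp.ClientTimeout(total=timeout)
        for attempt in range(retries + 1):
            try:
                async with aiohttp.ClientSession(headers=AUTH_TOKEN, timeout=client_timeout) as session:
                    async with session.get(req_url) as r:
                        return await r.json()
            except asyncio.TimeoutError:
                logging.error(f'Get Timeout: {req_url} / attempt: {attempt} / timeout: {timeout}')
        raise ConnectionError('DB: The internet was a bit too slow for me to grab the data I needed. '
                              'Please hang on a few extra seconds and try again.')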

@@ -139,30 +182,41 @@ async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
     log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
     logging.info(log_string) if master_debug else logging.debug(log_string)

-    retries = 0
-    while True:
-        try:
-            resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.post(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
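
Editor's note: since the helpers are now coroutines, every call site has to await them from a running event loop. A minimal caller sketch (the endpoint name and payload fields are made up for illustration):

    import asyncio

    async def create_card():
        # db_post is awaited like any coroutine; the payload is sent as JSON.
        result = await db_post('cards', payload={'player_id': 123, 'season': 2023})
        print(result)

    asyncio.run(create_card())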

@@ -170,30 +224,41 @@ async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
     log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
     logging.info(log_string) if master_debug else logging.debug(log_string)

-    retries = 0
-    while True:
-        try:
-            resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.put(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')

@@ -201,30 +266,41 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
     log_string = f'delete:\n{endpoint} {object_id}'
     logging.info(log_string) if master_debug else logging.debug(log_string)

-    retries = 0
-    while True:
-        try:
-            resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return True
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.delete(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.ReadTimeout as e:
+    #         logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return True
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')


 def get_player_data(
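
Editor's note, a design observation on the migration: each helper opens a fresh ClientSession per request, which works but forfeits connection pooling; aiohttp's documentation recommends reusing one session across calls. A sketch of batching requests over a shared session (fetch_many is hypothetical, not part of db_calls.py), which also shows the concurrency the async rewrite buys:

    import asyncio

    async def fetch_many(endpoints: list):
        # One pooled session for the whole batch; the gets run concurrently.
        async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
            async def fetch_one(endpoint: str):
                async with session.get(get_req_url(endpoint)) as r:
                    return await r.json()
            return await asyncio.gather(*(fetch_one(e) for e in endpoints))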

@@ -75,16 +75,16 @@ async def main(args):
     start_time = datetime.datetime.now()
     release_directory = f'{start_time.year}-{start_time.month}-{start_time.day}'

-    # data = await batters.creation.run_batters(
-    #     cardset, input_path, post_players, CARD_BASE_URL, release_directory, player_description, season_pct,
-    #     post_batters, pull_fielding, season, is_liveseries
-    # )
+    data = await batters.creation.run_batters(
+        cardset, input_path, post_players, CARD_BASE_URL, release_directory, player_description, season_pct,
+        post_batters, pull_fielding, season, is_liveseries
+    )

     print(f'Batter updates are complete')
     start_time_two = datetime.datetime.now()
     run_time = start_time_two - start_time
-    # print(f'Total batting cards: {data["tot_batters"]}\nNew cardset batters: {data["new_batters"]}\n'
-    #       f'Batter runtime: {round(run_time.total_seconds())} seconds\n')
+    print(f'Total batting cards: {data["tot_batters"]}\nNew cardset batters: {data["new_batters"]}\n'
+          f'Batter runtime: {round(run_time.total_seconds())} seconds\n')

     data = await pitchers.creation.run_pitchers(
         cardset, input_path, CARD_BASE_URL, season, release_directory, player_description, season_pct, post_players,