Migrate db calls to aiohttp
parent dae6b7e8df
commit 0955261e7b
README.txt: 66 lines changed

@@ -1,5 +1,67 @@
 #######
-DATA REQUIREMENTS
+CARD CREATION PROCESS
+#######
+
+1) Download stats
+
+FanGraphs / https://www.fangraphs.com/leaders/splits-leaderboards
+- Batting
+  - vL Standard / vlhp-basic.csv
+  - vL Batted Balls / vlhp-rate.csv
+  - vR Standard / vrhp-basic.csv
+  - vR Batted Balls / vrhp-rate.csv
+- Pitching
+  - vL Standard / vlhh-basic.csv
+  - vL Batted Balls / vlhh-rate.csv
+  - vR Standard / vrhh-basic.csv
+  - vR Batted Balls / vrhh-rate.csv
+
+Baseball Reference
+- running.csv
+  - https://www.baseball-reference.com/leagues/majors/2023-baserunning-batting.shtml
+  - Remove header lines
+- pitching.csv
+  - https://www.baseball-reference.com/leagues/majors/2023-standard-pitching.shtml
+
+2) Run Card Updates (Python Configuration)
+3) Check Card Validity (Python Configuration)
+
+#######
+OLD DATA REQUIREMENTS
 #######
 
 - Add any new players to players.csv for import

@@ -29,7 +91,7 @@ DATA REQUIREMENTS
 - 20 TBF vL / 40 TBF vR for Live || 50 TBF vL / 75 TBF vR for legacy seasons
 
 #######
-CARD CREATION PROCESS
+OLD CARD CREATION PROCESS
 #######
 
 1) Import new players for sba_id with `1. Import Players`
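A note on step 1: each handedness pairs a Standard export with a Batted Balls export, so a card build presumably starts by joining the two files per player. A minimal sketch in Python, assuming the CSVs sit next to the script and share a 'Name' column; the join key and the repeated-header layout of the Baseball Reference file are assumptions, not documented here:

    import pandas as pd

    # Join the vL standard and batted-ball exports per player.
    vl = pd.read_csv('vlhp-basic.csv').merge(
        pd.read_csv('vlhp-rate.csv'), on='Name', suffixes=('', '_rate'))
    vr = pd.read_csv('vrhp-basic.csv').merge(
        pd.read_csv('vrhp-rate.csv'), on='Name', suffixes=('', '_rate'))

    # Baseball Reference pages repeat their header row mid-table; drop those
    # rows, per the "Remove header lines" step above.
    running = pd.read_csv('running.csv')
    running = running[running['Rk'] != 'Rk']  # 'Rk' is an assumed column name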
db_calls.py: 370 lines changed
@@ -1,6 +1,6 @@
+import aiohttp
 import requests
 import logging
-import os
 import pybaseball as pb
 
 from typing import Literal, Optional
@@ -8,7 +8,7 @@ from typing import Literal, Optional
 AUTH_TOKEN = {'Authorization': f'Bearer Tp3aO3jhYve5NJF1IqOmJTmk'}
 DB_URL = 'https://pd.manticorum.com/api'
 master_debug = True
-alt_database = 'dev'
+alt_database = False
 
 if alt_database == 'dev':
     DB_URL = 'https://pddev.manticorum.com/api'
@@ -33,6 +33,13 @@ def get_req_url(endpoint: str, api_ver: int = 2, object_id: int = None, params:
     return req_url
 
 
+def log_return_value(log_string: str):
+    if master_debug:
+        logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
+    else:
+        logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
+
+
 async def db_get(
         endpoint: str, api_ver: int = 2, object_id: int = None, params: list = None, none_okay: bool = True,
         timeout: int = 3):
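The new log_return_value helper centralizes the return logging each verb used to repeat inline: anything past 1200 characters is truncated and tagged. For illustration (values are arbitrary):

    log_return_value('x' * 50)    # logged in full
    log_return_value('x' * 5000)  # logged as the first 1200 chars plus ' [ S N I P P E D ]'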
@@ -40,67 +47,92 @@ async def db_get(
     log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.get(req_url, timeout=timeout, headers=AUTH_TOKEN)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [2, 5][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    elif none_okay:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return None
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.get(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            elif none_okay:
+                e = await r.text()
+                logging.error(e)
+                return None
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.get(req_url, timeout=timeout, headers=AUTH_TOKEN)
+    #         break
+    #     except requests.ReadTimeout as e:
+    #         logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [2, 5][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # elif none_okay:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return None
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def url_get(url: str, timeout: int = 3):
     log_string = f'get:\n{url}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.get(url, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Get Timeout: {url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [2, 5][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        log_string = f'200 received'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return resp
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession() as session:
+        async with session.get(url) as r:
+            if r.status == 200:
+                log_string = f'200 received'
+                log_return_value(log_string)
+                return r  # note: the session closes on return, so read the body before leaving this block
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.get(url, timeout=timeout)
+    #         break
+    #     except requests.ReadTimeout as e:
+    #         logging.error(f'Get Timeout: {url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [2, 5][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     log_string = f'200 received'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return resp
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3):
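One behavioral change to note: the aiohttp versions above no longer use the timeout argument or the old retry-with-backoff loop, so each call now runs on aiohttp's default timeout and fails on the first error. If the old behavior is wanted back, a sketch along these lines would restore it (not part of this commit; db_get_with_retry and max_retries are made-up names, the rest matches the module above):

    import asyncio

    async def db_get_with_retry(req_url: str, timeout: int = 3, max_retries: int = 2):
        for attempt in range(max_retries + 1):
            try:
                async with aiohttp.ClientSession(
                        headers=AUTH_TOKEN,
                        timeout=aiohttp.ClientTimeout(total=timeout)) as session:
                    async with session.get(req_url) as r:
                        if r.status == 200:
                            return await r.json()
                        raise ValueError(f'DB: {await r.text()}')
            except asyncio.TimeoutError:
                logging.error(f'Get Timeout: {req_url} / retries: {attempt} / timeout: {timeout}')
                if attempt == max_retries:
                    raise ConnectionError('DB: The internet was a bit too slow for me to grab the data I needed.')
                timeout += [2, 5][attempt]  # the old backoff steps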
@@ -108,30 +140,41 @@ async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2
     log_string = f'patch:\n{endpoint} {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.patch(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.patch(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.patch(req_url, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
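Since every verb is now a coroutine, synchronous call sites need an event loop to drive them. A minimal caller sketch ('cards' and the field list are hypothetical, shown only for the call shape):

    import asyncio

    async def rename_card():
        card = await db_get('cards', object_id=123, none_okay=False)
        await db_patch('cards', object_id=123, params=[('name', 'New Name')])

    asyncio.run(rename_card())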
@@ -139,30 +182,41 @@ async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout
     log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.post(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
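A porting gotcha worth flagging here: unlike requests' resp.json(), aiohttp's r.json() raises aiohttp.ContentTypeError when the body is not served as application/json. If the API ever answers with a different content type, the strict check can be relaxed:

    js = await r.json(content_type=None)  # skip aiohttp's application/json check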
@@ -170,30 +224,41 @@ async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout:
     log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.put(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
@@ -201,30 +266,41 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
     log_string = f'delete:\n{endpoint} {object_id}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return True
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.delete(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.ReadTimeout as e:
+    #         logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return True
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 def get_player_data(
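Each helper above opens and tears down its own ClientSession per request. That works, but aiohttp recommends reusing one session so requests share a connection pool; a possible follow-up shape (a sketch, not part of this commit):

    from typing import Optional

    _session: Optional[aiohttp.ClientSession] = None

    async def get_session() -> aiohttp.ClientSession:
        # Lazily create one shared session and reuse its connection pool.
        global _session
        if _session is None or _session.closed:
            _session = aiohttp.ClientSession(headers=AUTH_TOKEN)
        return _session

    async def close_session():
        # Call once at shutdown.
        if _session is not None and not _session.closed:
            await _session.close()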
(third file in this commit; its name had not loaded in this view)

@@ -75,16 +75,16 @@ async def main(args):
     start_time = datetime.datetime.now()
     release_directory = f'{start_time.year}-{start_time.month}-{start_time.day}'
 
-    # data = await batters.creation.run_batters(
-    #     cardset, input_path, post_players, CARD_BASE_URL, release_directory, player_description, season_pct,
-    #     post_batters, pull_fielding, season, is_liveseries
-    # )
+    data = await batters.creation.run_batters(
+        cardset, input_path, post_players, CARD_BASE_URL, release_directory, player_description, season_pct,
+        post_batters, pull_fielding, season, is_liveseries
+    )
 
     print(f'Batter updates are complete')
     start_time_two = datetime.datetime.now()
     run_time = start_time_two - start_time
-    # print(f'Total batting cards: {data["tot_batters"]}\nNew cardset batters: {data["new_batters"]}\n'
-    #       f'Batter runtime: {round(run_time.total_seconds())} seconds\n')
+    print(f'Total batting cards: {data["tot_batters"]}\nNew cardset batters: {data["new_batters"]}\n'
+          f'Batter runtime: {round(run_time.total_seconds())} seconds\n')
 
     data = await pitchers.creation.run_pitchers(
         cardset, input_path, CARD_BASE_URL, season, release_directory, player_description, season_pct, post_players,
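With run_batters and run_pitchers both awaited inside main, the whole pipeline presumably hangs off a single asyncio.run at the entrypoint, somewhere outside this hunk; roughly (args handling is assumed, not shown in this view):

    if __name__ == '__main__':
        asyncio.run(main(args))  # hypothetical driver; the real one is not captured here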