Migrate db calls to aiohttp library

Author: Cal Corum
Date:   2023-11-19 14:30:43 -06:00
Parent: d60c6a1957
Commit: e927e5a7a4


@@ -1,6 +1,6 @@
 import requests
 import logging
-import json
+import aiohttp
 import os
 from bs4 import BeautifulSoup
 import csv
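
Note: the json import can go because aiohttp decodes JSON response bodies itself via resp.json(), while requests stays imported, presumably for other callers in this module. A minimal sketch of the replacement pattern (the helper name here is illustrative, not from this commit):

    import aiohttp

    async def fetch_json(session: aiohttp.ClientSession, url: str):
        # aiohttp parses the body directly; no json.loads(await resp.text()) step
        async with session.get(url) as resp:
            return await resp.json()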
@@ -34,44 +34,33 @@ def get_req_url(endpoint: str, api_ver: int = 3, object_id: int = None, params:
     return req_url
 
+
+def log_return_value(log_string: str):
+    if master_debug:
+        logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
+    else:
+        logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
+
 
 async def db_get(endpoint: str, api_ver: int = 3, object_id: int = None, params: list = None, none_okay: bool = True,
                  timeout: int = 3):
     req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
     log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
-    retries = 0
-    while True:
-        try:
-            resp = requests.get(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [2, 5][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    elif none_okay:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return None
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.get(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            elif none_okay:
+                e = await r.text()
+                logging.error(e)
+                return None
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
 
 async def db_patch(
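
Note: each call now builds a throwaway aiohttp.ClientSession, and the timeout parameter is accepted but no longer used, so the old timeout/retry behavior is silently gone. aiohttp's docs recommend reusing one session across requests; below is a minimal sketch of wiring the timeout back in, assuming the module-level AUTH_TOKEN headers dict (the function name is illustrative, not from this commit):

    import asyncio
    import aiohttp

    async def db_get_timed(req_url: str, timeout: int = 3):
        client_timeout = aiohttp.ClientTimeout(total=timeout)
        async with aiohttp.ClientSession(headers=AUTH_TOKEN, timeout=client_timeout) as session:
            try:
                async with session.get(req_url) as r:
                    if r.status == 200:
                        return await r.json()
                    raise ValueError(f'DB: {await r.text()}')
            except asyncio.TimeoutError:
                # aiohttp surfaces timeouts here, where requests raised ReadTimeout
                raise ConnectionError(f'DB: request timed out after {timeout}s')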
@@ -80,30 +69,16 @@ async def db_patch(
     log_string = f'patch:\n{endpoint}/{object_id} {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
-    retries = 0
-    while True:
-        try:
-            resp = requests.patch(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.patch(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
 
 async def db_post(endpoint: str, api_ver: int = 3, payload: dict = None, timeout: int = 3):
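
Note: unlike db_post below, the rewritten db_patch calls session.patch(req_url) with no body, while the removed requests version sent json=payload. If that drop is unintentional, the fix is presumably the same keyword argument aiohttp accepts on every request method (sketch, assuming the payload was meant to be kept):

    import aiohttp

    async def db_patch_with_body(req_url: str, payload: dict):
        # assumption: payload should still be sent, as in the requests version
        async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
            async with session.patch(req_url, json=payload) as r:
                if r.status == 200:
                    return await r.json()
                raise ValueError(f'DB: {await r.text()}')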
@@ -111,30 +86,58 @@ async def db_post(endpoint: str, api_ver: int = 3, payload: dict = None, timeout
     log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
-    retries = 0
-    while True:
-        try:
-            resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.post(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+
+async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
+    req_url = get_req_url(endpoint, api_ver=api_ver)
+    log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
+    logging.info(log_string) if master_debug else logging.debug(log_string)
+
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.put(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def db_delete(endpoint: str, object_id: int, api_ver: int = 3, timeout=3):
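
Note: the new db_put carries the old requests retry loop along as commented-out dead code, and its log string still says 'post:' (copied from db_post). If the retry-on-timeout behavior is wanted back under aiohttp, the exception to catch is asyncio.TimeoutError rather than requests.Timeout. A rough sketch under those assumptions (the commit itself ships without retries; AUTH_TOKEN is the module's headers dict):

    import asyncio
    import logging
    import aiohttp

    async def db_post_retrying(req_url: str, payload: dict, timeout: int = 3):
        for retries in range(3):  # the old loop allowed three attempts total
            try:
                client_timeout = aiohttp.ClientTimeout(total=timeout)
                async with aiohttp.ClientSession(headers=AUTH_TOKEN, timeout=client_timeout) as session:
                    async with session.post(req_url, json=payload) as r:
                        if r.status == 200:
                            return await r.json()
                        raise ValueError(f'DB: {await r.text()}')
            except asyncio.TimeoutError:
                logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
                timeout += min(5, timeout)  # roughly the old backoff schedule
        raise ConnectionError('DB: The internet was a bit too slow for me to grab the data I needed. '
                              'Please hang on a few extra seconds and try again.')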
@@ -142,30 +145,16 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 3, timeout=3):
     log_string = f'delete:\n{endpoint} {object_id}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
-    retries = 0
-    while True:
-        try:
-            resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_string = f'{data}'
-        if master_debug:
-            logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        else:
-            logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
-        return True
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.delete(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
 
 async def get_team_by_abbrev(team_abbrev: str, season: int):
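
Note: two caller-visible changes ride along with the migration. db_delete now returns the parsed JSON body instead of True, and although these helpers were already declared async, they previously blocked the event loop on synchronous requests calls; with aiohttp they genuinely yield, so independent calls can overlap. Hypothetical usage (endpoint names and ids are made up):

    import asyncio

    async def main():
        team = await db_get('team', object_id=42)       # parsed JSON, or None when none_okay
        gone = await db_delete('game', object_id=7)     # JSON body now, no longer True
        # independent calls can now actually run concurrently:
        a, b = await asyncio.gather(db_get('team', object_id=1),
                                    db_get('team', object_id=2))

    asyncio.run(main())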