Migrate db calls to aiohttp library
This commit is contained in:
parent 726f8324ed
commit bc10141258
db_calls.py (170 changed lines)
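Every helper below follows the same mechanical translation: the blocking requests call and its manual retry loop are replaced by an aiohttp session used as an async context manager. A minimal sketch of the pattern (the fetch_json name and its url parameter are illustrative, not from this file):

import aiohttp

async def fetch_json(url: str) -> dict:
    # requests: resp = requests.get(url); data = resp.json()
    # aiohttp: the session and the response are both async context managers,
    # and reading the body is itself awaited.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as r:
            r.raise_for_status()  # raises ClientResponseError on 4xx/5xx
            return await r.json()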
@@ -4,7 +4,7 @@ from typing import Optional
-import requests
 import logging
 import json
+import aiohttp
 import os
 
 AUTH_TOKEN = {'Authorization': f'Bearer {os.environ.get("API_TOKEN")}'}
@@ -110,30 +110,20 @@ async def db_get(endpoint: str, api_ver: int = 2, object_id: int = None, params:
     log_string = f'db_get - get: {endpoint} id: {object_id} params: {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.get(req_url, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Get Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [2, 5][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_return_value(f'{data}')
-        return data
-    elif none_okay:
-        data = resp.json()
-        log_return_value(f'{data}')
-        return None
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.get(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            elif none_okay:
+                e = await r.text()
+                logging.error(e)
+                return None
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
 
 async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2, timeout: int = 3):
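Note: db_get (like the other helpers) opens a fresh ClientSession on every call. That works, but aiohttp's documentation recommends reusing one session across requests so connections can be pooled. A sketch of that refactor, assuming a module-level holder (the get_session helper is hypothetical, not part of this commit):

import aiohttp

_session = None  # hypothetical module-level holder

async def get_session() -> aiohttp.ClientSession:
    # Lazily create one shared session for all db_* helpers;
    # recreate it if it has been closed.
    global _session
    if _session is None or _session.closed:
        _session = aiohttp.ClientSession(headers=AUTH_TOKEN)
    return _session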
@@ -141,26 +131,16 @@ async def db_patch(endpoint: str, object_id: int, params: list, api_ver: int = 2
     log_string = f'db_patch - patch: {endpoint} {params}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.patch(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Patch Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_return_value(f'{data}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.patch(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
 
 async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
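Note: the requests version enforced a per-call timeout and retried up to twice with an escalating budget; the aiohttp version keeps the timeout parameter in each signature but never uses it, so a hung server now blocks until aiohttp's default total timeout (300 s) fires. If the old behavior matters, aiohttp takes an explicit budget via ClientTimeout and signals overruns with asyncio.TimeoutError. A sketch under those assumptions (patch_with_timeout is illustrative, not in this commit):

import asyncio
import aiohttp

async def patch_with_timeout(req_url: str, timeout_s: int = 3):
    # Sketch: restores the per-request time budget the requests version had.
    client_timeout = aiohttp.ClientTimeout(total=timeout_s)
    try:
        async with aiohttp.ClientSession(headers=AUTH_TOKEN, timeout=client_timeout) as session:
            async with session.patch(req_url) as r:
                return await r.json()
    except asyncio.TimeoutError:
        raise ConnectionError(f'DB: timed out after {timeout_s}s: {req_url}')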
@@ -168,26 +148,58 @@ async def db_post(endpoint: str, api_ver: int = 2, payload: dict = None, timeout
     log_string = f'db_post - post: {endpoint} payload: {payload}\ntype: {type(payload)}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.post(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.Timeout as e:
-            logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.post(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
-    if resp.status_code == 200:
-        data = resp.json()
-        log_return_value(f'{data}')
-        return data
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+
+async def db_put(endpoint: str, api_ver: int = 2, payload: dict = None, timeout: int = 3):
+    req_url = get_req_url(endpoint, api_ver=api_ver)
+    log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
+    logging.info(log_string) if master_debug else logging.debug(log_string)
+
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.put(req_url, json=payload) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
+
+    # retries = 0
+    # while True:
+    #     try:
+    #         resp = requests.put(req_url, json=payload, headers=AUTH_TOKEN, timeout=timeout)
+    #         break
+    #     except requests.Timeout as e:
+    #         logging.error(f'Post Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
+    #         if retries > 1:
+    #             raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
+    #                                   f'hang on a few extra seconds and try again.')
+    #         timeout += [min(3, timeout), min(5, timeout)][retries]
+    #         retries += 1
+    #
+    # if resp.status_code == 200:
+    #     data = resp.json()
+    #     log_string = f'{data}'
+    #     if master_debug:
+    #         logging.info(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     else:
+    #         logging.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}')
+    #     return data
+    # else:
+    #     logging.warning(resp.text)
+    #     raise ValueError(f'DB: {resp.text}')
 
 
 async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
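Note: with this migration every db_* helper is a true coroutine, so call sites must await it from async code (or drive it through an event loop at the top level). A hypothetical call site, with the endpoint and payload invented purely for illustration:

import asyncio

async def main():
    # 'teams' and the payload are made-up values for this example.
    created = await db_post('teams', payload={'abbrev': 'NYR'})
    print(created)

asyncio.run(main())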
@@ -195,26 +207,16 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 2, timeout=3):
     log_string = f'db_delete - delete: {endpoint} {object_id}'
     logging.info(log_string) if master_debug else logging.debug(log_string)
 
-    retries = 0
-    while True:
-        try:
-            resp = requests.delete(req_url, headers=AUTH_TOKEN, timeout=timeout)
-            break
-        except requests.ReadTimeout as e:
-            logging.error(f'Delete Timeout: {req_url} / retries: {retries} / timeout: {timeout}')
-            if retries > 1:
-                raise ConnectionError(f'DB: The internet was a bit too slow for me to grab the data I needed. Please '
-                                      f'hang on a few extra seconds and try again.')
-            timeout += [min(3, timeout), min(5, timeout)][retries]
-            retries += 1
-
-    if resp.status_code == 200:
-        data = resp.json()
-        log_return_value(f'{data}')
-        return True
-    else:
-        logging.warning(resp.text)
-        raise ValueError(f'DB: {resp.text}')
+    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
+        async with session.delete(req_url) as r:
+            if r.status == 200:
+                js = await r.json()
+                log_return_value(f'{js}')
+                return js
+            else:
+                e = await r.text()
+                logging.error(e)
+                raise ValueError(f'DB: {e}')
 
 
 async def get_team_by_abbrev(abbrev: str):
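Note: one behavior change worth flagging in db_delete: the requests version returned True on a 200 response, while the aiohttp version returns the parsed JSON body, so callers that test the result with "is True" will break. A sketch that keeps the old contract (db_delete_compat is a hypothetical name, not in this commit):

import aiohttp

async def db_delete_compat(req_url: str) -> bool:
    # Preserves the old requests-era contract: 200 -> True, otherwise raise.
    async with aiohttp.ClientSession(headers=AUTH_TOKEN) as session:
        async with session.delete(req_url) as r:
            if r.status == 200:
                log_return_value(f'{await r.json()}')
                return True
            raise ValueError(f'DB: {await r.text()}')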