Parameterize DB location
Optimize dockerfile for smaller builds
This commit is contained in:
parent
8d9e2189a7
commit
fdf80fcdc1
32
Dockerfile
32
Dockerfile
@ -1,10 +1,34 @@
|
|||||||
# ---- Stage 1: builder -------------------------------------------------------
# Installs Python dependencies into an isolated prefix so the final image
# never carries pip caches or build-time tooling.
FROM python:3.12-slim AS builder

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /install

# Optional: install build dependencies if your requirements need it
# RUN apt-get update && apt-get install -y build-essential libpq-dev && rm -rf /var/lib/apt/lists/*

COPY requirements.txt .
RUN pip install --prefix=/install --no-cache-dir -r requirements.txt

# ---- Stage 2: runtime image -------------------------------------------------
FROM python:3.12-slim

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Run the app as a non-root user for defense in depth.
RUN adduser --disabled-password --no-create-home --gecos "" appuser

WORKDIR /usr/src/app

# Bring in the pre-built site-packages from the builder stage.
COPY --from=builder /install /usr/local

# Copy source code
COPY . .

USER appuser

CMD ["python", "majordomo.py"]
|||||||
24
db_calls.py
24
db_calls.py
@ -1,3 +1,4 @@
|
|||||||
|
from typing import Optional
|
||||||
import requests
|
import requests
|
||||||
import logging
|
import logging
|
||||||
import aiohttp
|
import aiohttp
|
||||||
@ -8,7 +9,9 @@ import csv
|
|||||||
|
|
||||||
# Bearer-token header attached to every DB request; the token is read from the
# environment at import time.
AUTH_TOKEN = {'Authorization': f'Bearer {os.environ.get("API_TOKEN")}'}

# Former in-cluster default was http://database/api; the location is now
# supplied via the DB_URL environment variable.
DB_URL = os.environ.get("DB_URL")
if DB_URL is None:
    # Fail fast at import time rather than on the first request.
    raise AttributeError('DB_URL is not defined')

master_debug = True
logger = logging.getLogger('discord_app')
|
||||||
|
|
||||||
@ -20,7 +23,7 @@ def param_char(other_params):
|
|||||||
return '?'
|
return '?'
|
||||||
|
|
||||||
|
|
||||||
def get_req_url(endpoint: str, api_ver: int = 3, object_id: int = None, params: list = None):
|
def get_req_url(endpoint: str, api_ver: int = 3, object_id: Optional[int] = None, params: Optional[list] = None):
|
||||||
# Checking for hard-coded url
|
# Checking for hard-coded url
|
||||||
if '/api/' in endpoint:
|
if '/api/' in endpoint:
|
||||||
return endpoint
|
return endpoint
|
||||||
@ -42,8 +45,7 @@ def log_return_value(log_string: str):
|
|||||||
logger.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
|
logger.debug(f'return: {log_string[:1200]}{" [ S N I P P E D ]" if len(log_string) > 1200 else ""}\n')
|
||||||
|
|
||||||
|
|
||||||
async def db_get(endpoint: str, api_ver: int = 3, object_id: int = None, params: list = None, none_okay: bool = True,
|
async def db_get(endpoint: str, api_ver: int = 3, object_id: Optional[int] = None, params: Optional[list] = None, none_okay: bool = True, timeout: int = 3):
|
||||||
timeout: int = 3):
|
|
||||||
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
|
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
|
||||||
log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
|
log_string = f'get:\n{endpoint} id: {object_id} params: {params}'
|
||||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||||
@ -65,7 +67,7 @@ async def db_get(endpoint: str, api_ver: int = 3, object_id: int = None, params:
|
|||||||
|
|
||||||
|
|
||||||
async def db_patch(
|
async def db_patch(
|
||||||
endpoint: str, object_id: int, params: list, api_ver: int = 3, timeout: int = 3, payload: dict = None):
|
endpoint: str, object_id: int, params: list, api_ver: int = 3, timeout: int = 3, payload: Optional[dict] = None):
|
||||||
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
|
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id, params=params)
|
||||||
log_string = f'patch:\n{endpoint}/{object_id} {params}'
|
log_string = f'patch:\n{endpoint}/{object_id} {params}'
|
||||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||||
@ -82,7 +84,7 @@ async def db_patch(
|
|||||||
raise ValueError(f'DB: {e}')
|
raise ValueError(f'DB: {e}')
|
||||||
|
|
||||||
|
|
||||||
async def db_post(endpoint: str, api_ver: int = 3, payload: dict = None, timeout: int = 3):
|
async def db_post(endpoint: str, api_ver: int = 3, payload: Optional[dict] = None, timeout: int = 3):
|
||||||
req_url = get_req_url(endpoint, api_ver=api_ver)
|
req_url = get_req_url(endpoint, api_ver=api_ver)
|
||||||
log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
|
log_string = f'post:\n{endpoint} payload: {payload}\ntype: {type(payload)}'
|
||||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||||
@ -99,7 +101,7 @@ async def db_post(endpoint: str, api_ver: int = 3, payload: dict = None, timeout
|
|||||||
raise ValueError(f'DB: {e}')
|
raise ValueError(f'DB: {e}')
|
||||||
|
|
||||||
|
|
||||||
async def db_put(endpoint: str, api_ver: int = 3, payload: dict = None, object_id: int = None, timeout: int = 3):
|
async def db_put(endpoint: str, api_ver: int = 3, payload: Optional[dict] = None, object_id: Optional[int] = None, timeout: int = 3):
|
||||||
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id)
|
req_url = get_req_url(endpoint, api_ver=api_ver, object_id=object_id)
|
||||||
log_string = f'put:{req_url}\npayload: {payload}\nobject_id: {object_id}\ntype: {type(payload)}'
|
log_string = f'put:{req_url}\npayload: {payload}\nobject_id: {object_id}\ntype: {type(payload)}'
|
||||||
logger.info(log_string) if master_debug else logger.debug(log_string)
|
logger.info(log_string) if master_debug else logger.debug(log_string)
|
||||||
@ -160,7 +162,7 @@ async def db_delete(endpoint: str, object_id: int, api_ver: int = 3, timeout=3):
|
|||||||
|
|
||||||
async def get_team_by_abbrev(team_abbrev: str, season: int):
    """Look up a single team by its abbreviation for a given season.

    Returns the first matching team dict, or None when the lookup returns a
    falsy response or matches no rows.
    """
    query_params = [('season', season), ('team_abbrev', team_abbrev)]
    result = await db_get('teams', params=query_params)
    # A falsy response and an empty result set both mean "no such team".
    if result and result['count'] != 0:
        return result['teams'][0]
    return None
||||||
@ -169,15 +171,15 @@ async def get_team_by_abbrev(team_abbrev: str, season: int):
|
|||||||
async def get_team_by_owner(season, owner_id):
    """Fetch the active team owned by *owner_id* in the given season.

    Returns the first matching team dict, or None when the lookup returns a
    falsy response or matches no rows.
    """
    query = [('active_only', True), ('season', season), ('owner_id', owner_id)]
    teams_resp = await db_get('teams', params=query)
    # Guard clause: nothing found (or a falsy response) yields None.
    if not teams_resp or teams_resp['count'] <= 0:
        return None
    return teams_resp['teams'][0]
||||||
|
|
||||||
|
|
||||||
async def get_player_by_name(season: int, player_name: str):
|
async def get_player_by_name(season: int, player_name: str) -> Optional[dict]:
|
||||||
p_query = await db_get('players', params=[('name', player_name), ('season', season)])
|
p_query = await db_get('players', params=[('name', player_name), ('season', season)])
|
||||||
if p_query['count'] == 0:
|
if not p_query or p_query['count'] == 0:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
player = p_query['players'][0]
|
player = p_query['players'][0]
|
||||||
|
|||||||
41
exceptions.py
Normal file
41
exceptions.py
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
import logging
|
||||||
|
from typing import Literal, NoReturn, Type, Union
|
||||||
|
|
||||||
|
logger = logging.getLogger('discord_app')
|
||||||
|
|
||||||
|
|
||||||
|
def log_errors(func):
    """Decorator that forces all exceptions from *func* to be logged.

    On the success path the wrapper is transparent: it returns exactly what
    *func* returns. On failure it logs the wrapped function's name, then
    delegates to log_exception, which records the traceback/stack info and
    re-raises the exception.
    """
    import functools  # local import keeps the decorator self-contained

    # functools.wraps preserves func's __name__/__doc__ on the wrapper so
    # logs and debuggers report the real function, not 'wrap'.
    @functools.wraps(func)
    def wrap(*args, **kwargs):
        try:
            # Return directly from the try: the original's
            # 'result = ...; return result' pattern risked UnboundLocalError
            # if the except path ever fell through.
            return func(*args, **kwargs)
        except Exception as e:
            logger.error(func.__name__)
            # log_exception logs with exc_info/stack_info and re-raises.
            log_exception(e)

    return wrap
||||||
|
|
||||||
|
def log_exception(e: Union[Exception, Type[Exception]], msg: str = '', level: Literal['debug', 'error', 'info', 'warn'] = 'error') -> NoReturn:
    """Log *msg* with traceback and stack info, then raise *e*.

    Args:
        e: an exception instance (re-raised as-is) or an exception class
           (instantiated with *msg* before raising).
        msg: message to log; also the constructor argument when *e* is a class.
        level: 'debug', 'error', or 'info' select that logger method; any
           other value (including 'warn') falls back to logger.warning,
           matching the original if/elif chain's else branch.
    """
    # Dispatch table replaces the if/elif ladder; .get() preserves the
    # "unrecognized level logs as warning" fallback.
    log_fn = {
        'debug': logger.debug,
        'error': logger.error,
        'info': logger.info,
    }.get(level, logger.warning)
    log_fn(msg, exc_info=True, stack_info=True)

    if isinstance(e, Exception):
        raise e  # already an instance — re-raise untouched
    # Otherwise 'e' is an exception class: build an instance carrying msg.
    raise e(msg)
    # NOTE: the original had an unreachable 'return' here; removed, since both
    # branches raise and the signature is -> NoReturn.
||||||
|
|
||||||
|
|
||||||
|
class ApiException(Exception):
    """Signals an error reported by the backing API layer."""
||||||
Loading…
Reference in New Issue
Block a user