diff --git a/.gitignore b/.gitignore
index 3601644..374f012 100644
--- a/.gitignore
+++ b/.gitignore
@@ -129,4 +129,7 @@ dmypy.json
 .pyre/
 
 # Project specifics
-.idea/
\ No newline at end of file
+.idea/
+
+# Project Specific
+storage/*
\ No newline at end of file
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..18f8896
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,117 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+# Use forward slashes (/) also on windows to provide an os agnostic path
+script_location = migrations
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python>=3.9 or backports.zoneinfo library.
+# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to migrations/versions.  When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+# version_path_separator = newline
+version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts.  See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/in_game/gameplay_db.py b/in_game/gameplay_db.py
new file mode 100644
index 0000000..124906b
--- /dev/null
+++ b/in_game/gameplay_db.py
@@ -0,0 +1,134 @@
+import datetime
+from sqlmodel import Session, SQLModel, create_engine, select, Field, Relationship
+
+
+sqlite_url = 'sqlite:///./storage/gameplay.db'
+connect_args = {"check_same_thread": False}
+engine = create_engine(sqlite_url, echo=False, connect_args=connect_args)
+
+
+class GameCardsetLink(SQLModel, table=True):
+    game_id: int | None = Field(default=None, foreign_key='game.id', primary_key=True)
+    cardset_id: int | None = Field(default=None, foreign_key='cardset.id', primary_key=True)
+    priority: int | None = Field(default=1, index=True)
+
+    game: 'Game' = Relationship(back_populates='cardset_links')
+    cardset: 'Cardset' = Relationship(back_populates='game_links')
+
+
+class Game(SQLModel, table=True):
+    id: int | None = Field(default=None, primary_key=True)
+    away_team_id: int
+    home_team_id: int
+    channel_id: int = Field(index=True)
+    season: int
+    active: bool | None = Field(default=True)
+    is_pd: bool | None = Field(default=True)
+    ranked: bool | None = Field(default=False)
+    short_game: bool | None = Field(default=False)
+    week_num: int | None = Field(default=None)
+    game_num: int | None = Field(default=None)
+    away_roster_id: int | None = Field(default=None)
+    home_roster_id: int | None = Field(default=None)
+    first_message: int | None = Field(default=None)
+    ai_team: str | None = Field(default=None)
+    game_type: str | None = Field(default=None)
+
+    cardset_links: list[GameCardsetLink] = Relationship(back_populates='game')
+    lineups: list['Lineup'] = Relationship(back_populates='game')
+
+    @property
+    def game_prop(self) -> str:
+        return f'Game {self.id} / Week {self.week_num} / Type {self.game_type}'
+
+
+class Cardset(SQLModel, table=True):
+    id: int | None = Field(default=None, primary_key=True)
+    name: str
+    ranked_legal: bool | None = Field(default=False)
+
+    game_links: list[GameCardsetLink] = Relationship(back_populates='cardset')
+
+
+class Lineup(SQLModel, table=True):
+    id: int | None = Field(default=None, primary_key=True)
+    team_id: int
+    player_id: int
+    card_id: int
+    position: str = Field(index=True)
+    batting_order: int = Field(index=True)
+    after_play: int | None = Field(default=0)
+    replacing_id: int | None = Field(default=None)
+    active: bool = Field(default=True, index=True)
+    is_fatigued: bool | None = Field(default=None)
+
+    game_id: int = Field(foreign_key='game.id', index=True)
+
+    game: Game = Relationship(back_populates='lineups')
+
+
+def create_db_and_tables():
+    SQLModel.metadata.create_all(engine)
+
+
+def create_test_games():
+    with Session(engine) as session:
+        game_1 = Game(
+            away_team_id=1,
+            home_team_id=2,
+            channel_id=1234,
+            season=9,
+        )
+        game_2 = Game(
+            away_team_id=3,
+            home_team_id=4,
+            channel_id=5678,
+            season=9,
+        )
+
+        cardset_2024 = Cardset(name='2024 Season', ranked_legal=True)
+        cardset_2022 = Cardset(name='2022 Season', ranked_legal=False)
+
+        game_1_cardset_2024_link = GameCardsetLink(game=game_1, cardset=cardset_2024, priority=1)
+        game_1_cardset_2022_link = GameCardsetLink(game=game_1, cardset=cardset_2022, priority=2)
+        game_2_cardset_2024_link = GameCardsetLink(game=game_2, cardset=cardset_2024, priority=1)
+
+        for team_id in [1, 2]:
+            for (order, pos) in [(1, 'C'), (2, '1B'), (3, '2B'), (4, '3B'), (5, 'SS'), (6, 'LF'), (7, 'CF'), (8, 'RF'), (9, 'DH')]:
+                this_lineup = Lineup(team_id=team_id, card_id=order, player_id=68+order, position=pos, batting_order=order, game=game_1)
+
+        for team_id in [3, 4]:
+            for (order, pos) in [(1, 'C'), (2, '1B'), (3, '2B'), (4, '3B'), (5, 'SS'), (6, 'LF'), (7, 'CF'), (8, 'RF'), (9, 'DH')]:
+                this_lineup = Lineup(team_id=team_id, card_id=order, player_id=100+order, position=pos, batting_order=order, game=game_2)
+
+        session.add(game_1)
+        session.add(game_2)
+        session.commit()
+
+
+def select_testing():
+    with Session(engine) as session:
+        game_1 = session.exec(select(Game).where(Game.id == 1)).one()
+        ss_search_start = datetime.datetime.now()
+        man_ss = [x for x in game_1.lineups if x.position == 'SS' and x.active]
+        ss_search_end = datetime.datetime.now()
+
+        ss_query_start = datetime.datetime.now()
+        query_ss = session.exec(select(Lineup).where(Lineup.game == game_1, Lineup.position == 'SS', Lineup.active == True)).all()
+        ss_query_end = datetime.datetime.now()
+
+        manual_time = ss_search_end - ss_search_start
+        query_time = ss_query_end - ss_query_start
+
+        print(f'Manual Shortstops: time: {manual_time.total_seconds() * 1000:.3f} ms / {man_ss}')
+        print(f'Query Shortstops: time: {query_time.total_seconds() * 1000:.3f} ms / {query_ss}')
+        print(f'Game: {game_1}')
+
+
+def main():
+    # create_db_and_tables()
+    # create_test_games()
+    select_testing()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 0000000..b33e098
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,82 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+from in_game.gameplay_db import sqlite_url, Game  # importing the models module registers its tables on SQLModel.metadata
+from sqlmodel import SQLModel
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+config.set_main_option('sqlalchemy.url', sqlite_url)
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = SQLModel.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection, target_metadata=target_metadata
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/requirements.txt b/requirements.txt
index 2ff3058..9d082ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,8 @@
-discord.py==2.1.1
+discord.py
 pygsheets
 pydantic
 gsheets
 bs4
 peewee
-
+sqlmodel
+alembic
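
Not part of the patch: a minimal sketch of how these pieces would typically be exercised once the patch is applied, using Alembic's Python command API instead of the CLI. The script name, revision message, and the choice to autogenerate are illustrative assumptions, not something this diff prescribes.

# sketch_migrate.py - hypothetical helper, assumed to run from the repo root where
# alembic.ini lives, with the storage/ directory already present for the SQLite file.
from alembic import command
from alembic.config import Config

# Load the alembic.ini added above; migrations/env.py overrides sqlalchemy.url with
# sqlite_url from in_game.gameplay_db, so the placeholder driver://user:pass@localhost/dbname
# in the ini file is never used at runtime.
alembic_cfg = Config("alembic.ini")

# Autogenerate a revision against SQLModel.metadata (Game, Cardset, GameCardsetLink, Lineup),
# then apply it to sqlite:///./storage/gameplay.db.
command.revision(alembic_cfg, message="create gameplay tables", autogenerate=True)
command.upgrade(alembic_cfg, "head")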