refactor: migrate Discord bot to hexagonal adapter, remove old app/ directory

Discord bot inbound adapter (adapters/inbound/discord_bot.py):
- ChatService injected directly — no HTTP roundtrip to FastAPI API
- No module-level singleton: create_bot() factory for construction
- Pure functions extracted for testing: build_answer_embed,
  build_error_embed, parse_conversation_id
- Uses message.reference.resolved cache before fetch_message
- Error embeds never leak exception details
- 19 new tests covering embed building, footer parsing, error safety

Removed old app/ directory (9 files):
- All functionality preserved in hexagonal domain/, adapters/, config/
- Old test_basic.py removed (superseded by 120 adapter/domain tests)

Other changes:
- docker-compose: api uses main:app, discord-bot uses run_discord.py
  with direct ChatService injection (no API dependency)
- Removed unused openai dependency from pyproject.toml
- Removed app/ from hatch build targets

Test suite: 120 passed, 1 skipped

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Cal Corum 2026-03-08 16:07:36 -05:00
parent 2fe7163c89
commit 1f1048ee08
16 changed files with 553 additions and 1435 deletions

View File

@ -0,0 +1,284 @@
"""Discord inbound adapter — translates Discord events into ChatService calls.
Key design decisions vs the old app/discord_bot.py:
- No module-level singleton: the bot is constructed via create_bot() factory
- ChatService is injected directly — no HTTP roundtrip to the FastAPI API
- Pure functions (build_answer_embed, parse_conversation_id, etc.) are
extracted and independently testable
- All logging, no print()
- Error embeds never leak exception details
"""
import logging
from typing import Optional
import discord
from discord import app_commands
from discord.ext import commands
from domain.models import ChatResult
from domain.services import ChatService
logger = logging.getLogger(__name__)
CONFIDENCE_THRESHOLD = 0.4
FOOTER_PREFIX = "conv:"
MAX_EMBED_DESCRIPTION = 4000
# ---------------------------------------------------------------------------
# Pure helper functions (testable without Discord)
# ---------------------------------------------------------------------------
def build_answer_embed(
    result: ChatResult,
    title: str = "Rules Answer",
    color: discord.Color | None = None,
) -> discord.Embed:
    """Render a ChatResult as a Discord embed.

    Truncates over-long responses, attaches cited-rule and low-confidence
    fields when relevant, and stamps the footer with the conversation ID so
    replies can be threaded back into the same conversation.
    """
    body = result.response
    # Stay under the embed-description limit, reserving room for the notice.
    if len(body) > MAX_EMBED_DESCRIPTION:
        notice = "\n\n*(Response truncated — ask a more specific question)*"
        body = body[: MAX_EMBED_DESCRIPTION - 60] + notice
    embed = discord.Embed(
        title=title,
        description=body,
        color=discord.Color.blue() if color is None else color,
    )
    if result.cited_rules:
        rule_list = ", ".join(f"`{rid}`" for rid in result.cited_rules)
        embed.add_field(name="📋 Cited Rules", value=rule_list, inline=False)
    if result.confidence < CONFIDENCE_THRESHOLD:
        embed.add_field(
            name="⚠️ Confidence",
            value=f"Low ({result.confidence:.0%}) — a human review has been requested",
            inline=False,
        )
    # The full conversation UUID lives in the footer so follow-up replies
    # can be routed back to the same conversation.
    embed.set_footer(
        text=f"{FOOTER_PREFIX}{result.conversation_id} | Reply to ask a follow-up"
    )
    return embed
def build_error_embed(error: Exception) -> discord.Embed:
    """Return a generic, safe error embed.

    The exception is accepted only for interface symmetry; the caller logs
    it, and users only ever see this fixed non-revealing message.
    """
    del error  # never surfaced to users
    message = (
        "Something went wrong while processing your request. "
        "Please try again later."
    )
    return discord.Embed(
        title="❌ Error",
        description=message,
        color=discord.Color.red(),
    )
def parse_conversation_id(footer_text: Optional[str]) -> Optional[str]:
    """Pull the conversation UUID out of an embed footer.

    The footer is expected to look like
    "conv:<uuid> | Reply to ask a follow-up".
    Returns None when the footer is missing, malformed, or empty.
    """
    if not footer_text or FOOTER_PREFIX not in footer_text:
        return None
    try:
        after_prefix = footer_text.split(FOOTER_PREFIX)[1]
        candidate = after_prefix.split(" ")[0].strip()
    except (IndexError, AttributeError):
        return None
    return candidate or None
# ---------------------------------------------------------------------------
# Bot class
# ---------------------------------------------------------------------------
class StratChatbot(commands.Bot):
    """Discord bot that answers Strat-O-Matic rules questions.
    Unlike the old implementation, this bot calls ChatService directly
    instead of going through the HTTP API, eliminating the roundtrip.
    """
    def __init__(
        self,
        chat_service: ChatService,
        guild_id: Optional[str] = None,
    ):
        """Store injected dependencies and register commands/events.

        Args:
            chat_service: Domain service that answers rules questions.
            guild_id: Optional guild to scope slash-command sync to;
                when None, commands are synced globally in setup_hook().
        """
        intents = discord.Intents.default()
        # message_content is needed to read the text of reply messages
        # for follow-up handling.
        intents.message_content = True
        super().__init__(command_prefix="!", intents=intents)
        self.chat_service = chat_service
        self.guild_id = guild_id
        # Register commands and events
        self._register_commands()
    def _register_commands(self) -> None:
        """Register slash commands and event handlers."""
        @self.tree.command(
            name="ask",
            description="Ask a question about Strat-O-Matic league rules",
        )
        @app_commands.describe(
            question="Your rules question (e.g., 'Can a runner steal on a 2-2 count?')"
        )
        async def ask_command(interaction: discord.Interaction, question: str):
            await self._handle_ask(interaction, question)
        @self.event
        async def on_ready():
            if not self.user:
                return
            logger.info("Bot logged in as %s (ID: %s)", self.user, self.user.id)
        @self.event
        async def on_message(message: discord.Message):
            # NOTE(review): registering on_message via @self.event replaces
            # commands.Bot's default handler, so prefix ("!") commands are
            # never dispatched — presumably intentional since only slash
            # commands are registered; confirm.
            await self._handle_follow_up(message)
    async def setup_hook(self) -> None:
        """Sync slash commands on startup."""
        if self.guild_id:
            # Guild-scoped sync propagates much faster than global sync,
            # which is why a guild_id is preferred during development.
            guild = discord.Object(id=int(self.guild_id))
            self.tree.copy_global_to(guild=guild)
            await self.tree.sync(guild=guild)
            logger.info("Slash commands synced to guild %s", self.guild_id)
        else:
            await self.tree.sync()
            logger.info("Slash commands synced globally")
    # ------------------------------------------------------------------
    # /ask command handler
    # ------------------------------------------------------------------
    async def _handle_ask(
        self, interaction: discord.Interaction, question: str
    ) -> None:
        """Handle the /ask slash command.

        Defers the interaction first so the (potentially slow) ChatService
        call does not exceed Discord's interaction response deadline.
        """
        await interaction.response.defer(ephemeral=False)
        try:
            result = await self.chat_service.answer_question(
                message=question,
                user_id=str(interaction.user.id),
                channel_id=str(interaction.channel_id),
            )
            embed = build_answer_embed(result, title="Rules Answer")
            await interaction.followup.send(embed=embed)
        except Exception as e:
            # Full details go to the log; the user gets a generic embed.
            logger.error(
                "Error in /ask from user %s: %s",
                interaction.user.id,
                e,
                exc_info=True,
            )
            await interaction.followup.send(embed=build_error_embed(e))
    # ------------------------------------------------------------------
    # Follow-up reply handler
    # ------------------------------------------------------------------
    async def _handle_follow_up(self, message: discord.Message) -> None:
        """Handle reply-based follow-up questions.

        Only acts on non-bot messages that reply to one of this bot's own
        answer embeds; the conversation ID is recovered from the footer.
        """
        if message.author.bot:
            return
        if not message.reference or message.reference.message_id is None:
            return
        # Use cached resolved message first, fetch only if needed
        referenced = message.reference.resolved
        if referenced is None or not isinstance(referenced, discord.Message):
            # NOTE(review): fetch_message can raise (e.g. deleted message or
            # missing permissions); that propagates out of this handler
            # unhandled — confirm this is acceptable.
            referenced = await message.channel.fetch_message(
                message.reference.message_id
            )
        if referenced.author != self.user:
            return
        # Extract conversation ID from the referenced embed footer
        embed = referenced.embeds[0] if referenced.embeds else None
        footer_text = embed.footer.text if embed and embed.footer else None
        conversation_id = parse_conversation_id(footer_text)
        if conversation_id is None:
            await message.reply(
                "❓ Could not find conversation context. Use `/ask` to start fresh.",
                mention_author=True,
            )
            return
        parent_message_id = str(referenced.id)
        # Placeholder message is edited in place once the answer is ready.
        loading_msg = await message.reply(
            "🔍 Looking into that follow-up...", mention_author=True
        )
        try:
            result = await self.chat_service.answer_question(
                message=message.content,
                user_id=str(message.author.id),
                channel_id=str(message.channel.id),
                conversation_id=conversation_id,
                parent_message_id=parent_message_id,
            )
            response_embed = build_answer_embed(
                result, title="Follow-up Answer", color=discord.Color.green()
            )
            await loading_msg.edit(content=None, embed=response_embed)
        except Exception as e:
            logger.error(
                "Error in follow-up from user %s: %s",
                message.author.id,
                e,
                exc_info=True,
            )
            await loading_msg.edit(content=None, embed=build_error_embed(e))
# ---------------------------------------------------------------------------
# Factory + entry point
# ---------------------------------------------------------------------------
def create_bot(
    chat_service: ChatService,
    guild_id: Optional[str] = None,
) -> StratChatbot:
    """Construct a StratChatbot with injected dependencies.

    Keeping construction behind a factory avoids any module-level
    singleton and keeps wiring explicit for tests.
    """
    bot = StratChatbot(chat_service=chat_service, guild_id=guild_id)
    return bot
def run_bot(
    token: str,
    chat_service: ChatService,
    guild_id: Optional[str] = None,
) -> None:
    """Construct the bot via create_bot() and hand control to discord.py.

    This call blocks until the bot shuts down.

    Raises:
        ValueError: if ``token`` is empty.
    """
    if not token:
        raise ValueError("Discord bot token must not be empty")
    create_bot(chat_service=chat_service, guild_id=guild_id).run(token)

View File

@ -1 +0,0 @@
"""Strat-Chatbot application package."""

View File

@ -1,53 +0,0 @@
"""Configuration management using Pydantic Settings."""
from pathlib import Path
from pydantic_settings import BaseSettings
from pydantic import Field
class Settings(BaseSettings):
    """Application settings with environment variable overrides.

    Each field's default can be overridden via the named environment
    variable, or via a local ``.env`` file (see Config below).
    NOTE(review): the ``env=`` keyword on Field is the pydantic-settings
    v1-style spelling; confirm it is honored by the pinned pydantic version.
    """
    # OpenRouter
    openrouter_api_key: str = Field(default="", env="OPENROUTER_API_KEY")
    openrouter_model: str = Field(
        default="stepfun/step-3.5-flash:free", env="OPENROUTER_MODEL"
    )
    # Discord
    discord_bot_token: str = Field(default="", env="DISCORD_BOT_TOKEN")
    discord_guild_id: str | None = Field(default=None, env="DISCORD_GUILD_ID")
    # Gitea
    gitea_token: str = Field(default="", env="GITEA_TOKEN")
    gitea_owner: str = Field(default="cal", env="GITEA_OWNER")
    gitea_repo: str = Field(default="strat-chatbot", env="GITEA_REPO")
    gitea_base_url: str = Field(
        default="https://git.manticorum.com/api/v1", env="GITEA_BASE_URL"
    )
    # Paths
    data_dir: Path = Field(default=Path("./data"), env="DATA_DIR")
    rules_dir: Path = Field(default=Path("./data/rules"), env="RULES_DIR")
    chroma_dir: Path = Field(default=Path("./data/chroma"), env="CHROMA_DIR")
    # Database
    db_url: str = Field(
        default="sqlite+aiosqlite:///./data/conversations.db", env="DB_URL"
    )
    # Conversation state TTL (seconds)
    conversation_ttl: int = Field(default=1800, env="CONVERSATION_TTL")
    # Vector search
    top_k_rules: int = Field(default=10, env="TOP_K_RULES")
    embedding_model: str = Field(
        default="sentence-transformers/all-MiniLM-L6-v2", env="EMBEDDING_MODEL"
    )
    class Config:
        # Values in .env are loaded when present; real env vars win.
        env_file = ".env"
        env_file_encoding = "utf-8"
# Module-level singleton imported throughout the old app/ package.
settings = Settings()

View File

@ -1,162 +0,0 @@
"""SQLAlchemy-based conversation state management with aiosqlite."""
from datetime import datetime, timedelta
from typing import Optional
import uuid
import sqlalchemy as sa
from fastapi import Request
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy import Column, String, DateTime, Boolean, ForeignKey, select
from .config import settings
Base = declarative_base()
class ConversationTable(Base):
    """SQLAlchemy model for conversations.

    One row per Discord conversation; ``last_activity`` drives TTL cleanup
    in ConversationManager.cleanup_old_conversations().
    """
    __tablename__ = "conversations"
    id = Column(String, primary_key=True)  # UUID string
    user_id = Column(String, nullable=False)
    channel_id = Column(String, nullable=False)
    # NOTE(review): datetime.utcnow produces naive timestamps (and is
    # deprecated in newer Python); all comparisons here stay naive-UTC.
    created_at = Column(DateTime, default=datetime.utcnow)
    last_activity = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
class MessageTable(Base):
    """SQLAlchemy model for messages.

    Messages belong to a conversation and may chain via ``parent_id``
    (a self-referencing FK) to represent reply threads.
    """
    __tablename__ = "messages"
    id = Column(String, primary_key=True)  # UUID string
    conversation_id = Column(String, ForeignKey("conversations.id"), nullable=False)
    content = Column(String, nullable=False)
    is_user = Column(Boolean, nullable=False)  # True = user, False = assistant
    parent_id = Column(String, ForeignKey("messages.id"), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow)
class ConversationManager:
    """Manages conversation state in SQLite.

    Wraps an async SQLAlchemy engine; each public method opens its own
    short-lived session, so instances can be shared across requests.
    """
    def __init__(self, db_url: str):
        """Initialize database engine and session factory.

        Args:
            db_url: Async SQLAlchemy URL (e.g. sqlite+aiosqlite:///...).
        """
        self.engine = create_async_engine(db_url, echo=False)
        self.async_session = sessionmaker(
            self.engine, class_=AsyncSession, expire_on_commit=False
        )
    async def init_db(self):
        """Create tables if they don't exist."""
        async with self.engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)
    async def get_or_create_conversation(
        self, user_id: str, channel_id: str, conversation_id: Optional[str] = None
    ) -> str:
        """Get existing conversation or create a new one.

        Returns the conversation ID. NOTE(review): if a conversation_id is
        supplied but no longer exists (e.g. TTL-cleaned), a brand new
        conversation is silently created under a different ID.
        """
        async with self.async_session() as session:
            if conversation_id:
                result = await session.execute(
                    select(ConversationTable).where(
                        ConversationTable.id == conversation_id
                    )
                )
                conv = result.scalar_one_or_none()
                if conv:
                    # Touch last_activity so the TTL window restarts.
                    conv.last_activity = datetime.utcnow()
                    await session.commit()
                    return conv.id
            # Create new conversation
            new_id = str(uuid.uuid4())
            conv = ConversationTable(id=new_id, user_id=user_id, channel_id=channel_id)
            session.add(conv)
            await session.commit()
            return new_id
    async def add_message(
        self,
        conversation_id: str,
        content: str,
        is_user: bool,
        parent_id: Optional[str] = None,
    ) -> str:
        """Add a message to a conversation.

        Also bumps the parent conversation's last_activity.
        Returns the new message's UUID.
        """
        message_id = str(uuid.uuid4())
        async with self.async_session() as session:
            msg = MessageTable(
                id=message_id,
                conversation_id=conversation_id,
                content=content,
                is_user=is_user,
                parent_id=parent_id,
            )
            session.add(msg)
            # Update conversation activity
            result = await session.execute(
                select(ConversationTable).where(ConversationTable.id == conversation_id)
            )
            conv = result.scalar_one_or_none()
            if conv:
                conv.last_activity = datetime.utcnow()
            await session.commit()
        return message_id
    async def get_conversation_history(
        self, conversation_id: str, limit: int = 10
    ) -> list[dict]:
        """Get recent messages from a conversation in OpenAI format.

        Fetches the newest ``limit`` messages, then reverses them so the
        returned list is chronological ({"role": ..., "content": ...}).
        """
        async with self.async_session() as session:
            result = await session.execute(
                select(MessageTable)
                .where(MessageTable.conversation_id == conversation_id)
                .order_by(MessageTable.created_at.desc())
                .limit(limit)
            )
            messages = result.scalars().all()
        # Reverse to get chronological order and convert to API format
        history = []
        for msg in reversed(messages):
            role = "user" if msg.is_user else "assistant"
            history.append({"role": role, "content": msg.content})
        return history
    async def cleanup_old_conversations(self, ttl_seconds: int = 1800):
        """Delete conversations older than TTL to free up storage.

        Args:
            ttl_seconds: Conversations idle longer than this are removed,
                together with all of their messages.
        """
        cutoff = datetime.utcnow() - timedelta(seconds=ttl_seconds)
        async with self.async_session() as session:
            # Find old conversations
            result = await session.execute(
                select(ConversationTable).where(
                    ConversationTable.last_activity < cutoff
                )
            )
            old_convs = result.scalars().all()
            conv_ids = [conv.id for conv in old_convs]
            if conv_ids:
                # Delete messages first (cascade would handle but explicit is clear)
                await session.execute(
                    sa.delete(MessageTable).where(
                        MessageTable.conversation_id.in_(conv_ids)
                    )
                )
                # Delete conversations
                await session.execute(
                    sa.delete(ConversationTable).where(
                        ConversationTable.id.in_(conv_ids)
                    )
                )
                await session.commit()
                # NOTE(review): print() rather than the module logger —
                # inconsistent with the rest of the codebase's logging.
                print(f"Cleaned up {len(conv_ids)} old conversations")
async def get_conversation_manager(request: Request) -> ConversationManager:
    """FastAPI dependency returning the app-state ConversationManager singleton."""
    manager: ConversationManager = request.app.state.db_manager
    return manager

View File

@ -1,276 +0,0 @@
"""Discord bot for Strat-O-Matic rules Q&A."""
import logging
import discord
from discord import app_commands
from discord.ext import commands
import aiohttp
from typing import Optional
from .config import settings
logger = logging.getLogger(__name__)
class StratChatbotBot(commands.Bot):
    """Discord bot for the rules chatbot.

    Talks to the FastAPI chat API over HTTP (see query_chat_api); the
    target base URL is assigned by run_bot() before the bot starts.
    """
    def __init__(self):
        """Initialize the bot with default intents."""
        intents = discord.Intents.default()
        # Needed to read the text of reply messages for follow-ups.
        intents.message_content = True
        super().__init__(command_prefix="!", intents=intents)
        # Set externally by run_bot() before bot.run() is called.
        self.api_base_url: Optional[str] = None
        # Created lazily in setup_hook(), closed in close().
        self.session: Optional[aiohttp.ClientSession] = None
    async def setup_hook(self):
        """Set up the bot's HTTP session and sync commands."""
        self.session = aiohttp.ClientSession()
        # Sync slash commands with Discord
        if settings.discord_guild_id:
            # Guild-scoped sync propagates faster than a global sync.
            guild = discord.Object(id=int(settings.discord_guild_id))
            self.tree.copy_global_to(guild=guild)
            await self.tree.sync(guild=guild)
            logger.info("Slash commands synced to guild %s", settings.discord_guild_id)
        else:
            await self.tree.sync()
            logger.info("Slash commands synced globally")
    async def close(self):
        """Cleanup on shutdown."""
        if self.session:
            await self.session.close()
        await super().close()
    async def query_chat_api(
        self,
        message: str,
        user_id: str,
        channel_id: str,
        conversation_id: Optional[str] = None,
        parent_message_id: Optional[str] = None,
    ) -> dict:
        """Send a request to the FastAPI chat endpoint.

        Returns the decoded JSON body on HTTP 200.

        Raises:
            RuntimeError: if the session is missing or the API returns
                a non-200 status (details are logged, not propagated).
        """
        if not self.session:
            raise RuntimeError("Bot HTTP session not initialized")
        payload = {
            "message": message,
            "user_id": user_id,
            "channel_id": channel_id,
            "conversation_id": conversation_id,
            "parent_message_id": parent_message_id,
        }
        # Generous 120s timeout: LLM-backed answers can be slow.
        async with self.session.post(
            f"{self.api_base_url}/chat",
            json=payload,
            timeout=aiohttp.ClientTimeout(total=120),
        ) as response:
            if response.status != 200:
                error_text = await response.text()
                logger.error(
                    "API returned %s for %s %s — body: %s",
                    response.status,
                    response.method,
                    response.url,
                    error_text,
                )
                raise RuntimeError(f"API error {response.status}")
            return await response.json()
# Module-level singleton — the design the new hexagonal adapter replaces
# with a create_bot() factory.
bot = StratChatbotBot()
@bot.event
async def on_ready():
    """Called when the bot is ready."""
    if not bot.user:
        return
    logger.info("Bot logged in as %s (ID: %s)", bot.user, bot.user.id)
    logger.info("Ready to answer Strat-O-Matic rules questions!")
@bot.tree.command(
    name="ask", description="Ask a question about Strat-O-Matic league rules"
)
@app_commands.describe(
    question="Your rules question (e.g., 'Can a runner steal on a 2-2 count?')"
)
async def ask_command(interaction: discord.Interaction, question: str):
    """Handle /ask command.

    Defers the interaction (the API call may be slow), queries the chat
    API, and replies with an answer embed; failures are logged and the
    user sees only a generic error embed.
    """
    await interaction.response.defer(ephemeral=False)
    try:
        result = await bot.query_chat_api(
            message=question,
            user_id=str(interaction.user.id),
            channel_id=str(interaction.channel_id),
            conversation_id=None,  # New conversation
            parent_message_id=None,
        )
        # Build response embed
        embed = discord.Embed(
            title="Rules Answer",
            description=result["response"][:4000],  # Discord limit
            color=discord.Color.blue(),
        )
        # Add cited rules if any
        if result.get("cited_rules"):
            embed.add_field(
                name="📋 Cited Rules",
                value=", ".join([f"`{rid}`" for rid in result["cited_rules"]]),
                inline=False,
            )
        # Add confidence indicator
        confidence = result.get("confidence", 0.0)
        if confidence < 0.4:
            embed.add_field(
                name="⚠️ Confidence",
                value=f"Low ({confidence:.0%}) - A human review has been requested",
                inline=False,
            )
        # Add conversation ID for follow-ups (full UUID so replies can be threaded)
        embed.set_footer(
            text=f"conv:{result['conversation_id']} | Reply to ask a follow-up"
        )
        await interaction.followup.send(embed=embed)
    except Exception as e:
        logger.error(
            "Error handling /ask from user %s: %s",
            interaction.user.id,
            e,
            exc_info=True,
        )
        # Never leak exception details to the channel.
        await interaction.followup.send(
            embed=discord.Embed(
                title="❌ Error",
                description="Something went wrong while fetching your answer. Please try again later.",
                color=discord.Color.red(),
            )
        )
@bot.event
async def on_message(message: discord.Message):
    """Handle follow-up messages via reply.

    A user replying to one of the bot's answer embeds continues the same
    conversation; the conversation UUID is recovered from the embed footer
    ("conv:<uuid> | ..."). All other messages are ignored.
    """
    # Ignore bot messages
    if message.author.bot:
        return
    # Only handle replies to the bot's messages
    if not message.reference or message.reference.message_id is None:
        return
    # NOTE(review): always hits the API — no use of reference.resolved,
    # and fetch_message errors (deleted message, missing perms) propagate
    # unhandled. The replacement adapter addresses both.
    referenced = await message.channel.fetch_message(message.reference.message_id)
    # Check if the referenced message was from this bot
    if referenced.author != bot.user:
        return
    # Try to extract conversation ID from the footer
    embed = referenced.embeds[0] if referenced.embeds else None
    if not embed or not embed.footer:
        await message.reply(
            "❓ I couldn't find this conversation. Please use `/ask` to start a new question.",
            mention_author=True,
        )
        return
    footer_text = embed.footer.text or ""
    if "conv:" not in footer_text:
        await message.reply(
            "❓ Could not determine conversation. Use `/ask` to start fresh.",
            mention_author=True,
        )
        return
    # Extract full conversation UUID from "conv:<uuid> | ..." format
    try:
        conversation_id = footer_text.split("conv:")[1].split(" ")[0].strip()
    except (IndexError, AttributeError):
        await message.reply(
            "❓ Could not parse conversation ID. Use `/ask` to start fresh.",
            mention_author=True,
        )
        return
    # Get parent message ID (the original answer message)
    parent_message_id = str(referenced.id)
    # Send a loading placeholder and replace it with the real answer when ready
    loading_msg = await message.reply(
        "🔍 Looking into that follow-up...", mention_author=True
    )
    try:
        result = await bot.query_chat_api(
            message=message.content,
            user_id=str(message.author.id),
            channel_id=str(message.channel.id),
            conversation_id=conversation_id,
            parent_message_id=parent_message_id,
        )
        response_embed = discord.Embed(
            title="Follow-up Answer",
            description=result["response"][:4000],
            color=discord.Color.green(),
        )
        if result.get("cited_rules"):
            response_embed.add_field(
                name="📋 Cited Rules",
                value=", ".join([f"`{rid}`" for rid in result["cited_rules"]]),
                inline=False,
            )
        if result.get("confidence", 0.0) < 0.4:
            response_embed.add_field(
                name="⚠️ Confidence",
                value="Low - Human review requested",
                inline=False,
            )
        # Carry the conversation ID forward so further replies stay in the same thread
        response_embed.set_footer(
            text=f"conv:{result['conversation_id']} | Reply to ask a follow-up"
        )
        await loading_msg.edit(content=None, embed=response_embed)
    except Exception as e:
        logger.error(
            "Error handling follow-up from user %s in channel %s: %s",
            message.author.id,
            message.channel.id,
            e,
            exc_info=True,
        )
        # Replace the loading placeholder with a generic error embed.
        await loading_msg.edit(
            content=None,
            embed=discord.Embed(
                title="❌ Error",
                description="Something went wrong while processing your follow-up. Please try again later.",
                color=discord.Color.red(),
            ),
        )
def run_bot(api_base_url: str = "http://localhost:8000"):
    """Entry point to run the Discord bot (blocking).

    Args:
        api_base_url: Base URL of the FastAPI chat API the bot proxies to.

    Terminates the process with exit status 1 when no bot token is
    configured.
    """
    bot.api_base_url = api_base_url
    if not settings.discord_bot_token:
        logger.critical("DISCORD_BOT_TOKEN environment variable is required")
        # The exit() builtin is a site-module convenience intended for the
        # interactive shell and may be absent (e.g. under `python -S`);
        # raising SystemExit directly is equivalent and always available.
        raise SystemExit(1)
    bot.run(settings.discord_bot_token)

View File

@ -1,95 +0,0 @@
"""Gitea client for creating issues when questions need human review."""
import httpx
from typing import Optional
from .config import settings
class GiteaClient:
    """Client for Gitea API interactions.

    Used to file issues when the bot cannot confidently answer a question.
    Credentials and repo coordinates come from module settings.
    """
    def __init__(self):
        """Initialize Gitea client with credentials."""
        self.token = settings.gitea_token
        self.owner = settings.gitea_owner
        self.repo = settings.gitea_repo
        # Normalize so URL joins below never produce a double slash.
        self.base_url = settings.gitea_base_url.rstrip("/")
        self.headers = {
            "Authorization": f"token {self.token}",
            "Content-Type": "application/json",
            "Accept": "application/json",
        }
    async def create_issue(
        self,
        title: str,
        body: str,
        labels: Optional[list[str]] = None,
        assignee: Optional[str] = None,
    ) -> dict:
        """Create a new issue in the configured repository.

        Returns the decoded issue JSON.

        Raises:
            RuntimeError: on any non-200/201 response (body included).
        """
        url = f"{self.base_url}/repos/{self.owner}/{self.repo}/issues"
        payload: dict = {"title": title, "body": body}
        if labels:
            payload["labels"] = labels
        if assignee:
            payload["assignee"] = assignee
        # One-shot client per call; fine at this call volume.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(url, headers=self.headers, json=payload)
            if response.status_code not in (200, 201):
                error_detail = response.text
                raise RuntimeError(
                    f"Gitea API error creating issue: {response.status_code} - {error_detail}"
                )
            return response.json()
    async def create_unanswered_issue(
        self,
        question: str,
        user_id: str,
        channel_id: str,
        attempted_rules: list[str],
        conversation_id: str,
    ) -> str:
        """Create an issue for an unanswered question needing human review.

        Returns the issue's html_url, or "" if absent from the response.
        """
        # Title is capped at 80 chars of the question, with an ellipsis.
        title = f"🤔 Unanswered rules question: {question[:80]}{'...' if len(question) > 80 else ''}"
        body = f"""## Unanswered Question
**User:** {user_id}
**Channel:** {channel_id}
**Conversation ID:** {conversation_id}
**Question:**
{question}
**Searched Rules:**
{', '.join(attempted_rules) if attempted_rules else 'None'}
**Additional Context:**
This question was asked in Discord and the bot could not provide a confident answer. The rules either don't cover this question or the information was ambiguous.
---
*This issue was automatically created by the Strat-Chatbot.*"""
        labels = ["rules-gap", "ai-generated", "needs-review"]
        issue = await self.create_issue(title=title, body=body, labels=labels)
        return issue.get("html_url", "")
def get_gitea_client() -> Optional[GiteaClient]:
    """Return a GiteaClient when a token is configured, otherwise None."""
    return GiteaClient() if settings.gitea_token else None

View File

@ -1,179 +0,0 @@
"""OpenRouter LLM integration for answering rules questions."""
from typing import Optional
import json
import httpx
from .config import settings
from .models import RuleSearchResult, ChatResponse
SYSTEM_PROMPT = """You are a helpful assistant for a Strat-O-Matic baseball league.
Your job is to answer questions about league rules and procedures using the provided rule excerpts.
CRITICAL RULES:
1. ONLY use information from the provided rules. If the rules don't contain the answer, say so clearly.
2. ALWAYS cite rule IDs when referencing a rule (e.g., "Rule 5.2.1(b) states that...")
3. If multiple rules are relevant, cite all of them.
4. If you're uncertain or the rules are ambiguous, say so and suggest asking a league administrator.
5. Keep responses concise but complete. Use examples when helpful from the rules.
6. Do NOT make up rules or infer beyond what's explicitly stated.
When answering:
- Start with a direct answer to the question
- Support with rule citations
- Include relevant details from the rules
- If no relevant rules found, explicitly state: "I don't have a rule that addresses this question."
Response format (JSON):
{
"answer": "Your response text",
"cited_rules": ["rule_id_1", "rule_id_2"],
"confidence": 0.0-1.0,
"needs_human": boolean
}
Higher confidence (0.8-1.0) when rules clearly answer the question.
Lower confidence (0.3-0.7) when rules partially address the question or are ambiguous.
Very low confidence (0.0-0.2) when rules don't address the question at all.
"""
class OpenRouterClient:
    """Client for OpenRouter API.

    Builds a prompt from retrieved rules plus recent conversation turns,
    calls the chat-completions endpoint, and parses the model's JSON reply
    into a ChatResponse (with fallbacks for malformed output).
    """
    def __init__(self):
        """Initialize the client.

        Raises:
            ValueError: if OPENROUTER_API_KEY is not configured.
        """
        self.api_key = settings.openrouter_api_key
        if not self.api_key:
            raise ValueError("OPENROUTER_API_KEY is required")
        self.model = settings.openrouter_model
        self.base_url = "https://openrouter.ai/api/v1/chat/completions"
    async def generate_response(
        self,
        question: str,
        rules: list[RuleSearchResult],
        conversation_history: Optional[list[dict]] = None,
    ) -> ChatResponse:
        """Generate a response using the LLM with retrieved rules as context.

        Args:
            question: The user's rules question.
            rules: Vector-search hits to ground the answer in.
            conversation_history: Optional prior turns in OpenAI
                {"role", "content"} format; only the last 6 are sent.

        Returns:
            ChatResponse with empty conversation_id/message_id — the
            caller fills those in.

        Raises:
            RuntimeError: on a non-200 response from OpenRouter.
        """
        # Build context from rules
        rules_context = "\n\n".join(
            [f"Rule {r.rule_id}: {r.title}\n{r.content}" for r in rules]
        )
        if rules:
            context_msg = (
                f"Here are the relevant rules for the question:\n\n{rules_context}"
            )
        else:
            context_msg = "No relevant rules were found in the knowledge base."
        # Build conversation history
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        if conversation_history:
            # Add last few turns of conversation (limit to avoid token overflow)
            messages.extend(
                conversation_history[-6:]
            )  # Last 3 exchanges (user+assistant)
        # Add current question with context
        user_message = f"{context_msg}\n\nUser question: {question}\n\nAnswer the question based on the rules provided."
        messages.append({"role": "user", "content": user_message})
        # Call OpenRouter API
        async with httpx.AsyncClient(timeout=120.0) as client:
            response = await client.post(
                self.base_url,
                headers={
                    "Authorization": f"Bearer {self.api_key}",
                    "Content-Type": "application/json",
                },
                json={
                    "model": self.model,
                    "messages": messages,
                    "temperature": 0.3,
                    "max_tokens": 1000,
                    "top_p": 0.9,
                },
            )
            if response.status_code != 200:
                error_detail = response.text
                raise RuntimeError(
                    f"OpenRouter API error: {response.status_code} - {error_detail}"
                )
            result = response.json()
            content = result["choices"][0]["message"]["content"]
            # Parse the JSON response
            try:
                # Extract JSON from response (LLM might add markdown formatting)
                if "```json" in content:
                    json_str = content.split("```json")[1].split("```")[0].strip()
                else:
                    json_str = content.strip()
                parsed = json.loads(json_str)
                cited_rules = parsed.get("cited_rules", [])
                if not cited_rules and rules:
                    # Fallback: extract rule IDs from the text if not properly returned
                    # NOTE(review): function-local import; fine, but could
                    # live at module top level with the other imports.
                    import re
                    rule_ids = re.findall(
                        r"Rule\s+([\d\.\(\)a-b]+)", parsed.get("answer", "")
                    )
                    # set() dedupes but loses the order rules were cited in.
                    cited_rules = list(set(rule_ids))
                return ChatResponse(
                    response=parsed["answer"],
                    conversation_id="",  # Will be set by caller
                    message_id="",  # Will be set by caller
                    cited_rules=cited_rules,
                    confidence=float(parsed.get("confidence", 0.5)),
                    needs_human=bool(parsed.get("needs_human", False)),
                )
            except (json.JSONDecodeError, KeyError) as e:
                # If parsing fails, return what we can extract
                # (`e` is intentionally unused; raw content becomes the answer).
                return ChatResponse(
                    response=content,
                    conversation_id="",
                    message_id="",
                    cited_rules=[],
                    confidence=0.5,
                    needs_human=False,
                )
class MockLLMClient:
    """Stand-in LLM client for tests — never touches the network."""
    async def generate_response(
        self,
        question: str,
        rules: list[RuleSearchResult],
        conversation_history: Optional[list[dict]] = None,
    ) -> ChatResponse:
        """Return a canned ChatResponse derived only from the given rules.

        With rules: full confidence, all rule IDs cited. Without rules:
        zero confidence and needs_human set.
        """
        cited = [r.rule_id for r in rules]
        if cited:
            answer = f"Based on rule(s) {', '.join(cited)}, here's what you need to know..."
        else:
            answer = "I don't have a rule that addresses this question. You should ask a league administrator."
        return ChatResponse(
            response=answer,
            conversation_id="",
            message_id="",
            cited_rules=cited,
            confidence=1.0 if cited else 0.0,
            needs_human=not rules,
        )
def get_llm_client(use_mock: bool = False):
    """Select the LLM client: mock when requested or when no API key is set."""
    want_real = not use_mock and bool(settings.openrouter_api_key)
    return OpenRouterClient() if want_real else MockLLMClient()

View File

@ -1,198 +0,0 @@
"""FastAPI application for Strat-O-Matic rules chatbot."""
from contextlib import asynccontextmanager
from typing import Optional
import uuid
from fastapi import FastAPI, HTTPException, Depends
import uvicorn
import sqlalchemy as sa
from .config import settings
from .models import ChatRequest, ChatResponse
from .vector_store import VectorStore
from .database import ConversationManager, get_conversation_manager
from .llm import get_llm_client
from .gitea import GiteaClient
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application lifespan - startup and shutdown.

    Builds the vector store, database manager, LLM client, and (optional)
    Gitea client once at startup and parks them on app.state for handlers.
    """
    # Startup
    print("Initializing Strat-Chatbot...")
    # Initialize vector store
    # NOTE(review): hardcodes data_dir / "chroma" instead of using
    # settings.chroma_dir — the two diverge if CHROMA_DIR is overridden.
    chroma_dir = settings.data_dir / "chroma"
    vector_store = VectorStore(chroma_dir, settings.embedding_model)
    print(f"Vector store ready at {chroma_dir} ({vector_store.count()} rules loaded)")
    # Initialize database
    db_manager = ConversationManager(settings.db_url)
    await db_manager.init_db()
    print("Database initialized")
    # Initialize LLM client
    # Falls back to the mock client whenever no API key is configured.
    llm_client = get_llm_client(use_mock=not settings.openrouter_api_key)
    print(f"LLM client ready (model: {settings.openrouter_model})")
    # Initialize Gitea client
    gitea_client = GiteaClient() if settings.gitea_token else None
    # Store in app state
    app.state.vector_store = vector_store
    app.state.db_manager = db_manager
    app.state.llm_client = llm_client
    app.state.gitea_client = gitea_client
    print("Strat-Chatbot ready!")
    yield
    # Shutdown
    print("Shutting down...")
# FastAPI application; `lifespan` wires the vector store, DB manager,
# LLM client, and Gitea client into app.state before serving.
app = FastAPI(
    title="Strat-Chatbot",
    description="Strat-O-Matic rules Q&A API",
    version="0.1.0",
    lifespan=lifespan,
)
@app.get("/health")
async def health_check():
    """Report service health plus basic vector-store statistics."""
    store: VectorStore = app.state.vector_store
    stats = store.get_stats()
    return {
        "status": "healthy",
        "rules_count": stats["total_rules"],
        "sections": stats["sections"],
    }
@app.post("/chat", response_model=ChatResponse)
async def chat(
    request: ChatRequest,
    db_manager: ConversationManager = Depends(get_conversation_manager),
):
    """Handle chat requests from Discord.

    Flow: persist the user message, retrieve relevant rules, ask the LLM,
    persist the assistant reply, and (best-effort) open a Gitea issue when
    the answer needs human review or confidence is low.

    Raises:
        HTTPException: 500 with a generic detail on any processing failure.
            The real exception is logged server-side and never sent to the
            client (previously ``detail=str(e)`` leaked internals).
    """
    logger = logging.getLogger(__name__)
    vector_store: VectorStore = app.state.vector_store
    llm_client = app.state.llm_client
    gitea_client = app.state.gitea_client
    # Without an API key we cannot answer — return an explanatory response
    # flagged needs_human instead of erroring.
    if not settings.openrouter_api_key:
        return ChatResponse(
            response="⚠️ OpenRouter API key not configured. Set OPENROUTER_API_KEY environment variable.",
            conversation_id=request.conversation_id or str(uuid.uuid4()),
            message_id=str(uuid.uuid4()),
            cited_rules=[],
            confidence=0.0,
            needs_human=True,
        )
    # Get or create conversation
    conversation_id = await db_manager.get_or_create_conversation(
        user_id=request.user_id,
        channel_id=request.channel_id,
        conversation_id=request.conversation_id,
    )
    # Save user message before any fallible work so it is never lost.
    user_message_id = await db_manager.add_message(
        conversation_id=conversation_id,
        content=request.message,
        is_user=True,
        parent_id=request.parent_message_id,
    )
    try:
        # Search for relevant rules
        search_results = vector_store.search(
            query=request.message, top_k=settings.top_k_rules
        )
        # Get conversation history for context
        history = await db_manager.get_conversation_history(conversation_id, limit=10)
        # Generate response from LLM
        response = await llm_client.generate_response(
            question=request.message, rules=search_results, conversation_history=history
        )
        # Save assistant message
        assistant_message_id = await db_manager.add_message(
            conversation_id=conversation_id,
            content=response.response,
            is_user=False,
            parent_id=user_message_id,
        )
        # If needs human or confidence is low, create a Gitea issue.
        # Best-effort only: a tracker failure must not fail the chat.
        if gitea_client and (response.needs_human or response.confidence < 0.4):
            try:
                issue_url = await gitea_client.create_unanswered_issue(
                    question=request.message,
                    user_id=request.user_id,
                    channel_id=request.channel_id,
                    attempted_rules=[r.rule_id for r in search_results],
                    conversation_id=conversation_id,
                )
                logger.info("Created Gitea issue: %s", issue_url)
            except Exception:
                logger.exception("Failed to create Gitea issue")
        # Build final response
        return ChatResponse(
            response=response.response,
            conversation_id=conversation_id,
            message_id=assistant_message_id,
            parent_message_id=user_message_id,
            cited_rules=response.cited_rules,
            confidence=response.confidence,
            needs_human=response.needs_human,
        )
    except Exception as e:
        # Log full traceback server-side; the HTTP detail stays generic so
        # paths, keys and hostnames never reach the client.
        logger.exception("Error processing chat request")
        raise HTTPException(status_code=500, detail="Internal server error") from e
@app.get("/stats")
async def stats():
    """Get statistics about the knowledge base and system."""
    store: VectorStore = app.state.vector_store
    conversations: ConversationManager = app.state.db_manager

    # Knowledge-base side
    kb = store.get_stats()

    # Database side: two COUNT(*) queries in a single session
    async with conversations.async_session() as session:
        conv_result = await session.execute(
            sa.text("SELECT COUNT(*) FROM conversations")
        )
        msg_result = await session.execute(sa.text("SELECT COUNT(*) FROM messages"))
        conversation_total = conv_result.scalar() or 0
        message_total = msg_result.scalar() or 0

    return {
        "knowledge_base": kb,
        "conversations": {
            "total": conversation_total,
            "total_messages": message_total,
        },
        "config": {
            "openrouter_model": settings.openrouter_model,
            "top_k_rules": settings.top_k_rules,
            "embedding_model": settings.embedding_model,
        },
    }
# Dev entry point: `python app/main.py` serves with auto-reload.
# Production uses the uvicorn command from docker-compose instead.
if __name__ == "__main__":
    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)

View File

@ -1,100 +0,0 @@
"""Data models for rules and conversations."""
from pydantic import BaseModel, Field
from typing import Optional
from datetime import datetime
class RuleMetadata(BaseModel):
    """Frontmatter metadata for a rule document (parsed from markdown YAML)."""

    # Identifiers mirror the rulebook numbering, e.g. "5.2.1(b)".
    rule_id: str = Field(..., description="Unique rule identifier, e.g. '5.2.1(b)'")
    title: str = Field(..., description="Rule title")
    section: str = Field(..., description="Section/category name")
    parent_rule: Optional[str] = Field(
        None, description="Parent rule ID for hierarchical rules"
    )
    # Defaults to the current date at model-creation time (naive local clock).
    last_updated: str = Field(
        default_factory=lambda: datetime.now().strftime("%Y-%m-%d"),
        description="Last update date",
    )
    page_ref: Optional[str] = Field(
        None, description="Reference to page number in rulebook"
    )
class RuleDocument(BaseModel):
    """Complete rule document with metadata and content."""

    metadata: RuleMetadata
    content: str = Field(..., description="Rule text and examples")
    source_file: str = Field(..., description="Source file path")
    # Optional pre-computed embedding; None until an embedder fills it in.
    embedding: Optional[list[float]] = None

    def to_chroma_metadata(self) -> dict:
        """Convert to ChromaDB metadata format.

        Flattens nested metadata into a single dict; optional fields
        (parent_rule, page_ref) are coerced from None to "" — presumably
        because the metadata store rejects null values (confirm).
        """
        return {
            "rule_id": self.metadata.rule_id,
            "title": self.metadata.title,
            "section": self.metadata.section,
            "parent_rule": self.metadata.parent_rule or "",
            "page_ref": self.metadata.page_ref or "",
            "last_updated": self.metadata.last_updated,
            "source_file": self.source_file,
        }
class Conversation(BaseModel):
    """Conversation session."""

    id: str
    user_id: str  # Discord user ID
    channel_id: str  # Discord channel ID
    # Timestamps default to naive local time at model creation.
    created_at: datetime = Field(default_factory=datetime.now)
    last_activity: datetime = Field(default_factory=datetime.now)
class Message(BaseModel):
    """Individual message in a conversation."""

    id: str
    conversation_id: str
    content: str
    # True for user-authored messages, False for assistant replies.
    is_user: bool
    # Optional threading: ID of the message this one replies to.
    parent_id: Optional[str] = None
    created_at: datetime = Field(default_factory=datetime.now)
class ChatRequest(BaseModel):
    """Incoming chat request from Discord."""

    message: str
    # None when no existing conversation is being continued.
    conversation_id: Optional[str] = None
    parent_message_id: Optional[str] = None
    user_id: str
    channel_id: str
class ChatResponse(BaseModel):
    """Response to chat request."""

    response: str
    conversation_id: str
    message_id: str
    parent_message_id: Optional[str] = None
    cited_rules: list[str] = Field(default_factory=list)
    # Validated to lie in [0, 1]; out-of-range values raise at construction.
    confidence: float = Field(..., ge=0.0, le=1.0)
    needs_human: bool = Field(
        default=False,
        description="Whether the question needs human review (unanswered)",
    )
class RuleSearchResult(BaseModel):
    """Result from vector search."""

    rule_id: str
    title: str
    content: str
    section: str
    # Similarity score; Field validation enforces the [0, 1] range.
    similarity: float = Field(..., ge=0.0, le=1.0)

View File

@ -1,168 +0,0 @@
"""ChromaDB vector store for rule embeddings."""
from pathlib import Path
from typing import Optional
import chromadb
from chromadb.config import Settings as ChromaSettings
from sentence_transformers import SentenceTransformer
import numpy as np
from .config import settings
from .models import RuleDocument, RuleSearchResult
class VectorStore:
    """Wrapper around ChromaDB for rule retrieval.

    Persists a single collection ("rules", cosine space) under *persist_dir*
    and embeds text with a SentenceTransformer model.
    """

    def __init__(self, persist_dir: Path, embedding_model: str):
        """Initialize vector store with embedding model.

        Args:
            persist_dir: Directory for Chroma's on-disk data (created if missing).
            embedding_model: SentenceTransformer model name or path.
        """
        self.persist_dir = Path(persist_dir)
        self.persist_dir.mkdir(parents=True, exist_ok=True)
        # FIX: the previous code also passed is_persist_directory_actually_writable,
        # which is not a recognised chromadb Settings field and is rejected by
        # its settings validation. Only telemetry needs disabling here.
        chroma_settings = ChromaSettings(anonymized_telemetry=False)
        self.client = chromadb.PersistentClient(
            path=str(self.persist_dir), settings=chroma_settings
        )
        self.embedding_model = SentenceTransformer(embedding_model)

    def get_collection(self):
        """Get or create the rules collection (cosine distance space)."""
        return self.client.get_or_create_collection(
            name="rules", metadata={"hnsw:space": "cosine"}
        )

    def add_document(self, doc: RuleDocument) -> None:
        """Add a single rule document to the vector store."""
        embedding = self.embedding_model.encode(doc.content).tolist()
        collection = self.get_collection()
        collection.add(
            ids=[doc.metadata.rule_id],
            embeddings=[embedding],
            documents=[doc.content],
            metadatas=[doc.to_chroma_metadata()],
        )

    def add_documents(self, docs: list[RuleDocument]) -> None:
        """Add multiple documents in batch (one encode call for all texts)."""
        if not docs:
            return
        ids = [doc.metadata.rule_id for doc in docs]
        contents = [doc.content for doc in docs]
        embeddings = self.embedding_model.encode(contents).tolist()
        metadatas = [doc.to_chroma_metadata() for doc in docs]
        collection = self.get_collection()
        collection.add(
            ids=ids, embeddings=embeddings, documents=contents, metadatas=metadatas
        )

    def search(
        self, query: str, top_k: int = 10, section_filter: Optional[str] = None
    ) -> list[RuleSearchResult]:
        """Search for relevant rules using semantic similarity.

        Args:
            query: Free-text question to embed and match.
            top_k: Maximum number of results.
            section_filter: Optional exact-match filter on the section field.
        """
        query_embedding = self.embedding_model.encode(query).tolist()
        collection = self.get_collection()
        where = None
        if section_filter:
            where = {"section": section_filter}
        results = collection.query(
            query_embeddings=[query_embedding],
            n_results=top_k,
            where=where,
            include=["documents", "metadatas", "distances"],
        )
        # query() nests results one list per query embedding — hence the
        # leading [0] on every access below.
        search_results = []
        if results and results["documents"] and results["documents"][0]:
            for i in range(len(results["documents"][0])):
                metadata = results["metadatas"][0][i]
                distance = results["distances"][0][i]
                similarity = max(
                    0.0, min(1.0, 1 - distance)
                )  # Clamp to [0, 1]: cosine distance ranges 0–2
                search_results.append(
                    RuleSearchResult(
                        rule_id=metadata["rule_id"],
                        title=metadata["title"],
                        content=results["documents"][0][i],
                        section=metadata["section"],
                        similarity=similarity,
                    )
                )
        return search_results

    def delete_rule(self, rule_id: str) -> None:
        """Remove a rule by its ID."""
        collection = self.get_collection()
        collection.delete(ids=[rule_id])

    def clear_all(self) -> None:
        """Delete all rules from the collection."""
        self.client.delete_collection("rules")
        self.get_collection()  # Recreate empty collection

    def get_rule(self, rule_id: str) -> Optional[RuleSearchResult]:
        """Retrieve a specific rule by ID, or None if it does not exist."""
        collection = self.get_collection()
        result = collection.get(ids=[rule_id], include=["documents", "metadatas"])
        # FIX: collection.get() returns FLAT lists (unlike query(), which nests
        # one list per query). The old code used query()-style [0][0] indexing,
        # which int-indexed the metadata dict and returned only the first
        # character of the document. list_all_rules() below already used the
        # correct flat indexing.
        if result and result["documents"]:
            metadata = result["metadatas"][0]
            return RuleSearchResult(
                rule_id=metadata["rule_id"],
                title=metadata["title"],
                content=result["documents"][0],
                section=metadata["section"],
                similarity=1.0,
            )
        return None

    def list_all_rules(self) -> list[RuleSearchResult]:
        """Return all rules in the store (similarity fixed at 1.0)."""
        collection = self.get_collection()
        result = collection.get(include=["documents", "metadatas"])
        all_rules = []
        if result and result["documents"]:
            for i in range(len(result["documents"])):
                metadata = result["metadatas"][i]
                all_rules.append(
                    RuleSearchResult(
                        rule_id=metadata["rule_id"],
                        title=metadata["title"],
                        content=result["documents"][i],
                        section=metadata["section"],
                        similarity=1.0,
                    )
                )
        return all_rules

    def count(self) -> int:
        """Return the number of rules in the store."""
        collection = self.get_collection()
        return collection.count()

    def get_stats(self) -> dict:
        """Get statistics about the vector store (totals and per-section counts)."""
        all_rules = self.list_all_rules()
        sections = {}
        for rule in all_rules:
            sections[rule.section] = sections.get(rule.section, 0) + 1
        return {
            "total_rules": len(all_rules),
            "sections": sections,
            "persist_directory": str(self.persist_dir),
        }

View File

@ -20,7 +20,6 @@ services:
dockerfile: Dockerfile dockerfile: Dockerfile
volumes: volumes:
- ./data:/app/data - ./data:/app/data
- ./app:/app/app
ports: ports:
- "127.0.0.1:8000:8000" - "127.0.0.1:8000:8000"
environment: environment:
@ -40,7 +39,7 @@ services:
depends_on: depends_on:
chroma: chroma:
condition: service_healthy condition: service_healthy
command: uvicorn app.main:app --host 0.0.0.0 --port 8000 command: uvicorn main:app --host 0.0.0.0 --port 8000
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"] test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 15s interval: 15s
@ -54,23 +53,24 @@ services:
dockerfile: Dockerfile dockerfile: Dockerfile
volumes: volumes:
- ./data:/app/data - ./data:/app/data
- ./app:/app/app
environment: environment:
# The bot now calls ChatService directly — needs its own adapter config
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY:-} - OPENROUTER_API_KEY=${OPENROUTER_API_KEY:-}
- OPENROUTER_MODEL=${OPENROUTER_MODEL:-stepfun/step-3.5-flash:free} - OPENROUTER_MODEL=${OPENROUTER_MODEL:-stepfun/step-3.5-flash:free}
- DISCORD_BOT_TOKEN=${DISCORD_BOT_TOKEN:-} - DISCORD_BOT_TOKEN=${DISCORD_BOT_TOKEN:-}
- DISCORD_GUILD_ID=${DISCORD_GUILD_ID:-} - DISCORD_GUILD_ID=${DISCORD_GUILD_ID:-}
- API_BASE_URL=http://api:8000 - GITEA_TOKEN=${GITEA_TOKEN:-}
- API_SECRET=${API_SECRET:-} - GITEA_OWNER=${GITEA_OWNER:-cal}
- GITEA_REPO=${GITEA_REPO:-strat-chatbot}
- DATA_DIR=/app/data
- RULES_DIR=/app/data/rules
- CHROMA_DIR=/app/data/chroma
- DB_URL=sqlite+aiosqlite:///./data/conversations.db
- CONVERSATION_TTL=1800
- TOP_K_RULES=10
- EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
depends_on: depends_on:
api: chroma:
condition: service_healthy condition: service_healthy
# Override the default command to run the Discord bot command: python -m run_discord
command: >
sh -c "
echo 'Waiting for API to be ready...' &&
while ! curl -s http://api:8000/health > /dev/null; do sleep 2; done &&
echo 'API ready, starting Discord bot...' &&
python -m app.discord_bot
"
restart: unless-stopped restart: unless-stopped

View File

@ -9,7 +9,6 @@ dependencies = [
"discord.py>=2.5.0", "discord.py>=2.5.0",
"chromadb>=0.5.0", "chromadb>=0.5.0",
"sentence-transformers>=3.0.0", "sentence-transformers>=3.0.0",
"openai>=1.0.0",
"python-dotenv>=1.0.0", "python-dotenv>=1.0.0",
"sqlalchemy>=2.0.0", "sqlalchemy>=2.0.0",
"aiosqlite>=0.19.0", "aiosqlite>=0.19.0",
@ -31,7 +30,7 @@ requires = ["hatchling"]
build-backend = "hatchling.build" build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel] [tool.hatch.build.targets.wheel]
packages = ["domain", "adapters", "config", "app"] packages = ["domain", "adapters", "config"]
[tool.black] [tool.black]
line-length = 88 line-length = 88

86
run_discord.py Normal file
View File

@ -0,0 +1,86 @@
"""Entry point for running the Discord bot with direct ChatService injection.
This script constructs the same adapter stack as the FastAPI app but runs
the Discord bot instead of a web server. The bot calls ChatService directly
no HTTP roundtrip to the API.
"""
import asyncio
import logging
from adapters.outbound.chroma_rules import ChromaRuleRepository
from adapters.outbound.gitea_issues import GiteaIssueTracker
from adapters.outbound.openrouter import OpenRouterLLM
from adapters.outbound.sqlite_convos import SQLiteConversationStore
from adapters.inbound.discord_bot import run_bot
from config.settings import Settings
from domain.services import ChatService
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
async def _init_and_run() -> None:
    """Construct the adapter stack and hand control to the Discord bot.

    Mirrors the FastAPI app's wiring: Chroma rule repository, SQLite
    conversation store, optional OpenRouter LLM, optional Gitea issue
    tracker — all injected into one ChatService.

    Raises:
        ValueError: If DISCORD_BOT_TOKEN is not configured.
    """
    settings = Settings()
    # Fail fast: the bot cannot start without a token.
    if not settings.discord_bot_token:
        raise ValueError("DISCORD_BOT_TOKEN is required")
    logger.info("Initialising adapters for Discord bot...")
    # Vector store
    chroma_repo = ChromaRuleRepository(
        persist_dir=settings.chroma_dir,
        embedding_model=settings.embedding_model,
    )
    logger.info("ChromaDB ready (%d rules)", chroma_repo.count())
    # Conversation store
    conv_store = SQLiteConversationStore(db_url=settings.db_url)
    await conv_store.init_db()
    logger.info("SQLite conversation store ready")
    # LLM — optional: without an API key the service is wired with llm=None.
    llm = None
    if settings.openrouter_api_key:
        llm = OpenRouterLLM(
            api_key=settings.openrouter_api_key,
            model=settings.openrouter_model,
        )
        logger.info("OpenRouter LLM ready (model: %s)", settings.openrouter_model)
    else:
        logger.warning("OPENROUTER_API_KEY not set — LLM disabled")
    # Gitea — optional issue tracker for unanswered questions.
    gitea = None
    if settings.gitea_token:
        gitea = GiteaIssueTracker(
            token=settings.gitea_token,
            owner=settings.gitea_owner,
            repo=settings.gitea_repo,
            base_url=settings.gitea_base_url,
        )
    # Service
    service = ChatService(
        rules=chroma_repo,
        llm=llm,  # type: ignore[arg-type]
        conversations=conv_store,
        issues=gitea,
        top_k_rules=settings.top_k_rules,
    )
    logger.info("Starting Discord bot...")
    # NOTE(review): run_bot is called WITHOUT `await` inside this coroutine.
    # If run_bot is a coroutine function, this creates a never-awaited
    # coroutine and the bot never starts. Confirm run_bot is synchronous —
    # and note that if it wraps discord.py's blocking Client.run(), calling
    # it from inside asyncio.run() would conflict with the running loop.
    run_bot(
        token=settings.discord_bot_token,
        chat_service=service,
        guild_id=settings.discord_guild_id,
    )
def main() -> None:
    """Synchronous entry point: run the async wiring in a fresh event loop."""
    asyncio.run(_init_and_run())


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,168 @@
"""Tests for the Discord inbound adapter.
Discord.py makes it hard to test event handlers directly (they require a
running gateway connection). Instead, we test the *pure logic* that the
adapter extracts into standalone functions / methods:
- build_answer_embed: constructs the Discord embed from a ChatResult
- build_error_embed: constructs a safe error embed (no leaked details)
- parse_conversation_id: extracts conversation UUID from footer text
- truncate_response: handles Discord's 4000-char embed limit
The bot class itself (StratChatbot) is tested for construction, dependency
injection, and configuration not for full gateway event handling.
"""
import pytest
from domain.models import ChatResult
from adapters.inbound.discord_bot import (
build_answer_embed,
build_error_embed,
parse_conversation_id,
FOOTER_PREFIX,
)
# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------
def _make_result(**overrides) -> ChatResult:
    """Create a ChatResult with sensible defaults, overridable per-test."""
    base = dict(
        response="Based on Rule 5.2.1(b), runners can steal.",
        conversation_id="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
        message_id="msg-123",
        parent_message_id="msg-000",
        cited_rules=["5.2.1(b)"],
        confidence=0.9,
        needs_human=False,
    )
    base.update(overrides)
    return ChatResult(**base)
# ---------------------------------------------------------------------------
# build_answer_embed
# ---------------------------------------------------------------------------
class TestBuildAnswerEmbed:
    """build_answer_embed turns a ChatResult into a Discord Embed."""

    def test_description_contains_response(self):
        result = _make_result()
        embed = build_answer_embed(result, title="Rules Answer")
        assert result.response in embed.description

    def test_footer_contains_full_conversation_id(self):
        # The footer is how follow-up replies recover the conversation,
        # so the full UUID must survive untruncated.
        result = _make_result()
        embed = build_answer_embed(result, title="Rules Answer")
        assert result.conversation_id in embed.footer.text

    def test_footer_starts_with_prefix(self):
        result = _make_result()
        embed = build_answer_embed(result, title="Rules Answer")
        assert embed.footer.text.startswith(FOOTER_PREFIX)

    def test_cited_rules_field_present(self):
        result = _make_result(cited_rules=["5.2.1(b)", "3.1"])
        embed = build_answer_embed(result, title="Rules Answer")
        field_names = [f.name for f in embed.fields]
        assert any("Cited" in name for name in field_names)
        # Both rule IDs should be in the field value
        rules_field = [f for f in embed.fields if "Cited" in f.name][0]
        assert "5.2.1(b)" in rules_field.value
        assert "3.1" in rules_field.value

    def test_no_cited_rules_field_when_empty(self):
        result = _make_result(cited_rules=[])
        embed = build_answer_embed(result, title="Rules Answer")
        field_names = [f.name for f in embed.fields]
        assert not any("Cited" in name for name in field_names)

    def test_low_confidence_adds_warning_field(self):
        # 0.2 sits below the adapter's CONFIDENCE_THRESHOLD (0.4).
        result = _make_result(confidence=0.2)
        embed = build_answer_embed(result, title="Rules Answer")
        field_names = [f.name for f in embed.fields]
        assert any("Confidence" in name for name in field_names)

    def test_high_confidence_no_warning_field(self):
        result = _make_result(confidence=0.9)
        embed = build_answer_embed(result, title="Rules Answer")
        field_names = [f.name for f in embed.fields]
        assert not any("Confidence" in name for name in field_names)

    def test_response_truncated_at_4000_chars(self):
        # MAX_EMBED_DESCRIPTION (4000) caps the description length.
        long_response = "x" * 5000
        result = _make_result(response=long_response)
        embed = build_answer_embed(result, title="Rules Answer")
        assert len(embed.description) <= 4000

    def test_truncation_notice_appended(self):
        long_response = "x" * 5000
        result = _make_result(response=long_response)
        embed = build_answer_embed(result, title="Rules Answer")
        assert "truncated" in embed.description.lower()

    def test_custom_title(self):
        result = _make_result()
        embed = build_answer_embed(result, title="Follow-up Answer")
        assert embed.title == "Follow-up Answer"
# ---------------------------------------------------------------------------
# build_error_embed
# ---------------------------------------------------------------------------
class TestBuildErrorEmbed:
    """build_error_embed creates a safe error embed with no leaked details."""

    def test_does_not_contain_exception_text(self):
        # Exception text with secret-looking fragments must never surface.
        exc = RuntimeError("API key abc123 is invalid for https://internal.host")
        embed = build_error_embed(exc)
        for fragment in ("abc123", "internal.host"):
            assert fragment not in embed.description

    def test_has_generic_message(self):
        embed = build_error_embed(RuntimeError("anything"))
        text = embed.description.lower()
        assert "try again" in text or "went wrong" in text

    def test_title_indicates_error(self):
        embed = build_error_embed(ValueError("x"))
        assert any(word in embed.title for word in ("Error", "error"))
# ---------------------------------------------------------------------------
# parse_conversation_id
# ---------------------------------------------------------------------------
class TestParseConversationId:
    """parse_conversation_id extracts the full UUID from embed footer text."""

    def test_parses_valid_footer(self):
        text = "conv:aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee | Reply to ask a follow-up"
        expected = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
        assert parse_conversation_id(text) == expected

    def test_returns_none_for_missing_prefix(self):
        assert parse_conversation_id("no prefix here") is None

    def test_returns_none_for_empty_string(self):
        assert parse_conversation_id("") is None

    def test_returns_none_for_none_input(self):
        assert parse_conversation_id(None) is None

    def test_returns_none_for_malformed_footer(self):
        # Prefix present but no ID following it.
        assert parse_conversation_id("conv:") is None

    def test_handles_no_pipe_separator(self):
        assert parse_conversation_id("conv:some-uuid-value") == "some-uuid-value"

View File

@ -1,63 +0,0 @@
"""Basic test to verify the vector store and ingestion."""
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent / "app"))
from app.config import settings
from app.vector_store import VectorStore
from app.models import RuleDocument, RuleMetadata
def test_ingest_example_rule():
    """Test ingesting the example rule and searching.

    Integration-style test: reads data/rules/example_rule.md from disk,
    ingests it into a throwaway Chroma collection (data/chroma_test),
    searches it, and cleans up.

    NOTE(review): if the example file is missing this returns early and the
    test silently PASSES — consider a skip marker so the gap is visible.
    """
    # Override settings for test
    test_data_dir = Path(__file__).parent.parent / "data"
    test_chroma_dir = test_data_dir / "chroma_test"
    test_rules_dir = test_data_dir / "rules"
    vs = VectorStore(test_chroma_dir, settings.embedding_model)
    # Start from a clean collection so the count assertion below holds.
    vs.clear_all()
    # Load example rule
    example_rule_path = test_rules_dir / "example_rule.md"
    if not example_rule_path.exists():
        print(f"Example rule not found at {example_rule_path}, skipping test")
        return
    content = example_rule_path.read_text(encoding="utf-8")
    import re
    import yaml
    # Split YAML frontmatter (between --- fences) from the markdown body.
    pattern = r"^---\s*\n(.*?)\n---\s*\n(.*)$"
    match = re.match(pattern, content, re.DOTALL)
    if match:
        metadata_dict = yaml.safe_load(match.group(1))
        body = match.group(2).strip()
        metadata = RuleMetadata(**metadata_dict)
        doc = RuleDocument(
            metadata=metadata, content=body, source_file=str(example_rule_path)
        )
        vs.add_document(doc)
    # Verify count
    assert vs.count() == 1, f"Expected 1 rule, got {vs.count()}"
    # Search for relevant content
    results = vs.search("runner steal base", top_k=5)
    assert len(results) > 0, "Expected at least one search result"
    assert (
        results[0].rule_id == "5.2.1(b)"
    ), f"Expected rule 5.2.1(b), got {results[0].rule_id}"
    print("✓ Test passed: Ingestion and search work correctly")
    print(f"  Found rule: {results[0].title}")
    print(f"  Similarity: {results[0].similarity:.2%}")
    # Cleanup
    vs.clear_all()


if __name__ == "__main__":
    test_ingest_example_rule()

124
uv.lock generated
View File

@ -540,15 +540,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/a7/17208c3b3f92319e7fad259f1c6d5a5baf8fd0654c54846ced329f83c3eb/discord_py-2.7.1-py3-none-any.whl", hash = "sha256:849dca2c63b171146f3a7f3f8acc04248098e9e6203412ce3cf2745f284f7439", size = 1227550, upload-time = "2026-03-03T18:40:44.492Z" }, { url = "https://files.pythonhosted.org/packages/f7/a7/17208c3b3f92319e7fad259f1c6d5a5baf8fd0654c54846ced329f83c3eb/discord_py-2.7.1-py3-none-any.whl", hash = "sha256:849dca2c63b171146f3a7f3f8acc04248098e9e6203412ce3cf2745f284f7439", size = 1227550, upload-time = "2026-03-03T18:40:44.492Z" },
] ]
[[package]]
name = "distro"
version = "1.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" },
]
[[package]] [[package]]
name = "durationpy" name = "durationpy"
version = "0.10" version = "0.10"
@ -996,91 +987,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
] ]
[[package]]
name = "jiter"
version = "0.13.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" },
{ url = "https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" },
{ url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" },
{ url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" },
{ url = "https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" },
{ url = "https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = "2026-02-02T12:35:44.928Z" },
{ url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" },
{ url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" },
{ url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" },
{ url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" },
{ url = "https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" },
{ url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" },
{ url = "https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" },
{ url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" },
{ url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" },
{ url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" },
{ url = "https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" },
{ url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" },
{ url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" },
{ url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" },
{ url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" },
{ url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" },
{ url = "https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" },
{ url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" },
{ url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" },
{ url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" },
{ url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" },
{ url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" },
{ url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" },
{ url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" },
{ url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" },
{ url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" },
{ url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" },
{ url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" },
{ url = "https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" },
{ url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" },
{ url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" },
{ url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" },
{ url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" },
{ url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" },
{ url = "https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" },
{ url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" },
{ url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" },
{ url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" },
{ url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" },
{ url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" },
{ url = "https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" },
{ url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" },
{ url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" },
{ url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" },
{ url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" },
{ url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" },
{ url = "https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" },
{ url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" },
{ url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" },
{ url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" },
{ url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" },
{ url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" },
{ url = "https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" },
{ url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" },
{ url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" },
{ url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" },
{ url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" },
{ url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" },
{ url = "https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" },
{ url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" },
{ url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" },
{ url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" },
{ url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" },
{ url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" },
{ url = "https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" },
{ url = "https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" },
{ url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" },
{ url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" },
{ url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" },
{ url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" },
{ url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" },
]
[[package]] [[package]]
name = "joblib" name = "joblib"
version = "1.5.3" version = "1.5.3"
@ -1734,25 +1640,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/d6/413e98ab666c6fb9e8be7d1c6eb3bd403b0bea1b8d42db066dab98c7df07/onnxruntime-1.24.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02aaf6ddfa784523b6873b4176a79d508e599efe12ab0ea1a3a6e7314408b7aa", size = 17240738, upload-time = "2026-03-05T17:18:15.203Z" }, { url = "https://files.pythonhosted.org/packages/a6/d6/413e98ab666c6fb9e8be7d1c6eb3bd403b0bea1b8d42db066dab98c7df07/onnxruntime-1.24.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02aaf6ddfa784523b6873b4176a79d508e599efe12ab0ea1a3a6e7314408b7aa", size = 17240738, upload-time = "2026-03-05T17:18:15.203Z" },
] ]
[[package]]
name = "openai"
version = "2.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "distro" },
{ name = "httpx" },
{ name = "jiter" },
{ name = "pydantic" },
{ name = "sniffio" },
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d7/91/2a06c4e9597c338cac1e5e5a8dd6f29e1836fc229c4c523529dca387fda8/openai-2.26.0.tar.gz", hash = "sha256:b41f37c140ae0034a6e92b0c509376d907f3a66109935fba2c1b471a7c05a8fb", size = 666702, upload-time = "2026-03-05T23:17:35.874Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/2e/3f73e8ca53718952222cacd0cf7eecc9db439d020f0c1fe7ae717e4e199a/openai-2.26.0-py3-none-any.whl", hash = "sha256:6151bf8f83802f036117f06cc8a57b3a4da60da9926826cc96747888b57f394f", size = 1136409, upload-time = "2026-03-05T23:17:34.072Z" },
]
[[package]] [[package]]
name = "opentelemetry-api" name = "opentelemetry-api"
version = "1.40.0" version = "1.40.0"
@ -2983,15 +2870,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
] ]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]] [[package]]
name = "sqlalchemy" name = "sqlalchemy"
version = "2.0.48" version = "2.0.48"
@ -3068,7 +2946,6 @@ dependencies = [
{ name = "discord-py" }, { name = "discord-py" },
{ name = "fastapi" }, { name = "fastapi" },
{ name = "httpx" }, { name = "httpx" },
{ name = "openai" },
{ name = "pydantic" }, { name = "pydantic" },
{ name = "pydantic-settings" }, { name = "pydantic-settings" },
{ name = "python-dotenv" }, { name = "python-dotenv" },
@ -3093,7 +2970,6 @@ requires-dist = [
{ name = "discord-py", specifier = ">=2.5.0" }, { name = "discord-py", specifier = ">=2.5.0" },
{ name = "fastapi", specifier = ">=0.115.0" }, { name = "fastapi", specifier = ">=0.115.0" },
{ name = "httpx", specifier = ">=0.27.0" }, { name = "httpx", specifier = ">=0.27.0" },
{ name = "openai", specifier = ">=1.0.0" },
{ name = "pydantic", specifier = ">=2.0.0" }, { name = "pydantic", specifier = ">=2.0.0" },
{ name = "pydantic-settings", specifier = ">=2.0.0" }, { name = "pydantic-settings", specifier = ">=2.0.0" },
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },