Catchup files

commit af49704272 (parent 5d841419b0)

DEPLOYMENT.md (new file, 99 lines)

# Paper Dynasty Deployment Guide

## Current Deployment Process (Shell Script)

### Quick Start

From the `discord-app` directory:

```bash
./deploy.sh 1.3.0
```

Or let it prompt you for the version:

```bash
./deploy.sh
```

### What It Does

1. **Validates version format** (X.Y or X.Y.Z)
2. **Builds Docker image** with both `latest` and version tags
3. **Pushes to Docker Hub** (manticorum67/paper-dynasty)
4. **Creates git tag** (v1.3.0)
5. **Deploys to sba-bots** via SSH
6. **Cleans up old images**
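
For reference, the list above boils down to roughly the following manual commands (a condensed sketch of what deploy.sh runs, using 1.3.0 as a stand-in version):

```bash
# Condensed sketch of the steps deploy.sh automates (1.3.0 is just an example version)
docker build -t manticorum67/paper-dynasty:latest -t manticorum67/paper-dynasty:1.3.0 .
docker push manticorum67/paper-dynasty:latest
docker push manticorum67/paper-dynasty:1.3.0
git tag -a v1.3.0 -m "Release v1.3.0"
git push origin v1.3.0
ssh sba-bots "cd /home/cal/container-data/paper-dynasty && docker-compose pull && docker-compose down && docker-compose up -d"
```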

### Version Numbering

- **Bug fixes**: Increment patch (1.2.0 → 1.2.1)
- **New features**: Increment minor (1.2.1 → 1.3.0)
- **Breaking changes**: Increment major (1.3.0 → 2.0.0)
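
If you ever want to script the bump itself, a minimal helper could look like this (illustrative only; `bump_patch` is not part of deploy.sh):

```bash
# Hypothetical helper, not part of deploy.sh: bump the patch component of X.Y[.Z]
bump_patch() {
  local major minor patch
  IFS=. read -r major minor patch <<< "$1"
  echo "${major}.${minor}.$(( ${patch:-0} + 1 ))"
}

bump_patch 1.2.0   # prints 1.2.1
```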

### Examples

```bash
# Deploy a bug fix
./deploy.sh 1.2.1

# Deploy a new feature
./deploy.sh 1.3.0

# Deploy with 'v' prefix (works either way)
./deploy.sh v1.3.0
```

### Rollback Process

If something goes wrong:

```bash
ssh sba-bots
cd /container-data/paper-dynasty

# Check available versions
docker image ls | grep paper-dynasty

# Update docker-compose.yml to use a specific version
# Change: image: manticorum67/paper-dynasty:latest
# To:     image: manticorum67/paper-dynasty:1.2.0

docker compose up -d
```

## Future: Automated CI/CD (GitHub Actions + Watchtower)

When ready to automate further, we'll:

1. Add a GitHub Actions workflow to auto-build on git tags
2. Install Watchtower on sba-bots to auto-deploy (see the sketch below)
3. Push tags and walk away ☕

The workflow files are ready in `.github/workflows/` when you're ready to switch.
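
The Watchtower side could be as simple as the run below; this is a sketch, and the container name (`paper-dynasty`) and the 5-minute polling interval are assumptions rather than settled config:

```bash
# Sketch only: run Watchtower on sba-bots, watching just the paper-dynasty container.
docker run -d \
  --name watchtower \
  -v /var/run/docker.sock:/var/run/docker.sock \
  containrrr/watchtower \
  --interval 300 \
  --cleanup \
  paper-dynasty
```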

## Troubleshooting

### "Permission denied" when running script

```bash
chmod +x deploy.sh
```
### "Docker build failed"
|
||||||
|
- Check Dockerfile syntax
|
||||||
|
- Ensure all required files are present
|
||||||
|
- Check Docker daemon is running
|
||||||
|
|
||||||
|
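
Two quick checks that usually narrow this down (a sketch; `:debug` is just a throwaway local tag):

```bash
docker info                                         # confirms the daemon is reachable
docker build -t manticorum67/paper-dynasty:debug .  # rebuilds locally to surface Dockerfile errors
```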
### "Failed to push to Docker Hub"
|
||||||
|
- Verify you're logged in: `docker login`
|
||||||
|
- Check Docker Hub credentials
|
||||||
|
|
||||||
|
### "SSH connection failed"
|
||||||
|
- Verify SSH key is configured: `ssh sba-bots`
|
||||||
|
- Check ~/.ssh/config has sba-bots entry
|
||||||
|
|
||||||
|
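
A minimal `~/.ssh/config` stanza might look like the following; the hostname, user, and key path are placeholders, not the real values for sba-bots:

```bash
# Placeholder values only: substitute the real host, user, and key for sba-bots.
cat >> ~/.ssh/config <<'EOF'
Host sba-bots
    HostName server.example.com
    User cal
    IdentityFile ~/.ssh/id_ed25519
EOF
```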

### Remote deployment fails but build succeeds

- SSH into sba-bots manually
- Check disk space: `df -h`
- Check that docker-compose.yml is present in `/container-data/paper-dynasty/`
- Run the docker compose commands manually to see the error (sketch below)
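
The manual sequence mirrors what deploy.sh runs over SSH (the `--tail` value is arbitrary):

```bash
ssh sba-bots
cd /container-data/paper-dynasty
df -h                                          # disk space
docker-compose pull                            # same steps deploy.sh runs remotely
docker-compose down && docker-compose up -d
docker-compose logs --tail=100 paper-dynasty   # recent logs from the bot container
```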

deploy.sh (new executable file, 181 lines)

#!/bin/bash
set -e # Exit on error

# Configuration
DOCKER_IMAGE="manticorum67/paper-dynasty"
REMOTE_HOST="sba-bots"
REMOTE_PATH="/home/cal/container-data/paper-dynasty"
DOCKERFILE_PATH="."
AUTO_YES=false

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Helper functions
log_info() {
    echo -e "${BLUE}ℹ️ $1${NC}"
}

log_success() {
    echo -e "${GREEN}✅ $1${NC}"
}

log_warning() {
    echo -e "${YELLOW}⚠️ $1${NC}"
}

log_error() {
    echo -e "${RED}❌ $1${NC}"
}

# Parse arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -y|--yes)
            AUTO_YES=true
            shift
            ;;
        *)
            VERSION=$1
            shift
            ;;
    esac
done

# Get version from argument or prompt
if [ -z "$VERSION" ]; then
    echo ""
    log_info "Recent version tags:"
    git tag -l "v*" | sort -V | tail -5
    echo ""
    read -p "Enter new version (e.g., 1.3.0 for features, 1.2.1 for bugfix): " VERSION
fi

# Remove 'v' prefix if provided
VERSION=${VERSION#v}

# Validate version format
if ! [[ $VERSION =~ ^[0-9]+\.[0-9]+(\.[0-9]+)?$ ]]; then
    log_error "Version must be in format X.Y or X.Y.Z (e.g., 1.3 or 1.3.0)"
    exit 1
fi

# Confirm deployment
echo ""
log_warning "About to deploy Paper Dynasty version ${VERSION}"
log_info "This will:"
echo "  1. Build Docker image with tags: latest and ${VERSION}"
echo "  2. Push both tags to Docker Hub (${DOCKER_IMAGE})"
echo "  3. Create git tag v${VERSION}"
echo "  4. Deploy to ${REMOTE_HOST}"
echo ""
if [ "$AUTO_YES" = false ]; then
    read -p "Continue? (y/N): " -n 1 -r
    echo ""
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        log_info "Deployment cancelled"
        exit 0
    fi
else
    log_info "Auto-confirmed with --yes flag"
fi

# Check if git tag already exists
if git rev-parse "v${VERSION}" >/dev/null 2>&1; then
    log_error "Git tag v${VERSION} already exists!"
    log_info "If you want to redeploy, delete the tag first: git tag -d v${VERSION} && git push origin :refs/tags/v${VERSION}"
    exit 1
fi

# Check for uncommitted changes
if ! git diff-index --quiet HEAD --; then
    log_warning "You have uncommitted changes!"
    git status --short
    echo ""
    if [ "$AUTO_YES" = false ]; then
        read -p "Continue anyway? (y/N): " -n 1 -r
        echo ""
        if [[ ! $REPLY =~ ^[Yy]$ ]]; then
            log_info "Deployment cancelled"
            exit 0
        fi
    else
        log_warning "Continuing with uncommitted changes (--yes flag)"
    fi
fi

echo ""
log_info "🏗️ Building Docker image for version ${VERSION}..."
if docker build -t $DOCKER_IMAGE:latest -t $DOCKER_IMAGE:$VERSION $DOCKERFILE_PATH; then
    log_success "Docker image built successfully"
else
    log_error "Docker build failed"
    exit 1
fi

echo ""
log_info "📦 Pushing images to Docker Hub..."
log_info "Pushing ${DOCKER_IMAGE}:latest..."
if docker push $DOCKER_IMAGE:latest; then
    log_success "Pushed latest tag"
else
    log_error "Failed to push latest tag"
    exit 1
fi

log_info "Pushing ${DOCKER_IMAGE}:${VERSION}..."
if docker push $DOCKER_IMAGE:$VERSION; then
    log_success "Pushed version ${VERSION} tag"
else
    log_error "Failed to push version tag"
    exit 1
fi

echo ""
log_info "🏷️ Creating git tag v${VERSION}..."
git tag -a "v${VERSION}" -m "Release v${VERSION}"
git push origin "v${VERSION}"
log_success "Git tag created and pushed"

echo ""
log_info "🚀 Deploying to ${REMOTE_HOST}..."
log_info "Running: cd ${REMOTE_PATH} && docker-compose pull"

if ssh $REMOTE_HOST "cd $REMOTE_PATH && docker-compose pull"; then
    log_success "Successfully pulled latest image"
else
    log_error "Failed to pull image on remote server"
    exit 1
fi

log_info "Running: docker-compose down && docker-compose up -d"
if ssh $REMOTE_HOST "cd $REMOTE_PATH && docker-compose down && docker-compose up -d"; then
    log_success "Container restarted successfully"
else
    log_error "Failed to restart container"
    log_warning "You may need to SSH in manually to fix: ssh ${REMOTE_HOST}"
    exit 1
fi

# Optional: Clean up old images on remote
log_info "Cleaning up old Docker images on remote..."
ssh $REMOTE_HOST "docker image prune -f" >/dev/null 2>&1 || true

echo ""
log_success "🎉 Deployment complete!"
echo ""
log_info "Version v${VERSION} is now running on ${REMOTE_HOST}"
log_info "Check logs with: ssh ${REMOTE_HOST} 'cd ${REMOTE_PATH} && docker-compose logs -f paper-dynasty'"
log_info "Check status with: ssh ${REMOTE_HOST} 'cd ${REMOTE_PATH} && docker-compose ps'"
echo ""
log_info "If you need to rollback:"
echo "  ssh ${REMOTE_HOST}"
echo "  cd ${REMOTE_PATH}"
echo "  # Find previous version: docker image ls | grep paper-dynasty"
echo "  # Edit docker-compose.yml to use specific version tag"
echo "  docker compose up -d"
echo ""

@@ -210,13 +210,25 @@ class SelectOpenPack(discord.ui.Select):
         p_query = await db_get('packs', params=params)
         if p_query['count'] == 0:
             logger.error(f'open-packs - no packs found with params: {params}')
-            raise ValueError(f'Unable to open packs')
+            await interaction.followup.send(
+                content='Unable to find the selected pack. Please contact an admin.',
+                ephemeral=True
+            )
+            return

         # Open the packs
-        if open_type == 'standard':
-            await open_st_pr_packs(p_query['packs'], self.owner_team, interaction)
-        elif open_type == 'choice':
-            await open_choice_pack(p_query['packs'][0], self.owner_team, interaction, cardset_id)
+        try:
+            if open_type == 'standard':
+                await open_st_pr_packs(p_query['packs'], self.owner_team, interaction)
+            elif open_type == 'choice':
+                await open_choice_pack(p_query['packs'][0], self.owner_team, interaction, cardset_id)
+        except Exception as e:
+            logger.error(f'Failed to open pack: {e}')
+            await interaction.followup.send(
+                content=f'Failed to open pack. Please contact an admin. Error: {str(e)}',
+                ephemeral=True
+            )
+            return


 class SelectPaperdexCardset(discord.ui.Select):

@@ -1711,7 +1711,8 @@ async def open_choice_pack(this_pack, team: dict, context, cardset_id: Optional[
             ('min_rarity', min_rarity), ('max_rarity', rarity_id), ('limit', 4 - len(players)),
             ('franchise', this_pack['pack_team']['lname'])
         ]
-        if this_pack['pack_team']['abbrev'] not in ['MSS']:
+        # Only apply in_packs filter if no specific cardset is provided
+        if this_pack['pack_team']['abbrev'] not in ['MSS'] and cardset_id is None:
             params.append(('in_packs', True))
         if cardset_id is not None:
             params.append(('cardset_id', cardset_id))

@@ -1762,8 +1763,11 @@ async def open_choice_pack(this_pack, team: dict, context, cardset_id: Optional[
     min_rarity = rarity_id
     while len(players) < 4 and rarity_id < 10:
         params = [
-            ('min_rarity', min_rarity), ('max_rarity', rarity_id), ('limit', 4), ('in_packs', True)
+            ('min_rarity', min_rarity), ('max_rarity', rarity_id), ('limit', 4)
         ]
+        # Only apply in_packs filter if no specific cardset is provided
+        if cardset_id is None:
+            params.append(('in_packs', True))
         if this_pack['pack_team'] is not None:
             params.append(('franchise', this_pack['pack_team']['lname']))
         if cardset_id is not None:

@@ -1,8 +1,8 @@
 import pytest
 from sqlmodel import Session, select, func

-from command_logic.logic_gameplay import advance_runners, doubles, gb_result_1, get_obc, get_re24, get_wpa, complete_play, log_run_scored, strikeouts, steals, xchecks, walks, popouts, hit_by_pitch, homeruns, singles, triples, bunts, chaos, safe_wpa_lookup
-from in_game.gameplay_models import Lineup, Play
+from command_logic.logic_gameplay import advance_runners, doubles, gb_result_1, get_obc, get_re24, get_wpa, complete_play, log_run_scored, strikeouts, steals, xchecks, walks, popouts, hit_by_pitch, homeruns, singles, triples, bunts, chaos, safe_wpa_lookup, create_pinch_runner_entry_play
+from in_game.gameplay_models import Lineup, Play, Card, Player
 from tests.factory import session_fixture, Game


@@ -469,3 +469,103 @@ async def test_chaos(session: Session):
     assert play_4.ab == 0
     assert play_4.pick_off == 1  # Pickoff recorded
     assert play_4.outs == 1  # Out recorded
+
+
+def test_pinch_runner_entry_and_scoring(session: Session):
+    """
+    Test that pinch runners get an entry Play record when they substitute for a runner on base,
+    and that the run is properly credited when they score.
+    """
+    this_game = session.get(Game, 3)
+
+    # Step 1: Batter A gets a single
+    play_1 = this_game.initialize_play(session)
+    play_1.pa, play_1.ab, play_1.hit, play_1.batter_final = 1, 1, 1, 1
+
+    # Complete the play - batter A is now on first
+    play_2 = complete_play(session, play_1)
+    assert play_2.on_first == play_1.batter  # Original batter on first
+    assert play_2.on_first.player_id == play_1.batter.player_id
+
+    # Step 2: Create a pinch runner lineup (simulating /substitute batter)
+    # Get a different player/card for the pinch runner
+    pinch_runner_card = session.get(Card, 2)  # Different card than the one on base
+    pinch_runner_lineup = Lineup(
+        team=play_2.on_first.team,
+        player=pinch_runner_card.player,
+        card=pinch_runner_card,
+        position='PR',
+        batting_order=play_2.on_first.batting_order,
+        game=this_game,
+        after_play=play_2.play_num - 1,
+        replacing_id=play_2.on_first.id
+    )
+    session.add(pinch_runner_lineup)
+    session.commit()
+    session.refresh(pinch_runner_lineup)
+
+    # Step 3: Update the current play to reference the pinch runner
+    play_2.on_first = pinch_runner_lineup
+    session.add(play_2)
+    session.commit()
+
+    # Step 4: Create the pinch runner entry Play
+    entry_play = create_pinch_runner_entry_play(
+        session=session,
+        game=this_game,
+        current_play=play_2,
+        pinch_runner_lineup=pinch_runner_lineup
+    )
+
+    # Verify the entry Play was created correctly
+    assert entry_play is not None
+    assert entry_play.batter == pinch_runner_lineup
+    assert entry_play.pa == 0  # NOT a plate appearance
+    assert entry_play.ab == 0  # NOT an at-bat
+    assert entry_play.run == 0  # Not scored yet
+    assert entry_play.complete == True  # Entry is complete
+    assert entry_play.game == this_game
+    assert entry_play.pitcher == play_2.pitcher
+    assert entry_play.inning_half == play_2.inning_half
+    assert entry_play.inning_num == play_2.inning_num
+
+    # Step 5: Advance the pinch runner to third
+    play_2.hit = 1
+    play_2.batter_final = 1
+    play_2 = advance_runners(session, play_2, num_bases=2)  # Advance runner 2 bases
+    session.add(play_2)
+    session.commit()
+
+    assert play_2.on_first_final == 3  # Pinch runner advanced to third
+
+    # Complete the play - pinch runner is now on third
+    play_3 = complete_play(session, play_2)
+    assert play_3.on_third == pinch_runner_lineup
+
+    # Step 6: Score the pinch runner on a subsequent hit
+    play_3.pa, play_3.ab, play_3.hit, play_3.batter_final = 1, 1, 1, 1
+    play_3 = advance_runners(session, play_3, num_bases=1)  # Score from third
+    session.add(play_3)
+    session.commit()
+
+    assert play_3.on_third_final == 4  # Runner scored
+    assert play_3.rbi >= 1  # RBI for the batter
+
+    # Step 7: Verify the run was credited to the pinch runner's entry Play
+    session.refresh(entry_play)
+    assert entry_play.run == 1  # Pinch runner's entry Play has run=1
+    assert entry_play.pa == 0  # Still not a plate appearance
+    assert entry_play.ab == 0  # Still not an at-bat
+
+    # Step 8: Verify pitcher stats are not affected by the entry Play
+    # The pitcher allowed a hit (play_1) but the entry Play (PA=0, AB=0) should not count
+    pitcher_plays = session.exec(
+        select(Play).where(
+            Play.game == this_game,
+            Play.pitcher == play_2.pitcher,
+            Play.pa > 0
+        )
+    ).all()
+
+    # Entry play should NOT be in this list (PA=0)
+    assert entry_play not in pitcher_plays

@@ -483,6 +483,11 @@ def session_fixture():
     session.add(manager_ai_3)
     session.commit()

+    # Reset sequences so auto-generated IDs don't conflict with factory data
+    session.exec(text("SELECT setval(pg_get_serial_sequence('lineup', 'id'), COALESCE((SELECT MAX(id) FROM lineup), 1))"))
+    session.exec(text("SELECT setval(pg_get_serial_sequence('play', 'id'), COALESCE((SELECT MAX(id) FROM play), 1))"))
+    session.commit()
+
     yield session
