Add direct setlist import script
parent 3de9a7cb3f
commit fb697817b0
2 changed files with 205 additions and 0 deletions
.agent/workflows/deploy.md (new file, 38 lines)

@@ -0,0 +1,38 @@
---
description: how to deploy elmeg changes safely
---

# Elmeg Safe Deployment

## CRITICAL: Never wipe the database

When deploying changes to elmeg, **ONLY rebuild the backend and frontend containers**. The database must NEVER be rebuilt or recreated.

## Safe deployment command

```bash
# turbo
ssh nexus-vector "cd /srv/containers/elmeg-demo && git pull && docker compose up -d --build --no-deps backend frontend"
```
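To confirm a deploy left the database container untouched, a check along these lines can help. This is a sketch, assuming the db container is named `elmeg-demo-db-1` as in the backup command below:

```bash
# The Created timestamp should predate the deploy;
# a fresh timestamp means the db container was recreated.
ssh nexus-vector "docker inspect --format '{{.Created}}' elmeg-demo-db-1"
```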
## DANGEROUS - Do NOT use these commands

```bash
# These will WIPE THE DATABASE:
docker compose up -d --build                  # Rebuilds ALL containers including db
docker compose down && docker compose up -d   # Recreates all containers
docker compose up -d --force-recreate         # Force recreates all
```

## Backup before any deployment (optional but recommended)

```bash
# turbo
ssh nexus-vector "docker exec elmeg-demo-db-1 pg_dump -U elmeg elmeg > /srv/containers/elmeg-demo/backup-\$(date +%Y%m%d-%H%M%S).sql"
```
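After taking a backup, it's worth confirming the dump actually landed on disk and isn't empty. A minimal check, using the same backup path as above:

```bash
# List backups newest-first; the latest dump should be non-trivially sized
ssh nexus-vector "ls -lht /srv/containers/elmeg-demo/backup-*.sql | head -n 5"
```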
## Restore from backup if needed

```bash
ssh nexus-vector "cat /srv/containers/elmeg-demo/backup-YYYYMMDD-HHMMSS.sql | docker exec -i elmeg-demo-db-1 psql -U elmeg elmeg"
```
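After a restore, a quick way to confirm the database is reachable and the schema came back is to list its tables. A sketch, using the same container, user, and database as above:

```bash
# An empty table listing suggests the restore did not apply
ssh nexus-vector "docker exec elmeg-demo-db-1 psql -U elmeg elmeg -c '\dt'"
```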
backend/import_setlists_direct.py (new file, 167 lines)

@@ -0,0 +1,167 @@
```python
"""
Direct setlist import - bypasses broken show_map logic
Imports setlists by matching dates directly
"""
import requests
import time
from datetime import datetime
from sqlmodel import Session, select, func
from database import engine
from models import Show, Song, Performance
from slugify import generate_slug

BASE_URL = "https://elgoose.net/api/v2"

def fetch_json(endpoint, params=None):
    """Fetch JSON from El Goose API"""
    url = f"{BASE_URL}/{endpoint}.json"
    try:
        response = requests.get(url, params=params, timeout=30)
        response.raise_for_status()
        data = response.json()
        if data.get('error') == 1:
            return None
        return data.get('data', [])
    except Exception as e:
        print(f"Error fetching {endpoint}: {e}")
        return None

def main():
    print("=" * 60)
    print("DIRECT SETLIST IMPORTER")
    print("=" * 60)

    with Session(engine) as session:
        # Build date -> show_id mapping from our database
        print("\n1. Building date->show mapping from database...")
        shows = session.exec(select(Show)).all()
        date_to_show = {}
        for show in shows:
            date_str = show.date.strftime('%Y-%m-%d')
            date_to_show[date_str] = show.id
        print(f" Found {len(date_to_show)} shows in database")

        # Build song title -> song_id mapping
        print("\n2. Building song mappings from database...")
        songs = session.exec(select(Song)).all()
        song_title_to_id = {s.title.lower(): s.id for s in songs}
        print(f" Found {len(song_title_to_id)} songs in database")

        # Fetch setlists from ElGoose
        print("\n3. Fetching setlists from ElGoose API...")
        page = 1
        total_added = 0
        total_skipped = 0

        while True:
            print(f" Page {page}...", end="", flush=True)
            data = fetch_json("setlists", {"page": page})

            if not data:
                print(" Done.")
                break

            added_this_page = 0
            for perf in data:
                # Get the show date from the setlist item
                show_date = perf.get('showdate')
                song_name = perf.get('songname', '').lower()

                if not show_date or not song_name:
                    total_skipped += 1
                    continue

                # Find our show by date
                our_show_id = date_to_show.get(show_date)
                if not our_show_id:
                    total_skipped += 1
                    continue

                # Find our song by title
                our_song_id = song_title_to_id.get(song_name)
                if not our_song_id:
                    total_skipped += 1
                    continue

                position = perf.get('position', 0)

                # Check if performance already exists
                existing = session.exec(
                    select(Performance).where(
                        Performance.show_id == our_show_id,
                        Performance.song_id == our_song_id,
                        Performance.position == position
                    )
                ).first()

                if existing:
                    continue

                # Map setnumber
                set_val = str(perf.get('setnumber', '1'))
                if set_val.isdigit():
                    set_name = f"Set {set_val}"
                elif set_val.lower() == 'e':
                    set_name = "Encore"
                elif set_val.lower() == 'e2':
                    set_name = "Encore 2"
                elif set_val.lower() == 's':
                    set_name = "Soundcheck"
                else:
                    set_name = f"Set {set_val}"

                # Create performance
                new_perf = Performance(
                    show_id=our_show_id,
                    song_id=our_song_id,
                    position=position,
                    set_name=set_name,
                    segue=bool(perf.get('segue', 0)),
                    notes=perf.get('footnote')
                )
                session.add(new_perf)
                added_this_page += 1
                total_added += 1

            session.commit()
            print(f" added {added_this_page} performances")

            if page > 500:  # Safety limit
                print(" Safety limit reached")
                break

            page += 1
            time.sleep(0.1)  # Be nice to the API

        # Generate slugs for all performances
        print("\n4. Generating slugs for performances...")
        perfs_without_slugs = session.exec(
            select(Performance).where(Performance.slug == None)
        ).all()

        for perf in perfs_without_slugs:
            if perf.song and perf.show:
                song_slug = perf.song.slug or generate_slug(perf.song.title)
                date_str = perf.show.date.strftime('%Y-%m-%d')
                perf.slug = f"{song_slug}-{date_str}"

        session.commit()
        print(f" Generated {len(perfs_without_slugs)} slugs")

        # Final count
        total_perfs = session.exec(select(func.count(Performance.id))).one()
        shows_with_perfs = session.exec(
            select(func.count(func.distinct(Performance.show_id)))
        ).one()

        print("\n" + "=" * 60)
        print("IMPORT COMPLETE!")
        print("=" * 60)
        print(f"\nStats:")
        print(f" • Added: {total_added} new performances")
        print(f" • Skipped: {total_skipped} (no match)")
        print(f" • Total performances: {total_perfs}")
        print(f" • Shows with setlists: {shows_with_perfs}")

if __name__ == "__main__":
    main()
```
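To run the importer against the deployed stack, something along these lines should work. This is a sketch that assumes the script is present in the backend container's working directory and that the compose service is named `backend`, as in the deploy command above:

```bash
# Run the direct importer inside the already-running backend container
ssh nexus-vector "cd /srv/containers/elmeg-demo && docker compose exec backend python import_setlists_direct.py"
```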