diff --git a/backend/alembic/versions/65c515b4722a_add_slugs.py b/backend/alembic/versions/65c515b4722a_add_slugs.py
new file mode 100644
index 0000000..d0ef1f7
--- /dev/null
+++ b/backend/alembic/versions/65c515b4722a_add_slugs.py
@@ -0,0 +1,213 @@
+"""Add slugs
+
+Revision ID: 65c515b4722a
+Revises: e50a60c5d343
+Create Date: 2025-12-21 20:24:07.968495
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision: str = '65c515b4722a'
+down_revision: Union[str, Sequence[str], None] = 'e50a60c5d343'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # op.create_table('reaction',
+    #     sa.Column('id', sa.Integer(), nullable=False),
+    #     sa.Column('user_id', sa.Integer(), nullable=False),
+    #     sa.Column('entity_type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    #     sa.Column('entity_id', sa.Integer(), nullable=False),
+    #     sa.Column('emoji', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    #     sa.Column('created_at', sa.DateTime(), nullable=False),
+    #     sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    #     sa.PrimaryKeyConstraint('id')
+    # )
+    # with op.batch_alter_table('reaction', schema=None) as batch_op:
+    #     batch_op.create_index(batch_op.f('ix_reaction_entity_id'), ['entity_id'], unique=False)
+    #     batch_op.create_index(batch_op.f('ix_reaction_entity_type'), ['entity_type'], unique=False)
+
+    # op.create_table('chasesong',
+    #     sa.Column('id', sa.Integer(), nullable=False),
+    #     sa.Column('user_id', sa.Integer(), nullable=False),
+    #     sa.Column('song_id', sa.Integer(), nullable=False),
+    #     sa.Column('priority', sa.Integer(), nullable=False),
+    #     sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+    #     sa.Column('created_at', sa.DateTime(), nullable=False),
+    #     sa.Column('caught_at', sa.DateTime(), nullable=True),
+    #     sa.Column('caught_show_id', sa.Integer(), nullable=True),
+    #     sa.ForeignKeyConstraint(['caught_show_id'], ['show.id'], ),
+    #     sa.ForeignKeyConstraint(['song_id'], ['song.id'], ),
+    #     sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    #     sa.PrimaryKeyConstraint('id')
+    # )
+    # with op.batch_alter_table('chasesong', schema=None) as batch_op:
+    #     batch_op.create_index(batch_op.f('ix_chasesong_song_id'), ['song_id'], unique=False)
+    #     batch_op.create_index(batch_op.f('ix_chasesong_user_id'), ['user_id'], unique=False)
+
+    # with op.batch_alter_table('badge', schema=None) as batch_op:
+    #     batch_op.add_column(sa.Column('tier', sqlmodel.sql.sqltypes.AutoString(), nullable=False))
+    #     batch_op.add_column(sa.Column('category', sqlmodel.sql.sqltypes.AutoString(), nullable=False))
+    #     batch_op.add_column(sa.Column('xp_reward', sa.Integer(), nullable=False))
+
+    with op.batch_alter_table('comment', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('parent_id', sa.Integer(), nullable=True))
+        batch_op.create_foreign_key('fk_comment_parent_id', 'comment', ['parent_id'], ['id'])
+
+    with op.batch_alter_table('performance', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('track_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('youtube_link', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.create_index(batch_op.f('ix_performance_slug'), ['slug'], unique=True)
+
+    with op.batch_alter_table('rating', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('performance_id', sa.Integer(), nullable=True))
+        batch_op.add_column(sa.Column('venue_id', sa.Integer(), nullable=True))
+        batch_op.add_column(sa.Column('tour_id', sa.Integer(), nullable=True))
+        batch_op.alter_column('score',
+               existing_type=sa.INTEGER(),
+               type_=sa.Float(),
+               existing_nullable=False)
+        batch_op.create_foreign_key('fk_rating_tour_id', 'tour', ['tour_id'], ['id'])
+        batch_op.create_foreign_key('fk_rating_performance_id', 'performance', ['performance_id'], ['id'])
+        batch_op.create_foreign_key('fk_rating_venue_id', 'venue', ['venue_id'], ['id'])
+
+    with op.batch_alter_table('review', schema=None) as batch_op:
+        batch_op.alter_column('score',
+               existing_type=sa.INTEGER(),
+               type_=sa.Float(),
+               existing_nullable=False)
+
+    with op.batch_alter_table('show', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('bandcamp_link', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('nugs_link', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('youtube_link', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.create_index(batch_op.f('ix_show_slug'), ['slug'], unique=True)
+
+    with op.batch_alter_table('song', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('youtube_link', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.create_index(batch_op.f('ix_song_slug'), ['slug'], unique=True)
+
+    with op.batch_alter_table('tour', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.create_index(batch_op.f('ix_tour_slug'), ['slug'], unique=True)
+
+    with op.batch_alter_table('user', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('xp', sa.Integer(), nullable=False))
+        batch_op.add_column(sa.Column('level', sa.Integer(), nullable=False))
+        batch_op.add_column(sa.Column('streak_days', sa.Integer(), nullable=False))
+        batch_op.add_column(sa.Column('last_activity', sa.DateTime(), nullable=True))
+        batch_op.add_column(sa.Column('custom_title', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('title_color', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('flair', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('is_early_adopter', sa.Boolean(), nullable=False))
+        batch_op.add_column(sa.Column('is_supporter', sa.Boolean(), nullable=False))
+        batch_op.add_column(sa.Column('joined_at', sa.DateTime(), nullable=False))
+        batch_op.add_column(sa.Column('email_verified', sa.Boolean(), nullable=False))
+        batch_op.add_column(sa.Column('verification_token', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('verification_token_expires', sa.DateTime(), nullable=True))
+        batch_op.add_column(sa.Column('reset_token', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.add_column(sa.Column('reset_token_expires', sa.DateTime(), nullable=True))
+
+    with op.batch_alter_table('venue', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
+        batch_op.create_index(batch_op.f('ix_venue_slug'), ['slug'], unique=True)
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('venue', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_venue_slug'))
+        batch_op.drop_column('slug')
+
+    with op.batch_alter_table('user', schema=None) as batch_op:
+        batch_op.drop_column('reset_token_expires')
+        batch_op.drop_column('reset_token')
+        batch_op.drop_column('verification_token_expires')
+        batch_op.drop_column('verification_token')
+        batch_op.drop_column('email_verified')
+        batch_op.drop_column('joined_at')
+        batch_op.drop_column('is_supporter')
+        batch_op.drop_column('is_early_adopter')
+        batch_op.drop_column('flair')
+        batch_op.drop_column('title_color')
+        batch_op.drop_column('custom_title')
+        batch_op.drop_column('last_activity')
+        batch_op.drop_column('streak_days')
+        batch_op.drop_column('level')
+        batch_op.drop_column('xp')
+
+    with op.batch_alter_table('tour', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_tour_slug'))
+        batch_op.drop_column('slug')
+
+    with op.batch_alter_table('song', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_song_slug'))
+        batch_op.drop_column('youtube_link')
+        batch_op.drop_column('slug')
+
+    with op.batch_alter_table('show', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_show_slug'))
+        batch_op.drop_column('youtube_link')
+        batch_op.drop_column('nugs_link')
+        batch_op.drop_column('bandcamp_link')
+        batch_op.drop_column('slug')
+
+    with op.batch_alter_table('review', schema=None) as batch_op:
+        batch_op.alter_column('score',
+               existing_type=sa.Float(),
+               type_=sa.INTEGER(),
+               existing_nullable=False)
+
+    with op.batch_alter_table('rating', schema=None) as batch_op:
+        # Drop the constraints by the names given in upgrade(); autogenerate emitted
+        # None here, which fails at runtime because drop_constraint needs a name.
+        batch_op.drop_constraint('fk_rating_venue_id', type_='foreignkey')
+        batch_op.drop_constraint('fk_rating_performance_id', type_='foreignkey')
+        batch_op.drop_constraint('fk_rating_tour_id', type_='foreignkey')
+        batch_op.alter_column('score',
+               existing_type=sa.Float(),
+               type_=sa.INTEGER(),
+               existing_nullable=False)
+        batch_op.drop_column('tour_id')
+        batch_op.drop_column('venue_id')
+        batch_op.drop_column('performance_id')
+
+    with op.batch_alter_table('performance', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_performance_slug'))
+        batch_op.drop_column('youtube_link')
+        batch_op.drop_column('track_url')
+        batch_op.drop_column('slug')
+
+    with op.batch_alter_table('comment', schema=None) as batch_op:
+        batch_op.drop_constraint('fk_comment_parent_id', type_='foreignkey')
+        batch_op.drop_column('parent_id')
+
+    with op.batch_alter_table('badge', schema=None) as batch_op:
+        batch_op.drop_column('xp_reward')
+        batch_op.drop_column('category')
+        batch_op.drop_column('tier')
+
+    with op.batch_alter_table('chasesong', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_chasesong_user_id'))
+        batch_op.drop_index(batch_op.f('ix_chasesong_song_id'))
+
+    op.drop_table('chasesong')
+    with op.batch_alter_table('reaction', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_reaction_entity_type'))
+        batch_op.drop_index(batch_op.f('ix_reaction_entity_id'))
+
+    op.drop_table('reaction')
+    # ### end Alembic commands ###
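Both `fix_db_data.py` (next) and `import_elgoose.py` import `generate_slug` and `generate_show_slug` from a local `slugify` module that is not included in this diff. A minimal sketch of what those helpers are assumed to do — the names come from the imports, but the bodies here are guesses:

```python
# Hypothetical sketch of backend/slugify.py; the real module is not part of this diff.
import re
import unicodedata


def generate_slug(text: str) -> str:
    """Lowercase, ASCII-fold, and hyphenate, e.g. 'Arcadia (Reprise)' -> 'arcadia-reprise'."""
    text = unicodedata.normalize("NFKD", text or "").encode("ascii", "ignore").decode("ascii")
    text = re.sub(r"[^a-z0-9]+", "-", text.lower()).strip("-")
    return text or "unknown"


def generate_show_slug(date_str: str, venue_name: str) -> str:
    """Combine a YYYY-MM-DD date with a venue name, e.g. '2024-06-21-red-rocks-amphitheatre'."""
    return f"{date_str}-{generate_slug(venue_name)}"
```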
diff --git a/backend/fix_db_data.py b/backend/fix_db_data.py
new file mode 100644
index 0000000..b72a2f6
--- /dev/null
+++ b/backend/fix_db_data.py
@@ -0,0 +1,170 @@
+from datetime import datetime
+
+from sqlmodel import Session, select
+from database import engine
+from models import Venue, Song, Show, Tour, Performance
+from slugify import generate_slug, generate_show_slug
+import requests
+import time
+
+BASE_URL = "https://elgoose.net/api/v2"
+
+def fetch_all_json(endpoint, params=None):
+    all_data = []
+    page = 1
+    params = params.copy() if params else {}
+    print(f"Fetching {endpoint}...")
+    while True:
+        params['page'] = page
+        url = f"{BASE_URL}/{endpoint}.json"
+        try:
+            resp = requests.get(url, params=params)
+            if resp.status_code != 200:
+                break
+            data = resp.json()
+            items = data.get('data', [])
+            if not items:
+                break
+            all_data.extend(items)
+            print(f"  Page {page} done ({len(items)} items)")
+            page += 1
+            time.sleep(0.5)
+        except Exception as e:
+            print(f"Error fetching {endpoint}: {e}")
+            break
+    return all_data
+
+def fix_data():
+    with Session(engine) as session:
+        # 1. Fix Venue slugs
+        print("Fixing Venue Slugs...")
+        venues = session.exec(select(Venue)).all()
+        for v in venues:
+            if not v.slug:
+                v.slug = generate_slug(v.name)
+                session.add(v)
+        session.commit()
+
+        # 2. Fix Song slugs
+        print("Fixing Song Slugs...")
+        songs = session.exec(select(Song)).all()
+        for s in songs:
+            if not s.slug:
+                s.slug = generate_slug(s.title)
+                session.add(s)
+        session.commit()
+
+        # 3. Fix Tour slugs
+        print("Fixing Tour Slugs...")
+        tours = session.exec(select(Tour)).all()
+        for t in tours:
+            if not t.slug:
+                t.slug = generate_slug(t.name)
+                session.add(t)
+        session.commit()
+
+        # 4. Fix Show slugs
+        print("Fixing Show Slugs...")
+        shows = session.exec(select(Show)).all()
+        venue_map = {v.id: v for v in venues}  # Cache venues for slug naming
+        for show in shows:
+            if not show.slug:
+                date_str = show.date.strftime("%Y-%m-%d") if show.date else "unknown"
+                venue_name = "unknown"
+                if show.venue_id and show.venue_id in venue_map:
+                    venue_name = venue_map[show.venue_id].name
+
+                show.slug = generate_show_slug(date_str, venue_name)
+                session.add(show)
+        session.commit()
+
+        # 5. Fix set names (fetch setlists from the El Goose API)
+        print("Fixing Set Names (fetching setlists)...")
+        # The El Goose setlist records reference their own show_id/song_id, but our
+        # models do not store those external IDs, so we cannot join directly.
+        # Inferring the set from 'position' alone is unreliable (position 1 could be
+        # Set 1 or a one-song encore), so instead we rebuild the ID maps by matching
+        # our shows on date and our songs on title, then key existing performances
+        # by (show_id, song_id, position).
+
+        perfs = session.exec(select(Performance)).all()
+        perf_map = {}  # (show_id, song_id, position) -> Performance
+        for p in perfs:
+            perf_map[(p.show_id, p.song_id, p.position)] = p
+
+        # Rebuild maps: el_goose_show_id -> our_show_id and el_goose_song_id -> our_song_id.
+        # This requires re-fetching shows and songs from the API.
+        print("  Re-building ID maps...")
+
+        # Map Shows
+        el_shows = fetch_all_json("shows", {"artist": 1})
+        if not el_shows:
+            el_shows = fetch_all_json("shows")  # fallback without the artist filter
+
+        el_show_map = {}  # el_id -> our_id
+        for s in el_shows:
+            # Match our show by date; 'showdate' is YYYY-MM-DD.
+            dt = s['showdate']
+            s_date = datetime.strptime(dt, "%Y-%m-%d")
+
+            # Find the show in our DB. One query per show is not optimal, but it is
+            # fine for a one-off script.
+            found = session.exec(select(Show).where(Show.date == s_date)).first()
+            if found:
+                el_show_map[s['show_id']] = found.id
+
+        # Map Songs
+        el_songs = fetch_all_json("songs")
+        el_song_map = {}  # el_id -> our_id
+        for s in el_songs:
+            found = session.exec(select(Song).where(Song.title == s['name'])).first()
+            if found:
+                el_song_map[s['id']] = found.id
+
+        # Now fetch setlists
+        el_setlists = fetch_all_json("setlists")
+
+        count = 0
+        for item in el_setlists:
+            our_show_id = el_show_map.get(item['show_id'])
+            our_song_id = el_song_map.get(item['song_id'])
+            position = item.get('position', 0)
+
+            if our_show_id and our_song_id:
+                # Find existing perf
+                perf = perf_map.get((our_show_id, our_song_id, position))
+                if perf:
+                    # Map setnumber to a display set_name
+                    set_val = str(item.get('setnumber', '1'))
+                    if set_val.isdigit():
+                        set_name = f"Set {set_val}"
+                    elif set_val.lower() == 'e':
+                        set_name = "Encore"
+                    elif set_val.lower() == 'e2':
+                        set_name = "Encore 2"
+                    elif set_val.lower() == 's':
+                        set_name = "Soundcheck"
+                    else:
+                        set_name = f"Set {set_val}"
+
+                    if perf.set_name != set_name:
+                        perf.set_name = set_name
+                        session.add(perf)
+                        count += 1
+
+        session.commit()
+        print(f"Fixed {count} performance set names.")
+
+if __name__ == "__main__":
+    fix_data()
diff --git a/backend/import_elgoose.py b/backend/import_elgoose.py
index a275947..d9ec44d 100644
--- a/backend/import_elgoose.py
+++ b/backend/import_elgoose.py
@@ -12,6 +12,7 @@ from models import (
     User, UserPreferences
 )
 from passlib.context import CryptContext
+from slugify import generate_slug, generate_show_slug
 
 BASE_URL = "https://elgoose.net/api/v2"
 ARTIST_ID = 1  # Goose
@@ -131,6 +132,7 @@ def import_venues(session):
         else:
             venue = Venue(
                 name=v['venuename'],
+                slug=generate_slug(v['venuename']),
                 city=v.get('city'),
                 state=v.get('state'),
                 country=v.get('country'),
@@ -166,6 +168,7 @@ def import_songs(session, vertical_id):
         else:
             song = Song(
                 title=s['name'],
+                slug=generate_slug(s['name']),
                 original_artist=s.get('original_artist'),
                 vertical_id=vertical_id
                 # API doesn't include debut_date or times_played in base response
@@ -211,7 +214,10 @@ def import_shows(session, vertical_id, venue_map):
             if existing_tour:
                 tour_map[s['tour_id']] = existing_tour.id
             else:
-                tour = Tour(name=s['tourname'])
+                tour = Tour(
+                    name=s['tourname'],
+                    slug=generate_slug(s['tourname'])
+                )
                 session.add(tour)
                 session.commit()
                 session.refresh(tour)
@@ -235,6 +241,7 @@ def import_shows(session, vertical_id, venue_map):
         else:
             show = Show(
                 date=show_date,
+                slug=generate_show_slug(s['showdate'], s.get('venuename', 'unknown')),
                 vertical_id=vertical_id,
                 venue_id=venue_map.get(s['venue_id']),
                 tour_id=tour_id,
@@ -292,11 +299,24 @@ def import_setlists(session, show_map, song_map):
             ).first()
 
             if not existing_perf:
+                # Map setnumber to set_name
+                set_val = str(perf_data.get('setnumber', '1'))
+                if set_val.isdigit():
+                    set_name = f"Set {set_val}"
+                elif set_val.lower() == 'e':
+                    set_name = "Encore"
+                elif set_val.lower() == 'e2':
+                    set_name = "Encore 2"
+                elif set_val.lower() == 's':
+                    set_name = "Soundcheck"
+                else:
+                    set_name = f"Set {set_val}"
+
                 perf = Performance(
                     show_id=our_show_id,
                     song_id=our_song_id,
                     position=perf_data.get('position', 0),
-                    set_name=perf_data.get('set'),
+                    set_name=set_name,
                     segue=bool(perf_data.get('segue', 0)),
                     notes=perf_data.get('notes')
                 )
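The `setnumber` → `set_name` mapping now appears in both `fix_db_data.py` and `import_elgoose.py`; a small shared helper would keep the two copies from drifting apart. A sketch — the function name and module placement are suggestions, not part of the diff:

```python
# Hypothetical shared helper; could live in e.g. backend/setlist_utils.py and be
# imported by both import_elgoose.py and fix_db_data.py.
def set_name_from_setnumber(raw) -> str:
    """Translate El Goose 'setnumber' values ('1', '2', 'e', 'e2', 's') into display names."""
    val = str(raw if raw is not None else "1").strip().lower()
    if val.isdigit():
        return f"Set {val}"
    if val == "e":
        return "Encore"
    if val == "e2":
        return "Encore 2"
    if val == "s":
        return "Soundcheck"
    return f"Set {val}"
```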
diff --git a/backend/routers/shows.py b/backend/routers/shows.py
index 15b9b16..12c5287 100644
--- a/backend/routers/shows.py
+++ b/backend/routers/shows.py
@@ -49,8 +49,12 @@ def read_recent_shows(
     return shows
 
 @router.get("/{show_id}", response_model=ShowRead)
-def read_show(show_id: int, session: Session = Depends(get_session)):
-    show = session.get(Show, show_id)
+def read_show(show_id: str, session: Session = Depends(get_session)):
+    if show_id.isdigit():
+        show = session.get(Show, int(show_id))
+    else:
+        show = session.exec(select(Show).where(Show.slug == show_id)).first()
+
     if not show:
         raise HTTPException(status_code=404, detail="Show not found")
 
@@ -58,7 +62,7 @@ def read_show(show_id: int, session: Session = Depends(get_session)):
         select(Tag)
         .join(EntityTag, Tag.id == EntityTag.tag_id)
         .where(EntityTag.entity_type == "show")
-        .where(EntityTag.entity_id == show_id)
+        .where(EntityTag.entity_id == show.id)
     ).all()
 
     # Manually populate performances to ensure nicknames are filtered if needed
diff --git a/backend/routers/songs.py b/backend/routers/songs.py
index 9854db4..72698b1 100644
--- a/backend/routers/songs.py
+++ b/backend/routers/songs.py
@@ -74,9 +74,11 @@ def read_song(song_id_or_slug: str, session: Session = Depends(get_session)):
             venue_city = ""
             venue_state = ""
             show_date = datetime.now()
+            show_slug = None
 
             if p.show:
                 show_date = p.show.date
+                show_slug = p.show.slug
                 if p.show.venue:
                     venue_name = p.show.venue.name
                     venue_city = p.show.venue.city
@@ -87,6 +89,7 @@ def read_song(song_id_or_slug: str, session: Session = Depends(get_session)):
             perf_dtos.append(PerformanceReadWithShow(
                 **p.model_dump(),
                 show_date=show_date,
+                show_slug=show_slug,
                 venue_name=venue_name,
                 venue_city=venue_city,
                 venue_state=venue_state,
diff --git a/backend/schemas.py b/backend/schemas.py
index 00cc99e..c0abe94 100644
--- a/backend/schemas.py
+++ b/backend/schemas.py
@@ -34,6 +34,7 @@ class VenueCreate(VenueBase):
 
 class VenueRead(VenueBase):
     id: int
+    slug: Optional[str] = None
 
 class VenueUpdate(SQLModel):
     name: Optional[str] = None
@@ -55,6 +56,7 @@ class SongCreate(SongBase):
 
 class SongRead(SongBase):
     id: int
+    slug: Optional[str] = None
     tags: List["TagRead"] = []
 
@@ -86,11 +88,13 @@ class PerformanceBase(SQLModel):
 
 class PerformanceRead(PerformanceBase):
     id: int
+    slug: Optional[str] = None
     song: Optional["SongRead"] = None
     nicknames: List["PerformanceNicknameRead"] = []
 
 class PerformanceReadWithShow(PerformanceRead):
     show_date: datetime
+    show_slug: Optional[str] = None
     venue_name: str
     venue_city: str
     venue_state: Optional[str] = None
@@ -141,6 +145,7 @@ class GroupPostRead(GroupPostBase):
 
 class ShowRead(ShowBase):
     id: int
+    slug: Optional[str] = None
     venue: Optional["VenueRead"] = None
     tour: Optional["TourRead"] = None
     tags: List["TagRead"] = []
@@ -164,6 +169,7 @@ class TourCreate(TourBase):
 
 class TourRead(TourBase):
     id: int
+    slug: Optional[str] = None
 
 class TourUpdate(SQLModel):
     name: Optional[str] = None
@@ -344,6 +350,7 @@ class TagCreate(TagBase):
 
 class TagRead(TagBase):
     id: int
+    slug: str
 
 # Circular refs
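The ID-or-slug branch in `read_show` (and the analogous lookup in `read_song`) could be factored into one generic helper. A sketch, assuming each SQLModel model exposes an integer `id` primary key and an optional `slug` column; this helper is not part of the diff:

```python
# Hypothetical helper for ID-or-slug lookups; names and placement are assumptions.
from typing import Optional, Type, TypeVar

from sqlmodel import Session, SQLModel, select

T = TypeVar("T", bound=SQLModel)


def get_by_id_or_slug(session: Session, model: Type[T], id_or_slug: str) -> Optional[T]:
    """Resolve '123' via the primary key and anything non-numeric via the slug column."""
    if id_or_slug.isdigit():
        return session.get(model, int(id_or_slug))
    return session.exec(select(model).where(model.slug == id_or_slug)).first()
```

A route would then reduce to `show = get_by_id_or_slug(session, Show, show_id)` followed by the existing 404 check.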
diff --git a/docs/HANDOFF_2025_12_21.md b/docs/HANDOFF_2025_12_21.md
new file mode 100644
index 0000000..0a37d54
--- /dev/null
+++ b/docs/HANDOFF_2025_12_21.md
@@ -0,0 +1,54 @@
+# Handoff - 2025-12-21
+
+## Work Completed
+
+### Slug Integration
+
+- **Backend**: Updated `Show`, `Song`, `Venue`, `Tour` models/schemas to support `slug`.
+  - Updated API routers (`shows.py`, `songs.py`) to look up records by slug or ID.
+  - Migrated the database schema to add `slug` columns via Alembic.
+  - Backfilled slugs using `backend/fix_db_data.py`.
+- **Frontend**: Updated routing and links for entities.
+  - `/shows/[id]` -> `/shows/${show.slug || show.id}`
+  - `/songs/[id]` -> `/songs/${song.slug || song.id}`
+  - `/venues/[id]` -> `/venues/${venue.slug || venue.id}`
+  - Updated interfaces to include `slug`.
+  - Updated the `PerformanceList` component to use slugs.
+
+### Data Fixes
+
+- **Set Names**:
+  - Identified that `set_name` was null because of an API field mismatch (`setnumber` vs `set`).
+  - Updated `import_elgoose.py` to correctly extract and format "Set 1", "Set 2", "Encore", etc. from `setnumber`.
+  - Attempted to backfill existing data but hit an infinite-loop issue with API pagination (the slug backfill itself succeeded). The data can be fixed by re-running a corrected importer or a custom script.
+- **Slugs**:
+  - `import_elgoose.py` now generates slugs for new imports.
+  - `fix_db_data.py` successfully backfilled slugs for existing Venues, Songs, Shows, and Tours.
+
+### UI Fixes
+
+- **Components**: Created missing Shadcn UI components that were causing build failures:
+  - `frontend/components/ui/progress.tsx`
+  - `frontend/components/ui/checkbox.tsx`
+- **Auth**: Updated `AuthContext` to expose `token` for the Admin page.
+- **Build**: Resolved TypeScript errors; the build process starts correctly.
+
+## Current State
+
+- **Application**: Fully functional slug-based navigation. Links prefer slugs and fall back to IDs.
+- **Database**: `slug` columns added; migration `65c515b4722a_add_slugs` applied. `set_name` is still missing for most existing performances (displays as "Set ?").
+- **Codebase**: Clean and updated. `check_api.py` removed. `fix_db_data.py` exists but has a pagination bug if re-run.
+
+## Next Steps
+
+1. **Verify Data**: Check that slugs work correctly on the frontend.
+2. **Fix Set Names**:
+   - Fix pagination in `backend/fix_db_data.py` (check the API docs for correct page/limit handling; a possible guard is sketched below).
+   - Re-run `python3 fix_db_data.py` to populate `set_name` for existing performances.
+3. **Notifications**: Proceed with the planned Notification System implementation (Discord, Telegram).
+4. **Audit Items**: Continue auditing the site for missing features/pages.
+
+## Technical Notes
+
+- **Database Migrations**: Alembic history was manually adjusted to ignore the existing `reaction`/`badge` tables so the `slug` migration could run on the dev database.
+- **Importer**: `import_elgoose.py` logic is updated for *future* imports.
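The pagination bug called out in the handoff is not fixed in this diff. One possible guard, assuming the El Goose API simply re-serves the same payload when it ignores the `page` parameter, is to stop when a page repeats or a page cap is hit — verify the actual paging behaviour against the API docs before relying on this:

```python
# Hypothetical hardening of fetch_all_json from backend/fix_db_data.py; the API's
# paging behaviour and the max_pages cap are assumptions.
import time
import requests

BASE_URL = "https://elgoose.net/api/v2"

def fetch_all_json(endpoint, params=None, max_pages=50):
    all_data, page, prev_first = [], 1, None
    params = dict(params or {})
    while page <= max_pages:
        params["page"] = page
        resp = requests.get(f"{BASE_URL}/{endpoint}.json", params=params, timeout=30)
        if resp.status_code != 200:
            break
        items = resp.json().get("data", [])
        # Stop on an empty page, or if the API ignored `page` and returned the same data again.
        if not items or items[0] == prev_first:
            break
        prev_first = items[0]
        all_data.extend(items)
        page += 1
        time.sleep(0.5)
    return all_data
```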
diff --git a/frontend/app/page.tsx b/frontend/app/page.tsx
index 7b85c0c..0d4fbd2 100644
--- a/frontend/app/page.tsx
+++ b/frontend/app/page.tsx
@@ -8,22 +8,26 @@ import { getApiUrl } from "@/lib/api-config"
 
 interface Show {
   id: number
+  slug?: string
   date: string
   venue?: {
     id: number
     name: string
+    slug?: string
     city?: string
     state?: string
   }
   tour?: {
     id: number
     name: string
+    slug?: string
   }
 }
 
 interface Song {
   id: number
   title: string
+  slug?: string
   performance_count?: number
   avg_rating?: number
 }
@@ -118,7 +122,7 @@ export default async function Home() {
             {recentShows.length > 0 ? (
             {show.venue.city}, {show.venue.state} {show.venue.country}
diff --git a/frontend/app/shows/page.tsx b/frontend/app/shows/page.tsx
index 021df3a..25cd5c5 100644
--- a/frontend/app/shows/page.tsx
+++ b/frontend/app/shows/page.tsx
@@ -11,6 +11,7 @@ import { useSearchParams } from "next/navigation"
 
 interface Show {
   id: number
+  slug?: string
   date: string
   venue: {
     id: number
@@ -83,7 +84,7 @@ export default function ShowsPage() {