From f8b8ad7033633407b887d1d65136bd2b84a51bf2 Mon Sep 17 00:00:00 2001 From: fullsizemalt <106900403+fullsizemalt@users.noreply.github.com> Date: Fri, 19 Dec 2025 22:01:29 -0800 Subject: [PATCH] feat: Initial elmeg-demo with full-stack fandom archive platform --- .gitignore | 9 + .specify/constitution.md | 10 + .specify/plan.md | 48 + .specify/spec.md | 55 + .specify/tasks/00_project_setup.md | 24 + .specify/tasks/01_database_schema.md | 21 + .specify/tasks/02_auth_system.md | 16 + .specify/tasks/03_core_api.md | 15 + .specify/tasks/04_frontend_shell.md | 16 + .specify/tasks/05_social_features.md | 26 + .specify/tasks/06_gamification.md | 24 + .specify/tasks/07_advanced_content.md | 22 + .specify/tasks/08_moderation.md | 22 + .specify/tasks/09_review_enhancements.md | 17 + .specify/tasks/09_review_system.md | 27 + .specify/tasks/10_core_enhancements.md | 27 + .specify/tasks/11_user_preferences.md | 20 + .specify/tasks/12_groups.md | 25 + .../tasks/13_user_profile_enhancements.md | 25 + .specify/tasks/14_global_search.md | 25 + .specify/tasks/15_notifications.md | 25 + .specify/tasks/16_glossary.md | 111 + DEPLOY.md | 48 + LOCAL_DEV.md | 125 + README.md | 194 + VPS_HANDOFF.md | 130 + backend/Dockerfile | 10 + backend/alembic.ini | 147 + backend/alembic/README | 1 + backend/alembic/env.py | 79 + backend/alembic/script.py.mako | 28 + .../versions/1305863562e7_add_groups.py | 76 + .../32ebf231693a_add_user_preferences.py | 42 + .../341b95b6e098_add_gamification_models.py | 58 + .../366067fc1318_add_review_system.py | 48 + .../6659cb1e0ca5_add_review_targets.py | 45 + .../83e6fd46fa2b_add_moderation_system.py | 59 + .../a0b7abe57112_add_core_enhancements.py | 122 + .../a526deda28e0_add_notifications.py | 57 + ...dd_review_created_at_and_report_details.py | 53 + .../bc32a0b7efbb_add_performance_nicknames.py | 52 + .../c26cc8212061_add_social_models.py | 59 + .../f5ca1b7c50b1_initial_migration.py | 113 + backend/auth.py | 53 + backend/database.py | 17 + 
backend/dependencies.py | 15 + backend/import_elgoose.py | 296 + backend/import_songs_only.py | 199 + backend/main.py | 39 + backend/migrate_honking.py | 159 + backend/models.py | 275 + backend/quick_seed.py | 118 + backend/requirements.txt | 11 + backend/routers/artists.py | 37 + backend/routers/attendance.py | 77 + backend/routers/auth.py | 54 + backend/routers/badges.py | 32 + backend/routers/feed.py | 72 + backend/routers/groups.py | 128 + backend/routers/leaderboards.py | 94 + backend/routers/moderation.py | 83 + backend/routers/nicknames.py | 44 + backend/routers/notifications.py | 88 + backend/routers/performances.py | 93 + backend/routers/preferences.py | 42 + backend/routers/reviews.py | 51 + backend/routers/search.py | 83 + backend/routers/shows.py | 88 + backend/routers/social.py | 99 + backend/routers/songs.py | 59 + backend/routers/tours.py | 37 + backend/routers/users.py | 80 + backend/routers/venues.py | 41 + backend/schemas.py | 336 + backend/seed.py | 52 + backend/seed_activity.py | 399 + backend/seed_demo.py | 300 + backend/seed_output.txt | 72 + backend/services/stats.py | 71 + backend/test_seed.py | 13 + backend/tests/conftest.py | 56 + backend/tests/test_shows.py | 70 + docker-compose.yml | 42 + docs/API.md | 26 + docs/CHANGELOG.md | 67 + docs/DEVELOPER.md | 118 + docs/ROADMAP.md | 72 + docs/USER_GUIDE.md | 59 + frontend/.gitignore | 41 + frontend/Dockerfile | 11 + frontend/README.md | 36 + frontend/__tests__/badge-list.test.tsx | 18 + frontend/app/admin/layout.tsx | 64 + frontend/app/admin/nicknames/page.tsx | 12 + frontend/app/admin/page.tsx | 30 + frontend/app/admin/reports/page.tsx | 12 + frontend/app/archive/page.tsx | 33 + frontend/app/favicon.ico | Bin 0 -> 25931 bytes frontend/app/globals.css | 122 + frontend/app/groups/[id]/page.tsx | 83 + frontend/app/groups/create/page.tsx | 104 + frontend/app/groups/page.tsx | 69 + frontend/app/layout.tsx | 32 + frontend/app/leaderboards/page.tsx | 183 + frontend/app/mod/page.tsx | 154 + 
frontend/app/page.tsx | 75 + frontend/app/performances/[id]/page.tsx | 139 + frontend/app/profile/page.tsx | 172 + frontend/app/settings/page.tsx | 74 + frontend/app/shows/[id]/page.tsx | 155 + frontend/app/shows/page.tsx | 78 + frontend/app/songs/[id]/page.tsx | 116 + frontend/app/songs/page.tsx | 65 + frontend/app/tours/[id]/page.tsx | 123 + frontend/app/tours/page.tsx | 66 + frontend/app/venues/[id]/page.tsx | 128 + frontend/app/venues/page.tsx | 65 + frontend/components.json | 22 + frontend/components/admin/nickname-queue.tsx | 84 + frontend/components/admin/report-queue.tsx | 91 + frontend/components/feed/activity-feed.tsx | 73 + frontend/components/groups/group-feed.tsx | 80 + .../components/groups/join-group-button.tsx | 51 + frontend/components/layout/navbar.tsx | 82 + .../components/layout/notification-bell.tsx | 140 + frontend/components/profile/badge-list.tsx | 43 + .../profile/user-attendance-list.tsx | 54 + .../components/profile/user-groups-list.tsx | 54 + .../components/profile/user-reviews-list.tsx | 37 + .../components/reviews/entity-reviews.tsx | 106 + frontend/components/reviews/review-card.tsx | 39 + frontend/components/reviews/review-form.tsx | 69 + frontend/components/shows/show-attendance.tsx | 87 + .../shows/suggest-nickname-dialog.tsx | 117 + .../components/social/comment-section.tsx | 117 + frontend/components/social/entity-rating.tsx | 79 + frontend/components/social/social-wrapper.tsx | 25 + frontend/components/ui/attendance-button.tsx | 42 + frontend/components/ui/button.tsx | 59 + frontend/components/ui/card.tsx | 54 + frontend/components/ui/command.tsx | 184 + frontend/components/ui/comment-section.tsx | 62 + frontend/components/ui/dialog.tsx | 143 + frontend/components/ui/dropdown-menu.tsx | 200 + frontend/components/ui/input.tsx | 25 + frontend/components/ui/label.tsx | 24 + frontend/components/ui/popover.tsx | 48 + frontend/components/ui/search-dialog.tsx | 161 + frontend/components/ui/star-rating.tsx | 44 + 
frontend/components/ui/switch.tsx | 25 + frontend/components/ui/tabs.tsx | 66 + frontend/components/ui/textarea.tsx | 24 + frontend/components/ui/wiki-text.tsx | 61 + frontend/contexts/preferences-context.tsx | 90 + frontend/eslint.config.mjs | 18 + frontend/jest.config.js | 18 + frontend/jest.setup.js | 1 + frontend/lib/api-config.ts | 8 + frontend/lib/utils.ts | 6 + frontend/next.config.ts | 7 + frontend/package-lock.json | 11915 ++++++++++++++++ frontend/package.json | 44 + frontend/postcss.config.mjs | 7 + frontend/public/file.svg | 1 + frontend/public/globe.svg | 1 + frontend/public/next.svg | 1 + frontend/public/vercel.svg | 1 + frontend/public/window.svg | 1 + frontend/tsconfig.json | 34 + 169 files changed, 23558 insertions(+) create mode 100644 .gitignore create mode 100644 .specify/constitution.md create mode 100644 .specify/plan.md create mode 100644 .specify/spec.md create mode 100644 .specify/tasks/00_project_setup.md create mode 100644 .specify/tasks/01_database_schema.md create mode 100644 .specify/tasks/02_auth_system.md create mode 100644 .specify/tasks/03_core_api.md create mode 100644 .specify/tasks/04_frontend_shell.md create mode 100644 .specify/tasks/05_social_features.md create mode 100644 .specify/tasks/06_gamification.md create mode 100644 .specify/tasks/07_advanced_content.md create mode 100644 .specify/tasks/08_moderation.md create mode 100644 .specify/tasks/09_review_enhancements.md create mode 100644 .specify/tasks/09_review_system.md create mode 100644 .specify/tasks/10_core_enhancements.md create mode 100644 .specify/tasks/11_user_preferences.md create mode 100644 .specify/tasks/12_groups.md create mode 100644 .specify/tasks/13_user_profile_enhancements.md create mode 100644 .specify/tasks/14_global_search.md create mode 100644 .specify/tasks/15_notifications.md create mode 100644 .specify/tasks/16_glossary.md create mode 100644 DEPLOY.md create mode 100644 LOCAL_DEV.md create mode 100644 README.md create mode 100644 VPS_HANDOFF.md 
create mode 100644 backend/Dockerfile create mode 100644 backend/alembic.ini create mode 100644 backend/alembic/README create mode 100644 backend/alembic/env.py create mode 100644 backend/alembic/script.py.mako create mode 100644 backend/alembic/versions/1305863562e7_add_groups.py create mode 100644 backend/alembic/versions/32ebf231693a_add_user_preferences.py create mode 100644 backend/alembic/versions/341b95b6e098_add_gamification_models.py create mode 100644 backend/alembic/versions/366067fc1318_add_review_system.py create mode 100644 backend/alembic/versions/6659cb1e0ca5_add_review_targets.py create mode 100644 backend/alembic/versions/83e6fd46fa2b_add_moderation_system.py create mode 100644 backend/alembic/versions/a0b7abe57112_add_core_enhancements.py create mode 100644 backend/alembic/versions/a526deda28e0_add_notifications.py create mode 100644 backend/alembic/versions/b16ef2228130_add_review_created_at_and_report_details.py create mode 100644 backend/alembic/versions/bc32a0b7efbb_add_performance_nicknames.py create mode 100644 backend/alembic/versions/c26cc8212061_add_social_models.py create mode 100644 backend/alembic/versions/f5ca1b7c50b1_initial_migration.py create mode 100644 backend/auth.py create mode 100644 backend/database.py create mode 100644 backend/dependencies.py create mode 100644 backend/import_elgoose.py create mode 100644 backend/import_songs_only.py create mode 100644 backend/main.py create mode 100644 backend/migrate_honking.py create mode 100644 backend/models.py create mode 100644 backend/quick_seed.py create mode 100644 backend/requirements.txt create mode 100644 backend/routers/artists.py create mode 100644 backend/routers/attendance.py create mode 100644 backend/routers/auth.py create mode 100644 backend/routers/badges.py create mode 100644 backend/routers/feed.py create mode 100644 backend/routers/groups.py create mode 100644 backend/routers/leaderboards.py create mode 100644 backend/routers/moderation.py create mode 100644 
backend/routers/nicknames.py create mode 100644 backend/routers/notifications.py create mode 100644 backend/routers/performances.py create mode 100644 backend/routers/preferences.py create mode 100644 backend/routers/reviews.py create mode 100644 backend/routers/search.py create mode 100644 backend/routers/shows.py create mode 100644 backend/routers/social.py create mode 100644 backend/routers/songs.py create mode 100644 backend/routers/tours.py create mode 100644 backend/routers/users.py create mode 100644 backend/routers/venues.py create mode 100644 backend/schemas.py create mode 100644 backend/seed.py create mode 100644 backend/seed_activity.py create mode 100644 backend/seed_demo.py create mode 100644 backend/seed_output.txt create mode 100644 backend/services/stats.py create mode 100644 backend/test_seed.py create mode 100644 backend/tests/conftest.py create mode 100644 backend/tests/test_shows.py create mode 100644 docker-compose.yml create mode 100644 docs/API.md create mode 100644 docs/CHANGELOG.md create mode 100644 docs/DEVELOPER.md create mode 100644 docs/ROADMAP.md create mode 100644 docs/USER_GUIDE.md create mode 100644 frontend/.gitignore create mode 100644 frontend/Dockerfile create mode 100644 frontend/README.md create mode 100644 frontend/__tests__/badge-list.test.tsx create mode 100644 frontend/app/admin/layout.tsx create mode 100644 frontend/app/admin/nicknames/page.tsx create mode 100644 frontend/app/admin/page.tsx create mode 100644 frontend/app/admin/reports/page.tsx create mode 100644 frontend/app/archive/page.tsx create mode 100644 frontend/app/favicon.ico create mode 100644 frontend/app/globals.css create mode 100644 frontend/app/groups/[id]/page.tsx create mode 100644 frontend/app/groups/create/page.tsx create mode 100644 frontend/app/groups/page.tsx create mode 100644 frontend/app/layout.tsx create mode 100644 frontend/app/leaderboards/page.tsx create mode 100644 frontend/app/mod/page.tsx create mode 100644 frontend/app/page.tsx create 
mode 100644 frontend/app/performances/[id]/page.tsx create mode 100644 frontend/app/profile/page.tsx create mode 100644 frontend/app/settings/page.tsx create mode 100644 frontend/app/shows/[id]/page.tsx create mode 100644 frontend/app/shows/page.tsx create mode 100644 frontend/app/songs/[id]/page.tsx create mode 100644 frontend/app/songs/page.tsx create mode 100644 frontend/app/tours/[id]/page.tsx create mode 100644 frontend/app/tours/page.tsx create mode 100644 frontend/app/venues/[id]/page.tsx create mode 100644 frontend/app/venues/page.tsx create mode 100644 frontend/components.json create mode 100644 frontend/components/admin/nickname-queue.tsx create mode 100644 frontend/components/admin/report-queue.tsx create mode 100644 frontend/components/feed/activity-feed.tsx create mode 100644 frontend/components/groups/group-feed.tsx create mode 100644 frontend/components/groups/join-group-button.tsx create mode 100644 frontend/components/layout/navbar.tsx create mode 100644 frontend/components/layout/notification-bell.tsx create mode 100644 frontend/components/profile/badge-list.tsx create mode 100644 frontend/components/profile/user-attendance-list.tsx create mode 100644 frontend/components/profile/user-groups-list.tsx create mode 100644 frontend/components/profile/user-reviews-list.tsx create mode 100644 frontend/components/reviews/entity-reviews.tsx create mode 100644 frontend/components/reviews/review-card.tsx create mode 100644 frontend/components/reviews/review-form.tsx create mode 100644 frontend/components/shows/show-attendance.tsx create mode 100644 frontend/components/shows/suggest-nickname-dialog.tsx create mode 100644 frontend/components/social/comment-section.tsx create mode 100644 frontend/components/social/entity-rating.tsx create mode 100644 frontend/components/social/social-wrapper.tsx create mode 100644 frontend/components/ui/attendance-button.tsx create mode 100644 frontend/components/ui/button.tsx create mode 100644 frontend/components/ui/card.tsx 
create mode 100644 frontend/components/ui/command.tsx create mode 100644 frontend/components/ui/comment-section.tsx create mode 100644 frontend/components/ui/dialog.tsx create mode 100644 frontend/components/ui/dropdown-menu.tsx create mode 100644 frontend/components/ui/input.tsx create mode 100644 frontend/components/ui/label.tsx create mode 100644 frontend/components/ui/popover.tsx create mode 100644 frontend/components/ui/search-dialog.tsx create mode 100644 frontend/components/ui/star-rating.tsx create mode 100644 frontend/components/ui/switch.tsx create mode 100644 frontend/components/ui/tabs.tsx create mode 100644 frontend/components/ui/textarea.tsx create mode 100644 frontend/components/ui/wiki-text.tsx create mode 100644 frontend/contexts/preferences-context.tsx create mode 100644 frontend/eslint.config.mjs create mode 100644 frontend/jest.config.js create mode 100644 frontend/jest.setup.js create mode 100644 frontend/lib/api-config.ts create mode 100644 frontend/lib/utils.ts create mode 100644 frontend/next.config.ts create mode 100644 frontend/package-lock.json create mode 100644 frontend/package.json create mode 100644 frontend/postcss.config.mjs create mode 100644 frontend/public/file.svg create mode 100644 frontend/public/globe.svg create mode 100644 frontend/public/next.svg create mode 100644 frontend/public/vercel.svg create mode 100644 frontend/public/window.svg create mode 100644 frontend/tsconfig.json diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..447d941 --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +venv/ +__pycache__/ +*.pyc +*.db +node_modules/ +.next/ +.env +*.log +.DS_Store diff --git a/.specify/constitution.md b/.specify/constitution.md new file mode 100644 index 0000000..d1a1dff --- /dev/null +++ b/.specify/constitution.md @@ -0,0 +1,10 @@ +# Project Constitution + +## Principles + +1. **Code Quality & Integrity**: We commit to peer review, test-driven development, and maintaining high code standards. +2. 
**Privacy & Security**: User privacy is paramount. We support multi-identity, end-to-end encryption, and granular control over shared data. +3. **Openness & Fairness**: Moderation must be transparent and fair. We value community trust. +4. **Extensibility**: The architecture must be modular to support future verticals (music, TV, etc.) without technical debt. +5. **Universal Freemium**: A clear, balanced freemium model applies to all verticals, ensuring value for both free and paid users. +6. **Accessibility & Usability**: The platform must be accessible to all audiences (WCAG 2.2) and usable across devices (mobile-first). diff --git a/.specify/plan.md b/.specify/plan.md new file mode 100644 index 0000000..01c9863 --- /dev/null +++ b/.specify/plan.md @@ -0,0 +1,48 @@ +# Technical Plan + +## Architecture +- **MVP**: Scalable, modular cloud-native architecture (API-first, microservices recommended). +- **Backend**: Python/Node.js (TBD based on specific needs, likely Python for data/graph heavy lifting). +- **Frontend**: Modern web framework (React/Next.js). + +## Data +- **Storage**: Flexible graph-based or relational database with advanced indexing (e.g., Neo4j + PostgreSQL). + +## Authentication & Security +- **Auth**: OAuth2, multi-factor auth (MFA), end-to-end encryption for private data. +- **Privacy**: Robust user privacy controls. + +## New Feature Implementation +- **Social**: + - **Reviews**: `Review` table with `blurb` (short text), `content` (long text), `score` (int), and FKs. + - **Comments**: Polymorphic association (Comment -> EntityID, EntityType) or separate tables per entity. + - **Ratings**: `UserRating` table linking User -> Performance/Show. +- **Gamification**: + - **Stats**: Async jobs (Celery/ARQ) to recalculate stats on data changes. + - **Badges**: Rule engine to award badges based on user activity events. +- **Advanced Content**: + - **Nicknames**: `PerformanceNickname` table with `status` (suggested, approved) for moderation. 
+- **Moderation**: + - **RBAC**: Role-Based Access Control in API dependencies. + +## Moderation System +- **Layered Model**: + - Automated AI filtering for high-volume content. + - User reporting mechanisms. + - Human review tools with clear guidelines. + +## Core Entities (Data Architecture) +- **Core**: `Vertical` -> `Show` -> `Performance` -> `Song`. +- **Common Fields**: All entities include `notes` (Text) for flexible metadata (guests, trivia). +- **Grouping**: `Tour` (one-to-many with Show). +- **User Data**: `UserAttendance` (User <-> Show), `UserPreferences` (JSON or table for settings like "Wiki Mode"). +- **Metadata**: `Tag` and `EntityTag` (Polymorphic tagging). +- **Entities**: `Venue`, `Artist` (Musicians/Guests), `Character` (for non-music verticals). + +## Integrations +- **Launch**: Setlist.fm APIs, TV meta providers. +- **Future**: Event hooks for external integrations. + +## Accessibility +- **Standards**: WCAG 2.2 compliance. +- **Design**: Mobile-first responsive design. diff --git a/.specify/spec.md b/.specify/spec.md new file mode 100644 index 0000000..9a4c19f --- /dev/null +++ b/.specify/spec.md @@ -0,0 +1,55 @@ +# Functional Specification + +## Goal +Build a platform for fandom communities (jam bands, TV shows, additional verticals) that enables archival, discovery, cross-referencing, and rich social interaction around content (songs, shows, episodes). + +## User Roles +- **Visitor**: Can explore public content, search, try basic mind map tools (limited), and read reviews/stats. +- **Registered User**: One account, multi-identity across verticals; can participate, comment, tag, vote, make private groups, earn achievements. +- **Moderator/Admin**: Oversee content, resolve disputes, manage users/groups. +- **Band/Brand/Show Owner**: Create and administer white-labeled or private community spaces. + +## Core Features +- **Massive Archive**: Shows/episodes, performances, venues, metadata (setlists, dates, locations, characters, etc.). 
+- **Tours**: Grouping shows into tours (e.g., "Fall 2023", "Summer Tour"). +- **User Attendance**: "I was there" tracking for personal stats and badges. +- **Entity Notes**: Rich text notes on Shows, Venues, Songs, and Tours to handle details like "Guest Artist", "Cover Song", "Original Artist", or "Historical Context". +- **Artist Tracking**: First-class support for Band Members and Guest Musicians. Tag them in shows/performances to track their stats and history wiki-style. +- **Advanced Tagging & Search**: Wiki-style linking (Obsidian-style backlinks), cross-vertical support. +- **Tags**: Granular tagging for shows and songs (e.g., "Soundcheck", "Tease", "Jam"). +- **Mind Maps**: Visual tools for connecting entities (episode, show, tour, performance, character, venue). +- **Discussion & Social**: Forums, commenting, voting, peer ranking, reviewing. +- **"Just Saw" Mode**: Live post-event discussion for instant impressions and stats. +- **Community Stats**: Trending content, charts, "On this Day," automated highlights. + +## New Features (from Honkingversion Comparison) +- **Social Interaction**: + - **Reviews**: Formal reviews with a 1-10 rating, a "blurb" (one-liner/pullquote), and full text. Distinct from comments. + - **Comments**: Threaded discussions on Shows, Songs, and Venues. + - **Ratings/Voting**: Quick 1-10 ratings for Performances and Shows. + - **Activity Feed**: Global and user-specific activity streams. +- **Gamification**: + - **Badges**: Achievements for attendance, ratings, and contributions. + - **Stats Engine**: "Times Played", "Gap Charts", "Personal Stats". +- **Advanced Content**: + - **Performance Nicknames ("City Songs")**: Community-suggested aliases for specific performances (e.g., "Tahoe Tweezer"). + - **Sequences**: Explicit tracking of song transitions (Segues). +- **Enhanced Moderation**: + - **Role Hierarchy**: Power User, Moderator, Admin. + - **Moderation Queue**: Approval workflow for Nicknames and content reports. 
+ +## Social & Privacy +- **Visibility Control**: Pseudonymous or linked profiles per vertical. +- **Interaction**: Tagging, mentions, friend/follow, group/private DM. +- **Interaction**: Tagging, mentions, friend/follow, group/private DM. +- **Wiki Mode**: Granular control to disable social overlays (comments, ratings) for a pure archive experience. +- **Groups**: White-label and private group support. + +## Monetization (Freemium) +- **Free**: Access to archive, basic stats, read-only community. +- **Premium**: + - **Wiki Mode**: Distraction-free browsing (no social clutter). + - **Advanced Stats**: Deep analytics, gap charts, personal attendance stats. + - **Offline Mode**: Download setlists/stats for offline viewing. + - **Badges**: Exclusive profile badges. +- **Add-ons**: Purchasable extras for groups and power users. diff --git a/.specify/tasks/00_project_setup.md b/.specify/tasks/00_project_setup.md new file mode 100644 index 0000000..6a8418c --- /dev/null +++ b/.specify/tasks/00_project_setup.md @@ -0,0 +1,24 @@ +# Task: Project Setup + +## Objective +Initialize the repository with the necessary boilerplate for a monorepo structure (or separate directories) containing the Backend (Python/FastAPI) and Frontend (Next.js). + +## Steps +- [ ] Initialize git repository (if not already done). +- [ ] Create `backend/` directory. + - [ ] Initialize Python environment (poetry or venv). + - [ ] Install FastAPI, Uvicorn. + - [ ] Create basic `main.py` "Hello World". +- [ ] Create `frontend/` directory. + - [ ] Initialize Next.js app (`npx create-next-app`). + - [ ] Install basic dependencies (Tailwind, Lucide, etc.). +- [ ] Create `docker-compose.yml` for orchestration. + - [ ] Define backend service. + - [ ] Define frontend service. + - [ ] Define database service (PostgreSQL). + - [ ] Verify local development environment runs. + +## Acceptance Criteria +- `docker-compose up` starts all services. +- Frontend is accessible at localhost:3000. 
+- Backend API is accessible at localhost:8000. diff --git a/.specify/tasks/01_database_schema.md b/.specify/tasks/01_database_schema.md new file mode 100644 index 0000000..cca35a5 --- /dev/null +++ b/.specify/tasks/01_database_schema.md @@ -0,0 +1,21 @@ +# Task: Database Schema Design + +## Objective +Design and implement the initial database schema to support the core "Massive Archive" feature. + +## Steps +- [ ] Choose ORM (SQLAlchemy or Prisma). +- [ ] Define models for: + - `User` (with multi-identity support placeholders). + - `Band` / `Vertical`. + - `Show` / `Event`. + - `Venue`. + - `Song` / `Content`. + - `Performance` (linking Song to Show). + - `Setlist`. +- [ ] Create migration scripts. +- [ ] Seed initial data (e.g., one band, a few shows) for testing. + +## Acceptance Criteria +- Database schema is applied to the PostgreSQL instance. +- Can query and retrieve seeded data via direct DB connection. diff --git a/.specify/tasks/02_auth_system.md b/.specify/tasks/02_auth_system.md new file mode 100644 index 0000000..cea3537 --- /dev/null +++ b/.specify/tasks/02_auth_system.md @@ -0,0 +1,16 @@ +# Task: Authentication System + +## Objective +Implement the user authentication system supporting the "Registered User" role and privacy controls. + +## Steps +- [ ] Implement OAuth2 flow (Google/GitHub providers for MVP). +- [ ] Create User registration/login API endpoints. +- [ ] Implement JWT token issuance and validation. +- [ ] Create "Identity" model to allow one User to have multiple profiles (pseudonymity). +- [ ] Protect API routes with auth dependencies. + +## Acceptance Criteria +- User can sign up and log in. +- Protected routes reject unauthenticated requests. +- User can create a second "Identity" profile. 
diff --git a/.specify/tasks/03_core_api.md b/.specify/tasks/03_core_api.md new file mode 100644 index 0000000..5edb87e --- /dev/null +++ b/.specify/tasks/03_core_api.md @@ -0,0 +1,15 @@ +# Task: Core API Development + +## Objective +Build the read/write API endpoints for the core archive content. + +## Steps +- [ ] Create CRUD endpoints for `Shows`. +- [ ] Create CRUD endpoints for `Venues`. +- [ ] Create CRUD endpoints for `Songs`. +- [ ] Implement search functionality (basic text search first). +- [ ] Implement "Tagging" system (linking entities). + +## Acceptance Criteria +- Can create, read, update, and delete core entities via API. +- Search returns relevant results. diff --git a/.specify/tasks/04_frontend_shell.md b/.specify/tasks/04_frontend_shell.md new file mode 100644 index 0000000..04611ab --- /dev/null +++ b/.specify/tasks/04_frontend_shell.md @@ -0,0 +1,16 @@ +# Task: Frontend Shell & Navigation + +## Objective +Create the basic UI structure, navigation, and layout for the application. + +## Steps +- [ ] Setup Global Layout (Navbar, Footer, Sidebar). +- [ ] Create "Home" page. +- [ ] Create "Archive" browse page (list view of Shows/Bands). +- [ ] Create "Detail" pages (Show detail, Song detail). +- [ ] Implement client-side routing. +- [ ] Integrate Shadcn/UI or similar component library for consistent design. + +## Acceptance Criteria +- User can navigate between Home, Archive, and Detail pages. +- UI is responsive (mobile/desktop). diff --git a/.specify/tasks/05_social_features.md b/.specify/tasks/05_social_features.md new file mode 100644 index 0000000..bb88961 --- /dev/null +++ b/.specify/tasks/05_social_features.md @@ -0,0 +1,26 @@ +# Task: Social Features (Comments & Ratings) + +## Objective + +Implement social interaction layers allowing users to comment on and rate content. + +## Steps + +- [x] **Database Schema**: + - [x] Create `Comment` model (user_id, content, timestamp, foreign keys to Show/Song/Venue). 
+ - [x] Create `Rating` model (user_id, score, foreign keys). +- [x] **API Endpoints**: + - [x] POST /comments/ (Create comment) + - [x] GET /comments/{entity_type}/{entity_id} (List comments) + - [x] POST /ratings/ (Rate an item) + - [x] GET /ratings/average/{entity_type}/{entity_id} (Get average score) +- [x] **Frontend**: + - [x] Create `CommentSection` component. + - [x] Create `StarRating` component. + - [x] Integrate into Show Detail page. + +## Acceptance Criteria + +- Users can leave comments on a Show. +- Users can rate a Show. +- Average rating is displayed. diff --git a/.specify/tasks/06_gamification.md b/.specify/tasks/06_gamification.md new file mode 100644 index 0000000..ccb106c --- /dev/null +++ b/.specify/tasks/06_gamification.md @@ -0,0 +1,24 @@ +# Task: Gamification (Stats & Badges) + +## Objective + +Implement a stats engine and badge system to reward user engagement. + +## Steps + +- [x] **Stats Engine**: + - [x] Create service to calculate "Times Played" for songs. + - [x] Create service to calculate "Gap" (shows since last played). + - [x] Expose stats via `Song` and `Show` API responses. +- [x] **Badges**: + - [x] Create `Badge` model (name, icon, description). + - [x] Create `UserBadge` model (user_id, badge_id, awarded_at). + - [x] Implement logic to award badges (e.g., "First Review", "10 Shows Attended"). +- [x] **Frontend**: + - [x] Display Song Stats on Song Detail page. + - [x] Display User Badges on Profile page. + +## Acceptance Criteria + +- Song page shows "Times Played" and "Gap". +- User profile displays earned badges. diff --git a/.specify/tasks/07_advanced_content.md b/.specify/tasks/07_advanced_content.md new file mode 100644 index 0000000..39603f0 --- /dev/null +++ b/.specify/tasks/07_advanced_content.md @@ -0,0 +1,22 @@ +# Task: Advanced Content (Nicknames & Sequences) + +## Objective + +Implement "City Songs" (Performance Nicknames) and better sequence tracking. 
+ +## Steps + +- [x] **Performance Nicknames**: + - [x] Create `PerformanceNickname` model (performance_id, nickname, status, suggested_by). + - [x] Create API for suggesting nicknames. + - [x] Create API for approving/rejecting nicknames (Moderator only). +- [x] **Sequences**: + - [x] Update `Performance` model to better handle transitions (>, ->). +- [x] **Frontend**: + - [x] Display approved nicknames on Show Detail page (e.g., "Tahoe Tweezer"). + - [x] Add UI to suggest a nickname. + +## Acceptance Criteria + +- Users can suggest a nickname. +- Approved nicknames appear on the setlist. diff --git a/.specify/tasks/08_moderation.md b/.specify/tasks/08_moderation.md new file mode 100644 index 0000000..034e869 --- /dev/null +++ b/.specify/tasks/08_moderation.md @@ -0,0 +1,22 @@ +# Task: Moderation System + +## Objective + +Implement role-based access control and a moderation queue. + +## Steps + +- [x] **Roles**: + - [x] Update `User` model with `role` field (User, Moderator, Admin). + - [x] Create `RoleChecker` dependency for API routes. +- [x] **Moderation Queue**: + - [x] Create API endpoint to list pending items (Nicknames, Reports). + - [x] Create API endpoint to approve/reject items. +- [x] **Frontend**: + - [x] Create "Moderator Dashboard" page. + - [x] Display pending queue with Approve/Reject actions. + +## Acceptance Criteria + +- Only Moderators can access the Dashboard. +- Moderators can approve/reject pending nicknames. diff --git a/.specify/tasks/09_review_enhancements.md b/.specify/tasks/09_review_enhancements.md new file mode 100644 index 0000000..290f396 --- /dev/null +++ b/.specify/tasks/09_review_enhancements.md @@ -0,0 +1,17 @@ +# Task: Review Enhancements (Blurb Field) + +## Objective +Add a "blurb" (one-liner/pullquote) field to comments/reviews to allow for better display in compact UI lists. + +## Steps +- [ ] **Backend**: + - [ ] Update `Comment` model in `models.py` to include `blurb: Optional[str]`. 
+ - [ ] Update `Comment` schemas in `schemas.py`. + - [ ] Generate and apply migration. +- [ ] **Frontend**: + - [ ] Update `CommentSection` to allow inputting a blurb. + - [ ] Update `CommentSection` to display the blurb (e.g., as a bold lead-in). + +## Acceptance Criteria +- User can add a short blurb when creating a comment. +- Blurb is displayed prominently in the comment list. diff --git a/.specify/tasks/09_review_system.md b/.specify/tasks/09_review_system.md new file mode 100644 index 0000000..e8241ff --- /dev/null +++ b/.specify/tasks/09_review_system.md @@ -0,0 +1,27 @@ +# Task: Review System (Dedicated Model) + +## Objective + +Implement a dedicated **Review** system, distinct from Comments. Reviews allow users to share their opinion with a rating, a "one-liner" blurb, and full text. + +## Steps + +- [x] **Backend**: + - [x] Create `Review` model in `models.py`: + - `blurb`: str (The one-liner/pullquote). + - `content`: str (Full review text). + - `score`: int (1-10 rating). + - Foreign keys to User, Show, Venue, Song. + - [x] Create `Review` schemas in `schemas.py`. + - [x] Create `routers/reviews.py` for CRUD operations. + - [x] Generate and apply migrations. +- [x] **Frontend**: + - [x] Create `ReviewCard` component (displaying Blurb prominently). + - [x] Create `ReviewForm` component (inputs for Blurb, Content, Rating). + - [x] Integrate into Show Detail page (separate tab or section from Comments). + +## Acceptance Criteria + +- User can submit a Review with a blurb. +- Reviews are displayed with the blurb highlighted. +- Comments remain separate for discussion. diff --git a/.specify/tasks/10_core_enhancements.md b/.specify/tasks/10_core_enhancements.md new file mode 100644 index 0000000..fc2f9b1 --- /dev/null +++ b/.specify/tasks/10_core_enhancements.md @@ -0,0 +1,27 @@ +# Task: Core Enhancements (Tours, Attendance, Tags) + +## Objective + +Implement missing core entities that are essential for a complete fandom archive. 
+ +## Steps + +- [x] **Backend**: + - [x] Create `Tour` model (name, start_date, end_date, notes). + - [x] Create `Artist` model (name, instrument, notes) and `ShowArtist`/`PerformanceArtist` link tables. + - [x] Update `Show`, `Venue`, `Song` models to include `notes: Optional[str]`. + - [x] Update `Show` model to include `tour_id`. + - [x] Create `Attendance` model (user_id, show_id, notes). + - [x] Create `Tag` model (name, slug) and `EntityTag` link table. + - [x] Update Schemas and API Routers. + - [x] Generate and apply migrations. +- [x] **Frontend**: + - [x] Add "I was there" button to Show Detail page. + - [x] Display Tour info on Show Detail page. + - [x] Display Tags on Show/Song pages. + +## Acceptance Criteria + +- Users can mark attendance. +- Shows belong to a Tour. +- Content can be tagged. diff --git a/.specify/tasks/11_user_preferences.md b/.specify/tasks/11_user_preferences.md new file mode 100644 index 0000000..991459a --- /dev/null +++ b/.specify/tasks/11_user_preferences.md @@ -0,0 +1,20 @@ +# Task: User Preferences (Wiki Mode) + +## Objective + +Implement a preference system to allow users to toggle "Wiki Mode" (disable social features). + +## Steps + +- [x] **Backend**: + - [x] Create `UserPreferences` model (user_id, wiki_mode: bool, show_ratings: bool, show_comments: bool). + - [x] Create API endpoints to Get/Update preferences. +- [x] **Frontend**: + - [x] Create `Settings` page. + - [x] Create `SocialToggle` component. + - [x] Update `ShowDetail` and other pages to conditionally render social components based on preferences. + +## Acceptance Criteria + +- User can toggle "Wiki Mode" in settings. +- When enabled, Comments and Ratings are hidden from the UI. 
diff --git a/.specify/tasks/12_groups.md b/.specify/tasks/12_groups.md new file mode 100644 index 0000000..8b19a66 --- /dev/null +++ b/.specify/tasks/12_groups.md @@ -0,0 +1,25 @@ +# Task: Groups / Communities + +## Objective + +Implement a system for users to create and join groups (e.g., "NYC Phans", "Rail Riders"). Groups can have their own discussions and potentially private content. + +## Steps + +- [x] **Backend**: + - [x] Create `Group` model (name, description, privacy_level, created_by). + - [x] Create `GroupMember` model (group_id, user_id, role, joined_at). + - [x] Create `GroupPost` model (group_id, user_id, content, created_at). + - [x] Create API Routers for Groups, Members, and Posts. + - [x] Generate and apply migrations. +- [x] **Frontend**: + - [x] Create `GroupsListPage` (browse/search groups). + - [x] Create `GroupDetailPage` (feed, members, join button). + - [x] Create `CreateGroupDialog`. + - [x] Create `GroupPostForm`. + +## Acceptance Criteria + +- Users can create a group. +- Users can join a public group. +- Group members can post to the group feed. diff --git a/.specify/tasks/13_user_profile_enhancements.md b/.specify/tasks/13_user_profile_enhancements.md new file mode 100644 index 0000000..01886db --- /dev/null +++ b/.specify/tasks/13_user_profile_enhancements.md @@ -0,0 +1,25 @@ +# Task: User Profile Enhancements + +## Objective + +Enhance the User Profile page to display a comprehensive overview of the user's activity, including attendance, reviews, ratings, and group memberships. + +## Steps + +- [x] **Backend**: + - [x] Create `routers/users.py` to handle user-specific data fetching (or extend `auth.py`). + - [x] Add endpoint `GET /users/{user_id}/attendance` (or `GET /attendance/me` which exists, but maybe we need public profiles too?). + - [x] Add endpoint `GET /users/{user_id}/reviews`. + - [x] Add endpoint `GET /users/{user_id}/groups`. + - [x] Add endpoint `GET /users/{user_id}/stats` (counts for shows, reviews, etc.). 
+- [x] **Frontend**: + - [x] Update `ProfilePage` to use Tabs (Overview, Attendance, Reviews, Groups). + - [x] Create `UserAttendanceList` component. + - [x] Create `UserReviewsList` component. + - [x] Create `UserGroupsList` component. + +## Acceptance Criteria + +- User can see their attended shows. +- User can see their written reviews. +- User can see groups they have joined. diff --git a/.specify/tasks/14_global_search.md b/.specify/tasks/14_global_search.md new file mode 100644 index 0000000..9181e35 --- /dev/null +++ b/.specify/tasks/14_global_search.md @@ -0,0 +1,25 @@ +# Task: Global Search + +## Objective + +Implement a global search functionality allowing users to find Shows, Songs, Venues, Tours, and Users from a single interface. + +## Steps + +- [x] **Backend**: + - [x] Create `routers/search.py`. + - [x] Implement `GET /search` endpoint accepting a query string `q`. + - [x] Perform ILIKE/Fuzzy searches across `Show`, `Song`, `Venue`, `Tour`, `User`, `Group` tables. + - [x] Return categorized results. +- [x] **Frontend**: + - [x] Install `cmdk` (Command K) library. + - [x] Create `SearchDialog` component. + - [x] Integrate `SearchDialog` into the main `Layout` or `Navbar`. + - [x] Implement keyboard shortcut (`Cmd+K` / `Ctrl+K`) to open search. + +## Acceptance Criteria + +- User can open search with Cmd+K or by clicking a search icon. +- Searching for "Tweezer" returns Songs named Tweezer. +- Searching for "MSG" returns Venues like Madison Square Garden. +- Searching for "1997" returns Shows or Years. diff --git a/.specify/tasks/15_notifications.md b/.specify/tasks/15_notifications.md new file mode 100644 index 0000000..368aca8 --- /dev/null +++ b/.specify/tasks/15_notifications.md @@ -0,0 +1,25 @@ +# Task: Notifications System + +## Objective + +Implement a notification system to alert users of relevant interactions (replies, mentions, group activity). 
+ +## Steps + +- [x] **Backend**: + - [x] Create `Notification` model (user_id, type, title, message, link, is_read, created_at). + - [x] Create `routers/notifications.py`. + - [x] Implement `GET /notifications` (list my notifications). + - [x] Implement `POST /notifications/{id}/read` (mark as read). + - [x] Implement logic to trigger notifications (e.g., when a user receives a reply). +- [x] **Frontend**: + - [x] Create `NotificationBell` component in the Navbar. + - [x] Show unread count badge. + - [x] Create `NotificationsPopover` or `NotificationsPage` to view list. + - [x] Handle "Mark as Read" interaction. + +## Acceptance Criteria + +- User receives a notification when someone replies to their comment. +- User sees an unread count in the navbar. +- Clicking a notification navigates to the relevant content. diff --git a/.specify/tasks/16_glossary.md b/.specify/tasks/16_glossary.md new file mode 100644 index 0000000..8ae9490 --- /dev/null +++ b/.specify/tasks/16_glossary.md @@ -0,0 +1,111 @@ +# Glossary Feature Specification + +## Overview + +A wiki-style glossary system for defining and explaining fandom-specific terms, slang, and concepts. Users can suggest entries, and moderators/admins approve them before publication. 
+ +## Use Cases + +- **Jam Band Terms**: "Bustout", "Tease", "Segue", "Type II Jam" +- **Venue Nicknames**: "The Gorge", "Red Rocks" +- **Song Nicknames**: Already handled by `PerformanceNickname`, but glossary can define broader terms +- **Cultural References**: "Couch Tour", "Lot Scene", "Heady" + +## Data Model + +### GlossaryEntry + +```python +class GlossaryEntry(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + term: str = Field(unique=True, index=True) + definition: str = Field(description="Main definition text") + example: Optional[str] = Field(default=None, description="Usage example") + category: str = Field(default="general", index=True) # general, venue, song, culture + status: str = Field(default="pending", index=True) # pending, approved, rejected + suggested_by: int = Field(foreign_key="user.id") + created_at: datetime = Field(default_factory=datetime.utcnow) + updated_at: datetime = Field(default_factory=datetime.utcnow) + + user: "User" = Relationship() +``` + +### GlossaryEdit (Revision History) + +```python +class GlossaryEdit(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + entry_id: int = Field(foreign_key="glossaryentry.id") + user_id: int = Field(foreign_key="user.id") + field_changed: str # definition, example, category + old_value: str + new_value: str + status: str = Field(default="pending") # pending, approved, rejected + created_at: datetime = Field(default_factory=datetime.utcnow) + + entry: "GlossaryEntry" = Relationship() + user: "User" = Relationship() +``` + +## API Endpoints + +### Public + +- `GET /glossary/` - List all approved entries (with search/filter) +- `GET /glossary/{term}` - Get a specific entry by term + +### Authenticated + +- `POST /glossary/` - Suggest a new entry +- `POST /glossary/{id}/edit` - Suggest an edit to an existing entry + +### Moderator/Admin + +- `GET /moderation/queue/glossary` - List pending entries +- `PUT 
/moderation/glossary/{id}/{action}` - Approve/Reject entry +- `GET /moderation/queue/glossary-edits` - List pending edits +- `PUT /moderation/glossary-edits/{id}/{action}` - Approve/Reject edit + +## Frontend Components + +### Public + +- `/glossary` - Glossary index page with search +- `/glossary/[term]` - Individual term page + +### Authenticated + +- "Suggest Term" button on glossary index +- "Suggest Edit" button on term pages + +### Admin + +- `/admin/glossary` - Queue for pending entries and edits + +## Workflow + +### New Entry + +1. User submits a new term via form +2. Entry created with `status=pending` +3. Moderator reviews in `/admin/glossary` +4. On approval, `status=approved` and entry is public + +### Edit Existing Entry + +1. User clicks "Suggest Edit" on a term page +2. `GlossaryEdit` record created with `status=pending` +3. Moderator reviews edit +4. On approval, the `GlossaryEntry` is updated and `GlossaryEdit.status=approved` + +## Integration Points + +- **Search**: Include glossary terms in global search +- **Inline Tooltips**: Hovering over a glossary term in comments/reviews shows a tooltip with the definition +- **Auto-linking**: Detect glossary terms in user-generated content and auto-link them + +## Future Enhancements + +- **Voting**: Community voting on definitions +- **Aliases**: Multiple terms pointing to the same entry (e.g., "The Gorge" โ†’ "Gorge Amphitheatre") +- **Cross-references**: Link related terms diff --git a/DEPLOY.md b/DEPLOY.md new file mode 100644 index 0000000..d2729e9 --- /dev/null +++ b/DEPLOY.md @@ -0,0 +1,48 @@ +# Deployment Guide + +## Prerequisites + +- Docker & Docker Compose +- Git + +## Steps + +1. **Clone the repository** (if not already on the server): + + ```bash + git clone + cd elmeg + ``` + +2. **Environment Variables**: + Create a `.env` file in the root directory (or rely on `docker-compose.yml` defaults for dev). 
+ For production, you should set: + + ```env + POSTGRES_USER=your_user + POSTGRES_PASSWORD=your_password + POSTGRES_DB=elmeg_db + SECRET_KEY=your_production_secret_key + ``` + +3. **Build and Run**: + + ```bash + docker-compose up --build -d + ``` + +4. **Run Migrations**: + The backend container needs to run migrations. + + ```bash + docker-compose exec backend alembic upgrade head + ``` + +5. **Access the App**: + - Frontend: `http://localhost:3000` (or your server IP) + - Backend API: `http://localhost:8000` + +## Troubleshooting + +- **Database Connection**: Ensure the `backend` service can reach `db`. The `DATABASE_URL` in `docker-compose.yml` should match the postgres credentials. +- **API Connectivity**: If the frontend (SSR) fails to fetch data, check `INTERNAL_API_URL` in `docker-compose.yml`. It should point to `http://backend:8000`. diff --git a/LOCAL_DEV.md b/LOCAL_DEV.md new file mode 100644 index 0000000..f504746 --- /dev/null +++ b/LOCAL_DEV.md @@ -0,0 +1,125 @@ +# Local Development Setup (No Docker) + +## Backend Setup + +1. **Create Virtual Environment** (if not already done): + +```bash +cd backend +python3 -m venv venv +source venv/bin/activate # On Mac/Linux +``` + +2. **Install Dependencies**: + +```bash +pip install -r requirements.txt +``` + +3. **Set Environment Variables**: + +```bash +export DATABASE_URL="sqlite:///./elmeg.db" +export SECRET_KEY="your-secret-key-for-jwt" +``` + +4. **Run Migrations**: + +```bash +alembic upgrade head +``` + +5. **Start Backend**: + +```bash +uvicorn main:app --reload --port 8000 +``` + +Backend will be available at: `http://localhost:8000` + +--- + +## Frontend Setup + +1. **Install Dependencies**: + +```bash +cd frontend +npm install +``` + +2. **Set Environment Variables**: +Create `frontend/.env.local`: + +``` +NEXT_PUBLIC_API_URL=http://localhost:8000 +``` + +3. 
**Start Frontend**: + +```bash +npm run dev +``` + +Frontend will be available at: `http://localhost:3000` + +--- + +## Testing the Application + +1. **Create a User**: + - Navigate to `http://localhost:3000` + - Register a new account + +2. **Test Features**: + - Browse shows at `/archive` + - Search with `Cmd+K` + - Create a group at `/groups` + - Check settings at `/settings` + - View admin dashboard at `/admin` (if superuser) + +3. **Create Superuser** (for admin access): + +```bash +# In backend directory with venv activated +python -c " +from database import engine +from models import User +from sqlmodel import Session, select +from passlib.context import CryptContext + +pwd_context = CryptContext(schemes=['bcrypt'], deprecated='auto') + +with Session(engine) as session: + user = session.exec(select(User).where(User.email == 'your@email.com')).first() + if user: + user.is_superuser = True + user.role = 'admin' + session.add(user) + session.commit() + print(f'User {user.email} is now a superuser') + else: + print('User not found') +" +``` + +--- + +## Common Issues + +### Backend won't start + +- Check if port 8000 is already in use: `lsof -i :8000` +- Ensure virtual environment is activated +- Verify all dependencies installed: `pip list` + +### Frontend won't start + +- Check if port 3000 is already in use: `lsof -i :3000` +- Clear `.next` cache: `rm -rf .next` +- Reinstall dependencies: `rm -rf node_modules && npm install` + +### Database issues + +- Delete and recreate: `rm elmeg.db` then `alembic upgrade head` +- Check migration status: `alembic current` diff --git a/README.md b/README.md new file mode 100644 index 0000000..6664ea2 --- /dev/null +++ b/README.md @@ -0,0 +1,194 @@ +# Elmeg Demo Environment + +This is a fully-populated demo instance of Elmeg with Goose data and 12 diverse user personas. + +## Quick Start + +### 1. 
Backend (Port 8020) + +```bash +cd backend +# Create virtual environment if you haven't +python3 -m venv venv +source venv/bin/activate +pip install -r requirements.txt + +# Run server +DATABASE_URL='sqlite:///./elmeg-demo.db' SECRET_KEY='demo-secret' uvicorn main:app --reload --port 8020 +``` + +### 2. Frontend (Port 3020) + +```bash +cd frontend +# Install dependencies if you haven't +npm install + +# Run dev server +NEXT_PUBLIC_API_URL=http://localhost:8020 npm run dev -- -p 3020 +``` + +## 🧪 Testing + +Once running, visit: + +- **Frontend**: http://localhost:3020 +- **Backend Docs**: http://localhost:8020/docs + +## User Personas (All passwords: `demo123`) + +### 1. TheArchivist (`archivist@demo.com`) + +- **Role**: User +- **Wiki Mode**: ON +- **Behavior**: Pure data consumer, no social interaction +- **Use Case**: Tests wiki mode functionality + +### 2. StatNerd420 (`statnerd@demo.com`) + +- **Role**: User +- **Focus**: Attendance tracking, gap charts +- **Activity**: Marks attendance at 8 shows +- **Use Case**: Power user who loves stats + +### 3. CriticalListener (`reviewer@demo.com`) + +- **Role**: User +- **Focus**: Detailed reviews +- **Activity**: 3 in-depth show reviews +- **Use Case**: Content creator, thoughtful analysis + +### 4. CasualFan (`casual@demo.com`) + +- **Role**: User +- **Focus**: Occasional engagement +- **Activity**: 2 comments on performances +- **Use Case**: Average user, light participation + +### 5. NortheastHonkers (`groupleader@demo.com`) + +- **Role**: User +- **Focus**: Community organizing +- **Activity**: Created "Northeast Honkers" group, posts +- **Use Case**: Group admin, regional organizer + +### 6. ModGoose (`mod@demo.com`) + +- **Role**: Moderator +- **Focus**: Content moderation +- **Activity**: Approved performance nicknames +- **Use Case**: Tests moderation dashboard + +### 7.
AdminBird (`admin@demo.com`) + +- **Role**: Admin (Superuser) +- **Focus**: Platform administration +- **Activity**: Full access to all features +- **Use Case**: Tests admin functionality + +### 8. NewToGoose (`newbie@demo.com`) + +- **Role**: User +- **Focus**: Just discovering the band +- **Activity**: Minimal (new user) +- **Use Case**: Onboarding experience + +### 9. TaperTom (`taper@demo.com`) + +- **Role**: User +- **Focus**: Recording quality +- **Activity**: Comments on audio quality for 4 shows +- **Use Case**: Niche interest user + +### 10. RoadWarrior (`tourfollower@demo.com`) + +- **Role**: User +- **Focus**: Tour following +- **Activity**: Attended ALL 10 shows +- **Use Case**: Super fan, high engagement + +### 11. SilentHonker (`lurker@demo.com`) + +- **Role**: User +- **Wiki Mode**: ON +- **Behavior**: Reads everything, posts nothing +- **Use Case**: Passive consumer + +### 12. HypeGoose (`hype@demo.com`) + +- **Role**: User +- **Focus**: Extreme enthusiasm +- **Activity**: Rates everything 10/10, "FIRE ๐Ÿ”ฅ" reviews +- **Use Case**: Enthusiastic but low-quality content + +## Demo Data + +- **Vertical**: Goose +- **Venues**: 5 (Red Rocks, Capitol Theatre, Radio City, The Gorge, Brooklyn Bowl) +- **Tours**: 2 (Fall 2023, Summer 2024) +- **Songs**: 8 (Hungersite, Arcadia, Hot Tea, Tumble, etc.) 
+ +- **Shows**: 10 (spanning 2024) +- **Performances**: 50 (5 songs per show) +- **Groups**: 1 (Northeast Honkers) +- **Reviews**: 8 (mix of thoughtful and hype) +- **Comments**: 10 (various contexts) +- **Attendance**: 18 records (StatNerd + RoadWarrior) +- **Nicknames**: 3 approved performance nicknames + +## Testing Scenarios + +### Wiki Mode + +- Login as `archivist@demo.com` or `lurker@demo.com` +- Verify no social features visible + +### Moderation + +- Login as `mod@demo.com` +- Navigate to `/admin` +- Review nickname queue + +### Admin Dashboard + +- Login as `admin@demo.com` +- Full access to `/admin` +- Manage reports, users, content + +### Group Features + +- Login as `groupleader@demo.com` +- View `/groups` +- Post in "Northeast Honkers" + +### Activity Feed + +- View home page +- See recent reviews, attendance, posts + +### Search + +- Press `Cmd+K` +- Search for "Hungersite" or "Red Rocks" +- Navigate to performance pages + +### Performance Pages + +- Click any song in a setlist +- View gap stats, times played +- Navigate between versions + +## Resetting Demo Data + +```bash +cd backend +rm elmeg-demo.db +DATABASE_URL="sqlite:///./elmeg-demo.db" alembic upgrade head +DATABASE_URL="sqlite:///./elmeg-demo.db" python3 quick_seed.py +``` + +## Notes + +- This demo is completely separate from the main `elmeg` codebase +- Uses separate database (`elmeg-demo.db`) +- Runs on different ports (8020/3020 vs 8000/3000) +- Safe to experiment and break things! diff --git a/VPS_HANDOFF.md b/VPS_HANDOFF.md new file mode 100644 index 0000000..d4e6e97 --- /dev/null +++ b/VPS_HANDOFF.md @@ -0,0 +1,130 @@ +# Handoff to VPS Agent + +**Project:** Elmeg +**Date:** 2025-12-03 +**Status:** Feature Complete / Ready for Deployment + +## 1. Summary of Changes + +We have implemented seven major feature sets: + +1. **Advanced Content (Performance Nicknames)**: + * **Backend**: Added `PerformanceNickname` model.
API endpoints for suggesting and approving nicknames. + * **Frontend**: "Suggest Nickname" dialog on Show Detail page. Display of approved nicknames on the setlist. +2. **Review System**: + * **Backend**: Added `Review` model supporting multiple entity types (Show, Venue, Song, Performance, Tour, Year). + * **Frontend**: Generic `EntityReviews` component. Integrated into Show Detail page. +3. **Groups / Communities**: + * **Backend**: Added `Group`, `GroupMember`, `GroupPost` models and APIs. + * **Frontend**: `GroupsPage` (list), `GroupDetailPage` (feed), `CreateGroupPage`. +4. **User Profile Enhancements**: + * **Backend**: Added `routers/users.py` for fetching user stats, attendance, reviews, and groups. + * **Frontend**: Updated `ProfilePage` with tabs for Overview, Attendance, Reviews, and Groups. +5. **Global Search**: + * **Backend**: Added `routers/search.py` for multi-entity search (Songs, Venues, Tours, Groups, Users, Nicknames, Performances). + * **Frontend**: Implemented `Cmd+K` dialog with `cmdk` and `shadcn/ui`. +6. **Performance Pages**: + * **Backend**: Added `routers/performances.py` with logic to calculate "Gap" and "Times Played" stats, and identify Previous/Next performances. + * **Frontend**: Created `/performances/[id]` page with stats, navigation, and social features. +7. **Notifications**: + * **Backend**: Added `Notification` model and `routers/notifications.py`. Implemented logic to notify group owners on new member joins. + * **Frontend**: Added `NotificationBell` to Navbar with unread count and popover list. + +## 2. Technical Updates + +* **Database**: + * New tables/columns added via Alembic migrations. + * **Critical**: Migration `6659cb1e0ca5_add_review_targets.py` fixed. + * **New**: Migration `1305863562e7_add_groups.py` added. + * **New**: Migration `a526deda28e0_add_notifications.py` added. +* **Dependencies**: + * Added `psycopg2-binary` to `backend/requirements.txt` for PostgreSQL support. 
+ * Added `argon2-cffi` for improved password hashing. +* **Frontend Config**: + * Added `lib/api-config.ts` to handle API URL resolution (`getApiUrl()`) which correctly distinguishes between Server-Side Rendering (internal Docker network) and Client-Side (public URL). + +## 3. Deployment Instructions + +### Option A: Docker Compose (Recommended) + +If the VPS has Docker and Docker Compose: + +1. **Update Codebase**: Pull the latest changes to the server. +2. **Rebuild Containers**: + + ```bash + docker-compose up --build -d + ``` + +3. **Run Migrations**: + + ```bash + docker-compose exec backend alembic upgrade head + ``` + +4. **Verify**: Check logs to ensure services started correctly. + + ```bash + docker-compose logs -f + ``` + +### Option B: Manual / Bare Metal + +If running services directly (Systemd/PM2): + +1. **Backend**: + * Activate virtual environment. + * Install new requirements: + + ```bash + pip install -r backend/requirements.txt + ``` + + * Run migrations: + + ```bash + cd backend + alembic upgrade head + ``` + + * Restart Backend Service (e.g., `systemctl restart elmeg-backend`). + +2. **Frontend**: + * Install dependencies: + + ```bash + cd frontend + npm install + ``` + + * Build the application: + + ```bash + npm run build + ``` + + * Restart Frontend Service (e.g., `pm2 restart elmeg-frontend`). + +## 4. Verification Steps + +1. **Navigate to a Show Page**: Ensure the page loads (tests SSR connectivity). +2. **Check Reviews**: Verify the "Reviews" section is visible at the bottom. +3. **Check Groups**: Navigate to `/groups`, create a group, and post a message. +4. **Check Profile**: Log in and verify your profile shows your attendance, reviews, and groups. +5. **Test Search**: Press `Cmd+K` (or `Ctrl+K`) and search for "Tweezer" (Song) or "Tahoe Tweezer" (Nickname). +6. **Check Performance Page**: Click a search result for a Performance or Nickname and verify you land on `/performances/[id]`. +7. **Test Navigation**: On a Performance Page, click "Previous Version" or "Next Version" to traverse the song's history. +8. **Test Notifications**: Have another user join a group you created and verify the bell icon updates. + +## 5. Known Issues / Notes + +* **Environment Variables**: Ensure `DATABASE_URL` is set correctly in the backend environment. Ensure `INTERNAL_API_URL` is set for the frontend if using Docker (e.g., `http://backend:8000`). + +## 6. Future Roadmap + +See [docs/ROADMAP.md](docs/ROADMAP.md) for the detailed plan regarding Cross-Vertical Federation, Wiki Mode, Moderation, and Advanced Stats. diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..628cc31 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..1b03b05 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,147 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g.
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. 
+# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/README b/backend/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/backend/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000..613bfcf --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,79 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# Import SQLModel and your models +from sqlmodel import SQLModel +from database import DATABASE_URL +import models # This registers the models with SQLModel.metadata + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# target_metadata = None +target_metadata = SQLModel.metadata + +# Override sqlalchemy.url with our DATABASE_URL +config.set_main_option("sqlalchemy.url", DATABASE_URL) + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + render_as_batch=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000..1101630 --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/backend/alembic/versions/1305863562e7_add_groups.py b/backend/alembic/versions/1305863562e7_add_groups.py new file mode 100644 index 0000000..4c2282a --- /dev/null +++ b/backend/alembic/versions/1305863562e7_add_groups.py @@ -0,0 +1,76 @@ +"""add_groups + +Revision ID: 1305863562e7 +Revises: 6659cb1e0ca5 +Create Date: 2025-12-03 14:49:44.973922 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. 
+revision: str = '1305863562e7' +down_revision: Union[str, Sequence[str], None] = '6659cb1e0ca5' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('group', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('privacy', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('created_by', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['created_by'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('group', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_group_name'), ['name'], unique=True) + + op.create_table('groupmember', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('role', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('joined_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('grouppost', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('content', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # with op.batch_alter_table('review', schema=None) as batch_op: + # 
batch_op.drop_column('created_at') + + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('review', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_at', sa.DATETIME(), nullable=False)) + + op.drop_table('grouppost') + op.drop_table('groupmember') + with op.batch_alter_table('group', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_group_name')) + + op.drop_table('group') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/32ebf231693a_add_user_preferences.py b/backend/alembic/versions/32ebf231693a_add_user_preferences.py new file mode 100644 index 0000000..db9ecaa --- /dev/null +++ b/backend/alembic/versions/32ebf231693a_add_user_preferences.py @@ -0,0 +1,42 @@ +"""Add user preferences + +Revision ID: 32ebf231693a +Revises: a0b7abe57112 +Create Date: 2025-12-02 02:46:02.955217 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = '32ebf231693a' +down_revision: Union[str, Sequence[str], None] = 'a0b7abe57112' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('userpreferences', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('wiki_mode', sa.Boolean(), nullable=False), + sa.Column('show_ratings', sa.Boolean(), nullable=False), + sa.Column('show_comments', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('user_id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('userpreferences') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/341b95b6e098_add_gamification_models.py b/backend/alembic/versions/341b95b6e098_add_gamification_models.py new file mode 100644 index 0000000..6bad462 --- /dev/null +++ b/backend/alembic/versions/341b95b6e098_add_gamification_models.py @@ -0,0 +1,58 @@ +"""Add gamification models + +Revision ID: 341b95b6e098 +Revises: 366067fc1318 +Create Date: 2025-12-02 02:59:20.293100 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = '341b95b6e098' +down_revision: Union[str, Sequence[str], None] = '366067fc1318' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('badge', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('icon', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('badge', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_badge_name'), ['name'], unique=True) + batch_op.create_index(batch_op.f('ix_badge_slug'), ['slug'], unique=True) + + op.create_table('userbadge', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('badge_id', sa.Integer(), nullable=False), + sa.Column('awarded_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['badge_id'], ['badge.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('userbadge') + with op.batch_alter_table('badge', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_badge_slug')) + batch_op.drop_index(batch_op.f('ix_badge_name')) + + op.drop_table('badge') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/366067fc1318_add_review_system.py b/backend/alembic/versions/366067fc1318_add_review_system.py new file mode 100644 index 0000000..b726770 --- /dev/null +++ b/backend/alembic/versions/366067fc1318_add_review_system.py @@ -0,0 +1,48 @@ +"""Add review system + +Revision ID: 366067fc1318 +Revises: 32ebf231693a +Create Date: 2025-12-02 02:50:57.830097 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. 
+revision: str = '366067fc1318' +down_revision: Union[str, Sequence[str], None] = '32ebf231693a' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('review', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('blurb', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('content', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('score', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('show_id', sa.Integer(), nullable=True), + sa.Column('venue_id', sa.Integer(), nullable=True), + sa.Column('song_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['show_id'], ['show.id'], ), + sa.ForeignKeyConstraint(['song_id'], ['song.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['venue_id'], ['venue.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('review') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/6659cb1e0ca5_add_review_targets.py b/backend/alembic/versions/6659cb1e0ca5_add_review_targets.py new file mode 100644 index 0000000..f00680c --- /dev/null +++ b/backend/alembic/versions/6659cb1e0ca5_add_review_targets.py @@ -0,0 +1,45 @@ +"""add_review_targets + +Revision ID: 6659cb1e0ca5 +Revises: 83e6fd46fa2b +Create Date: 2025-12-03 13:05:43.037872 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '6659cb1e0ca5'
+down_revision: Union[str, Sequence[str], None] = '83e6fd46fa2b'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('review', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('performance_id', sa.Integer(), nullable=True))
+        batch_op.add_column(sa.Column('tour_id', sa.Integer(), nullable=True))
+        batch_op.add_column(sa.Column('year', sa.Integer(), nullable=True))
+        batch_op.create_foreign_key('fk_review_tour_id', 'tour', ['tour_id'], ['id'])
+        batch_op.create_foreign_key('fk_review_performance_id', 'performance', ['performance_id'], ['id'])
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('review', schema=None) as batch_op:
+        # NOTE(review): spurious autogen re-add of review.created_at removed; upgrade() never drops it
+        batch_op.drop_constraint('fk_review_performance_id', type_='foreignkey')
+        batch_op.drop_constraint('fk_review_tour_id', type_='foreignkey')
+        batch_op.drop_column('year')
+        batch_op.drop_column('tour_id')
+        batch_op.drop_column('performance_id')
+
+    # ### end Alembic commands ###
diff --git a/backend/alembic/versions/83e6fd46fa2b_add_moderation_system.py b/backend/alembic/versions/83e6fd46fa2b_add_moderation_system.py
new file mode 100644
index 0000000..83c501c
--- /dev/null
+++ b/backend/alembic/versions/83e6fd46fa2b_add_moderation_system.py
@@ -0,0 +1,59 @@
+"""Add moderation system
+
+Revision ID: 83e6fd46fa2b
+Revises: bc32a0b7efbb
+Create Date: 2025-12-02 03:28:35.663970
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision: str = '83e6fd46fa2b'
+down_revision: Union[str, Sequence[str], None] = 'bc32a0b7efbb'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('report',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.Integer(), nullable=False),
+    sa.Column('target_type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    sa.Column('target_id', sa.Integer(), nullable=False),
+    sa.Column('reason', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+    sa.Column('created_at', sa.DateTime(), nullable=False),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    with op.batch_alter_table('report', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_report_status'), ['status'], unique=False)
+        batch_op.create_index(batch_op.f('ix_report_target_id'), ['target_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_report_target_type'), ['target_type'], unique=False)
+
+    with op.batch_alter_table('user', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('role', sqlmodel.sql.sqltypes.AutoString(), server_default='user', nullable=False))
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust!
### + with op.batch_alter_table('user', schema=None) as batch_op: + batch_op.drop_column('role') + + with op.batch_alter_table('report', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_report_target_type')) + batch_op.drop_index(batch_op.f('ix_report_target_id')) + batch_op.drop_index(batch_op.f('ix_report_status')) + + op.drop_table('report') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/a0b7abe57112_add_core_enhancements.py b/backend/alembic/versions/a0b7abe57112_add_core_enhancements.py new file mode 100644 index 0000000..0f46d76 --- /dev/null +++ b/backend/alembic/versions/a0b7abe57112_add_core_enhancements.py @@ -0,0 +1,122 @@ +"""Add core enhancements + +Revision ID: a0b7abe57112 +Revises: c26cc8212061 +Create Date: 2025-12-02 01:33:56.476865 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'a0b7abe57112' +down_revision: Union[str, Sequence[str], None] = 'c26cc8212061' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('artist', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('instrument', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_artist_name'), 'artist', ['name'], unique=False) + op.create_table('tag', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_tag_name'), 'tag', ['name'], unique=True) + op.create_index(op.f('ix_tag_slug'), 'tag', ['slug'], unique=True) + op.create_table('tour', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('start_date', sa.DateTime(), nullable=True), + sa.Column('end_date', sa.DateTime(), nullable=True), + sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_tour_name'), 'tour', ['name'], unique=False) + op.create_table('entitytag', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('tag_id', sa.Integer(), nullable=False), + sa.Column('entity_type', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('entity_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_entitytag_entity_id'), 'entitytag', ['entity_id'], unique=False) + op.create_index(op.f('ix_entitytag_entity_type'), 'entitytag', ['entity_type'], unique=False) + op.create_table('attendance', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('show_id', sa.Integer(), nullable=False), + 
sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['show_id'], ['show.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('showartist', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('show_id', sa.Integer(), nullable=False), + sa.Column('artist_id', sa.Integer(), nullable=False), + sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.ForeignKeyConstraint(['artist_id'], ['artist.id'], ), + sa.ForeignKeyConstraint(['show_id'], ['show.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('performanceartist', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('performance_id', sa.Integer(), nullable=False), + sa.Column('artist_id', sa.Integer(), nullable=False), + sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.ForeignKeyConstraint(['artist_id'], ['artist.id'], ), + sa.ForeignKeyConstraint(['performance_id'], ['performance.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('show', schema=None) as batch_op: + batch_op.add_column(sa.Column('tour_id', sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + batch_op.create_foreign_key('fk_show_tour', 'tour', ['tour_id'], ['id']) + + with op.batch_alter_table('song', schema=None) as batch_op: + batch_op.add_column(sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + + with op.batch_alter_table('venue', schema=None) as batch_op: + batch_op.add_column(sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
 ###
+    op.drop_column('venue', 'notes')
+    op.drop_column('song', 'notes')
+    op.drop_constraint('fk_show_tour', 'show', type_='foreignkey')
+    op.drop_column('show', 'notes')
+    op.drop_column('show', 'tour_id')
+    op.drop_table('performanceartist')
+    op.drop_table('showartist')
+    op.drop_table('attendance')
+    op.drop_index(op.f('ix_entitytag_entity_type'), table_name='entitytag')
+    op.drop_index(op.f('ix_entitytag_entity_id'), table_name='entitytag')
+    op.drop_table('entitytag')
+    op.drop_index(op.f('ix_tour_name'), table_name='tour')
+    op.drop_table('tour')
+    op.drop_index(op.f('ix_tag_slug'), table_name='tag')
+    op.drop_index(op.f('ix_tag_name'), table_name='tag')
+    op.drop_table('tag')
+    op.drop_index(op.f('ix_artist_name'), table_name='artist')
+    op.drop_table('artist')
+    # ### end Alembic commands ###
diff --git a/backend/alembic/versions/a526deda28e0_add_notifications.py b/backend/alembic/versions/a526deda28e0_add_notifications.py
new file mode 100644
index 0000000..6ec4bf9
--- /dev/null
+++ b/backend/alembic/versions/a526deda28e0_add_notifications.py
@@ -0,0 +1,57 @@
+"""add_notifications
+
+Revision ID: a526deda28e0
+Revises: 1305863562e7
+Create Date: 2025-12-03 15:40:20.810781
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'a526deda28e0'
+down_revision: Union[str, Sequence[str], None] = '1305863562e7'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust!
### + op.create_table('notification', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('message', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('link', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('is_read', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('notification', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_notification_user_id'), ['user_id'], unique=False) + + with op.batch_alter_table('review', schema=None) as batch_op: + # batch_op.drop_column('created_at') + pass + + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('review', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_at', sa.DATETIME(), nullable=False)) + + with op.batch_alter_table('notification', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_notification_user_id')) + + op.drop_table('notification') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/b16ef2228130_add_review_created_at_and_report_details.py b/backend/alembic/versions/b16ef2228130_add_review_created_at_and_report_details.py new file mode 100644 index 0000000..5764a1c --- /dev/null +++ b/backend/alembic/versions/b16ef2228130_add_review_created_at_and_report_details.py @@ -0,0 +1,53 @@ +"""add_review_created_at_and_report_details + +Revision ID: b16ef2228130 +Revises: a526deda28e0 +Create Date: 2025-12-03 16:15:16.644205 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'b16ef2228130' +down_revision: Union[str, Sequence[str], None] = 'a526deda28e0' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
 ###
+    with op.batch_alter_table('report', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('entity_type', sqlmodel.sql.sqltypes.AutoString(), server_default='', nullable=False))
+        batch_op.add_column(sa.Column('entity_id', sa.Integer(), server_default='0', nullable=False))
+        batch_op.add_column(sa.Column('details', sqlmodel.sql.sqltypes.AutoString(), server_default='', nullable=False))
+        batch_op.drop_index(batch_op.f('ix_report_target_id'))
+        batch_op.drop_index(batch_op.f('ix_report_target_type'))
+        batch_op.create_index(batch_op.f('ix_report_entity_id'), ['entity_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_report_entity_type'), ['entity_type'], unique=False)
+        batch_op.drop_column('target_type')
+        batch_op.drop_column('target_id')
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('report', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('target_id', sa.INTEGER(), server_default='0', nullable=False))
+        batch_op.add_column(sa.Column('target_type', sa.VARCHAR(), server_default='', nullable=False))
+        batch_op.drop_index(batch_op.f('ix_report_entity_type'))
+        batch_op.drop_index(batch_op.f('ix_report_entity_id'))
+        batch_op.create_index(batch_op.f('ix_report_target_type'), ['target_type'], unique=False)
+        batch_op.create_index(batch_op.f('ix_report_target_id'), ['target_id'], unique=False)
+        batch_op.drop_column('details')
+        batch_op.drop_column('entity_id')
+        batch_op.drop_column('entity_type')
+
+    # ### end Alembic commands ###
diff --git a/backend/alembic/versions/bc32a0b7efbb_add_performance_nicknames.py b/backend/alembic/versions/bc32a0b7efbb_add_performance_nicknames.py
new file mode 100644
index 0000000..de0d42f
--- /dev/null
+++ b/backend/alembic/versions/bc32a0b7efbb_add_performance_nicknames.py
@@ -0,0 +1,52 @@
+"""Add performance nicknames
+
+Revision ID: bc32a0b7efbb
+Revises: 341b95b6e098
+Create Date: 2025-12-02 03:16:05.516007
+
+"""
+from typing import Sequence, Union
+
+from
alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'bc32a0b7efbb' +down_revision: Union[str, Sequence[str], None] = '341b95b6e098' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('performancenickname', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('performance_id', sa.Integer(), nullable=False), + sa.Column('nickname', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('status', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('suggested_by', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['performance_id'], ['performance.id'], ), + sa.ForeignKeyConstraint(['suggested_by'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('performancenickname', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_performancenickname_nickname'), ['nickname'], unique=False) + batch_op.create_index(batch_op.f('ix_performancenickname_status'), ['status'], unique=False) + + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('performancenickname', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_performancenickname_status')) + batch_op.drop_index(batch_op.f('ix_performancenickname_nickname')) + + op.drop_table('performancenickname') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/c26cc8212061_add_social_models.py b/backend/alembic/versions/c26cc8212061_add_social_models.py new file mode 100644 index 0000000..c6708e2 --- /dev/null +++ b/backend/alembic/versions/c26cc8212061_add_social_models.py @@ -0,0 +1,59 @@ +"""Add social models + +Revision ID: c26cc8212061 +Revises: f5ca1b7c50b1 +Create Date: 2025-12-02 01:14:05.048299 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'c26cc8212061' +down_revision: Union[str, Sequence[str], None] = 'f5ca1b7c50b1' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('comment', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('content', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('show_id', sa.Integer(), nullable=True), + sa.Column('venue_id', sa.Integer(), nullable=True), + sa.Column('song_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['show_id'], ['show.id'], ), + sa.ForeignKeyConstraint(['song_id'], ['song.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['venue_id'], ['venue.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('rating', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('score', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('show_id', sa.Integer(), nullable=True), + sa.Column('song_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['show_id'], ['show.id'], ), + sa.ForeignKeyConstraint(['song_id'], ['song.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('rating') + op.drop_table('comment') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/f5ca1b7c50b1_initial_migration.py b/backend/alembic/versions/f5ca1b7c50b1_initial_migration.py new file mode 100644 index 0000000..5b7cd4d --- /dev/null +++ b/backend/alembic/versions/f5ca1b7c50b1_initial_migration.py @@ -0,0 +1,113 @@ +"""Initial migration + +Revision ID: f5ca1b7c50b1 +Revises: +Create Date: 2025-12-02 00:47:26.543594 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'f5ca1b7c50b1' +down_revision: Union[str, Sequence[str], None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('hashed_password', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('is_active', sa.Boolean(), nullable=False), + sa.Column('is_superuser', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True) + op.create_table('venue', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('state', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('country', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('capacity', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_venue_name'), 'venue', ['name'], unique=False) + op.create_table('vertical', + sa.Column('id', sa.Integer(), 
nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('slug', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_vertical_name'), 'vertical', ['name'], unique=False) + op.create_index(op.f('ix_vertical_slug'), 'vertical', ['slug'], unique=True) + op.create_table('profile', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('username', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('display_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_profile_username'), 'profile', ['username'], unique=False) + op.create_table('show', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('date', sa.DateTime(), nullable=False), + sa.Column('vertical_id', sa.Integer(), nullable=False), + sa.Column('venue_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['venue_id'], ['venue.id'], ), + sa.ForeignKeyConstraint(['vertical_id'], ['vertical.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_show_date'), 'show', ['date'], unique=False) + op.create_table('song', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('original_artist', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('vertical_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['vertical_id'], ['vertical.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_song_title'), 'song', ['title'], unique=False) + op.create_table('performance', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('show_id', sa.Integer(), nullable=False), + sa.Column('song_id', 
sa.Integer(), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('set_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('segue', sa.Boolean(), nullable=False), + sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.ForeignKeyConstraint(['show_id'], ['show.id'], ), + sa.ForeignKeyConstraint(['song_id'], ['song.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('performance') + op.drop_index(op.f('ix_song_title'), table_name='song') + op.drop_table('song') + op.drop_index(op.f('ix_show_date'), table_name='show') + op.drop_table('show') + op.drop_index(op.f('ix_profile_username'), table_name='profile') + op.drop_table('profile') + op.drop_index(op.f('ix_vertical_slug'), table_name='vertical') + op.drop_index(op.f('ix_vertical_name'), table_name='vertical') + op.drop_table('vertical') + op.drop_index(op.f('ix_venue_name'), table_name='venue') + op.drop_table('venue') + op.drop_index(op.f('ix_user_email'), table_name='user') + op.drop_table('user') + # ### end Alembic commands ### diff --git a/backend/auth.py b/backend/auth.py new file mode 100644 index 0000000..8455bb3 --- /dev/null +++ b/backend/auth.py @@ -0,0 +1,53 @@ +from datetime import datetime, timedelta +from typing import Optional +from jose import JWTError, jwt +from passlib.context import CryptContext +from fastapi import Depends, HTTPException, status +from fastapi.security import OAuth2PasswordBearer +from sqlmodel import Session, select +from database import get_session +from models import User +import os + +# Configuration +SECRET_KEY = os.getenv("SECRET_KEY", "supersecretkey") # Change this in production! 
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30

pwd_context = CryptContext(schemes=["argon2"], deprecated="auto")
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="auth/token")

def verify_password(plain_password, hashed_password):
    """Check a plaintext password against its argon2 hash."""
    return pwd_context.verify(plain_password, hashed_password)

def get_password_hash(password):
    """Hash a plaintext password with argon2 for storage."""
    return pwd_context.hash(password)

def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    """Build a signed JWT from `data`, adding an `exp` claim.

    FIX: the fallback previously hard-coded timedelta(minutes=15), silently
    ignoring the ACCESS_TOKEN_EXPIRE_MINUTES constant declared above; the
    fallback now honors that constant.
    """
    to_encode = data.copy()
    if expires_delta:
        expire = datetime.utcnow() + expires_delta
    else:
        # utcnow() is naive UTC, consistent with the rest of this module.
        expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
    return encoded_jwt

async def get_current_user(token: str = Depends(oauth2_scheme), session: Session = Depends(get_session)):
    """FastAPI dependency: resolve the Bearer token to a User row or raise 401.

    The token's `sub` claim is treated as the user's email (see the lookup
    below); any decode failure or unknown email yields the same 401 so the
    response does not leak which part failed.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        email: str = payload.get("sub")
        if email is None:
            raise credentials_exception
    except JWTError:
        raise credentials_exception

    user = session.exec(select(User).where(User.email == email)).first()
    if user is None:
        raise credentials_exception
    return user
class RoleChecker:
    """FastAPI dependency that restricts an endpoint to specific roles.

    Instantiate with the permitted role names and use the instance as a
    dependency; it resolves the current user, rejects anyone whose role is
    not allowed with a 403, and otherwise returns the user unchanged.
    """

    def __init__(self, allowed_roles: list[str]):
        self.allowed_roles = allowed_roles

    def __call__(self, user: User = Depends(get_current_user)):
        # Happy path first: authorized users pass straight through.
        if user.role in self.allowed_roles:
            return user
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Operation not permitted"
        )
"username": "ModGoose", "role": "moderator", "wiki_mode": False}, + {"email": "admin@demo.com", "username": "AdminBird", "role": "admin", "wiki_mode": False}, + {"email": "newbie@demo.com", "username": "NewToGoose", "role": "user", "wiki_mode": False}, + {"email": "taper@demo.com", "username": "TaperTom", "role": "user", "wiki_mode": False}, + {"email": "tourfollower@demo.com", "username": "RoadWarrior", "role": "user", "wiki_mode": False}, + {"email": "lurker@demo.com", "username": "SilentHonker", "role": "user", "wiki_mode": True}, + {"email": "hype@demo.com", "username": "HypeGoose", "role": "user", "wiki_mode": False}, +] + +def fetch_json(endpoint, params=None): + """Fetch JSON from El Goose API with error handling""" + url = f"{BASE_URL}/{endpoint}.json" + try: + response = requests.get(url, params=params) + response.raise_for_status() + data = response.json() + + if data.get('error') == 1: + print(f"โŒ API Error: {data.get('error_message')}") + return None + + return data.get('data', []) + except Exception as e: + print(f"โŒ Failed to fetch {endpoint}: {e}") + return None + +def create_users(session): + """Create demo user personas""" + print("\n๐Ÿ“ Creating user personas...") + users = [] + for user_data in DEMO_USERS: + user = User( + email=user_data["email"], + hashed_password=pwd_context.hash("demo123"), + is_active=True, + is_superuser=(user_data["role"] == "admin"), + role=user_data["role"] + ) + session.add(user) + session.commit() + session.refresh(user) + + prefs = UserPreferences( + user_id=user.id, + wiki_mode=user_data["wiki_mode"], + show_ratings=not user_data["wiki_mode"], + show_comments=not user_data["wiki_mode"] + ) + session.add(prefs) + users.append(user) + + session.commit() + print(f"โœ“ Created {len(users)} users") + return users + +def import_venues(session): + """Import all venues""" + print("\n๐Ÿ›๏ธ Importing venues...") + venues_data = fetch_json("venues") + if not venues_data: + return {} + + venue_map = {} + for v in 
def import_songs(session, vertical_id):
    """Import all songs.

    Fetches the El Goose song catalog and returns a dict translating the
    API's song `id` to our local Song primary key, de-duplicating on
    (title, vertical_id).
    """
    print("\n🎵 Importing songs...")
    payload = fetch_json("songs")
    if not payload:
        return {}

    mapping = {}
    for record in payload:
        # Reuse an existing row for this (title, vertical) if present.
        match = session.exec(
            select(Song).where(
                Song.title == record['name'],
                Song.vertical_id == vertical_id
            )
        ).first()

        if match:
            mapping[record['id']] = match.id  # API uses 'id' not 'song_id'
            continue

        row = Song(
            title=record['name'],
            original_artist=record.get('original_artist'),
            vertical_id=vertical_id
            # API doesn't include debut_date or times_played in base response
        )
        session.add(row)
        session.commit()
        session.refresh(row)
        mapping[record['id']] = row.id  # API uses 'id' not 'song_id'

    print(f"✓ Imported {len(mapping)} songs")
    return mapping
def import_setlists(session, show_map, song_map):
    """Import setlists for all shows.

    `show_map` / `song_map` translate El Goose IDs to local primary keys;
    entries whose show or song is unknown are skipped silently.
    """
    print("\n📋 Importing setlists...")

    # One bulk fetch: this endpoint returns performances across all shows.
    rows = fetch_json("setlists")
    if not rows:
        print("❌ No setlist data found")
        return

    # Restrict to shows we actually imported (i.e. Goose shows).
    relevant = [entry for entry in rows if entry.get('show_id') in show_map]

    imported = 0
    for entry in relevant:
        local_show = show_map.get(entry['show_id'])
        local_song = song_map.get(entry['song_id'])
        if not local_show or not local_song:
            continue

        session.add(Performance(
            show_id=local_show,
            song_id=local_song,
            position=entry.get('position', 0),
            set_name=entry.get('set'),
            segue=bool(entry.get('segue', 0)),
            notes=entry.get('notes')
        ))
        imported += 1

        # Commit in batches of 100 to keep transactions small.
        if imported % 100 == 0:
            session.commit()
            print(f"  Progress: {imported} performances...")

    session.commit()
    print(f"✓ Imported {imported} performances")
def main():
    """Run the full El Goose import: vertical, demo users, venues, songs,
    shows/tours, then setlists, printing a summary and launch hints at the end."""
    print("="*60)
    print("EL GOOSE DATA IMPORTER")
    print("="*60)

    with Session(engine) as session:
        # 1. Create vertical
        print("\n🦆 Creating Goose vertical...")
        vertical = Vertical(
            name="Goose",
            slug="goose",
            description="Goose is a jam band from Connecticut"
        )
        session.add(vertical)
        session.commit()
        session.refresh(vertical)  # populate the autogenerated id
        print(f"✓ Created vertical (ID: {vertical.id})")

        # 2. Create users
        users = create_users(session)

        # 3. Import base data (venues/songs must exist before shows/setlists
        #    so the ID maps below can resolve foreign keys)
        venue_map = import_venues(session)
        song_map = import_songs(session, vertical.id)

        # 4. Import shows
        show_map, tour_map = import_shows(session, vertical.id, venue_map)

        # 5. Import setlists
        import_setlists(session, show_map, song_map)

        print("\n" + "="*60)
        print("✓ IMPORT COMPLETE!")
        print("="*60)
        print(f"\nImported:")
        print(f"  • {len(venue_map)} venues")
        print(f"  • {len(tour_map)} tours")
        print(f"  • {len(song_map)} songs")
        print(f"  • {len(show_map)} shows")
        print(f"  • {len(users)} demo users")
        print(f"\nAll passwords: demo123")
        print(f"\nStart demo servers:")
        print(f"  Backend: DATABASE_URL='sqlite:///./elmeg-demo.db' uvicorn main:app --reload --port 8001")
        print(f"  Frontend: NEXT_PUBLIC_API_URL=http://localhost:8001 npm run dev -- -p 3001")
def import_songs(session, vertical_id):
    """Import all songs using correct API field names.

    Returns a dict mapping the API's song `id` to the local Song primary
    key, reusing existing rows matched on (title, vertical_id).
    """
    print("\n🎵 Importing songs...")
    rows = fetch_json("songs")
    if not rows:
        print("❌ No song data received")
        return {}

    id_map = {}
    for row in rows:
        # Upsert-style: reuse an existing (title, vertical) row when present.
        found = session.exec(
            select(Song).where(
                Song.title == row['name'],
                Song.vertical_id == vertical_id
            )
        ).first()

        if found:
            id_map[row['id']] = found.id
        else:
            created = Song(
                title=row['name'],
                original_artist=row.get('original_artist'),
                vertical_id=vertical_id
            )
            session.add(created)
            session.commit()
            session.refresh(created)
            id_map[row['id']] = created.id

        if len(id_map) % 100 == 0:
            print(f"  Progress: {len(id_map)} songs...")

    print(f"✓ Imported {len(id_map)} songs")
    return id_map
def import_setlists(session, vertical_id, song_map):
    """Import setlists for Goose shows only.

    Shows were imported without their El Goose IDs, so the El Goose
    show_id -> local Show.id mapping is rebuilt here by matching show dates.

    FIX: removed a dead `for show in shows: pass` loop (and its abandoned
    work-in-progress comments) that did nothing; behavior is unchanged.
    """
    print("\n📋 Importing setlists...")

    # Get all our shows from the database (count is informational only).
    shows = session.exec(
        select(Show).where(Show.vertical_id == vertical_id)
    ).all()

    print(f"  Found {len(shows)} shows in database")

    # Fetch ALL setlists and filter for Goose below.
    print("  Fetching setlists from API...")
    setlists_data = fetch_json("setlists")

    if not setlists_data:
        print("❌ No setlist data found")
        return

    print(f"  Received {len(setlists_data)} total performances")

    # Build mapping: El Goose show_id -> our database show id, by date.
    show_map = {}
    shows_data = fetch_json("shows")
    if shows_data:
        goose_shows = [s for s in shows_data if s.get('artist_id') == ARTIST_ID]
        print(f"  Found {len(goose_shows)} Goose shows in API")

        for eg_show in goose_shows:
            eg_date = datetime.strptime(eg_show['showdate'], '%Y-%m-%d').date()
            # Show.date is a datetime, so match the whole calendar day.
            db_show = session.exec(
                select(Show).where(
                    Show.vertical_id == vertical_id,
                    Show.date >= datetime.combine(eg_date, datetime.min.time()),
                    Show.date < datetime.combine(eg_date, datetime.max.time())
                )
            ).first()

            if db_show:
                show_map[eg_show['show_id']] = db_show.id

        print(f"  Mapped {len(show_map)} shows")

    # Now import performances
    performance_count = 0
    skipped = 0

    for perf_data in setlists_data:
        # Only import if this is a Goose show
        our_show_id = show_map.get(perf_data.get('show_id'))
        if not our_show_id:
            skipped += 1
            continue

        our_song_id = song_map.get(perf_data.get('song_id'))
        if not our_song_id:
            # Song not found - might be from another artist
            skipped += 1
            continue

        # Idempotency: skip performances already stored at this position.
        existing = session.exec(
            select(Performance).where(
                Performance.show_id == our_show_id,
                Performance.song_id == our_song_id,
                Performance.position == perf_data.get('position', 0)
            )
        ).first()

        if existing:
            continue

        perf = Performance(
            show_id=our_show_id,
            song_id=our_song_id,
            position=perf_data.get('position', 0),
            set_name=perf_data.get('set'),
            segue=bool(perf_data.get('segue', 0)),
            notes=perf_data.get('notes')
        )
        session.add(perf)
        performance_count += 1

        if performance_count % 100 == 0:
            session.commit()
            print(f"  Progress: {performance_count} performances...")

    session.commit()
    print(f"✓ Imported {performance_count} performances (skipped {skipped} non-Goose)")
print(f"โœ“ Imported {performance_count} performances (skipped {skipped} non-Goose)") + +def main(): + print("="*60) + print("EL GOOSE SONGS & SETLISTS IMPORTER") + print("="*60) + + with Session(engine) as session: + # Get existing Goose vertical + vertical = session.exec( + select(Vertical).where(Vertical.slug == "goose") + ).first() + + if not vertical: + print("โŒ Goose vertical not found! Run main import first.") + return + + print(f"โœ“ Found Goose vertical (ID: {vertical.id})") + + # Import songs and setlists + song_map = import_songs(session, vertical.id) + import_setlists(session, vertical.id, song_map) + + print("\n" + "="*60) + print("โœ“ IMPORT COMPLETE!") + print("="*60) + print(f"\nImported:") + print(f" โ€ข {len(song_map)} songs") + print(f"\nDemo environment is now fully populated!") + print(f"\nStart demo servers:") + print(f" Backend: DATABASE_URL='sqlite:///./elmeg-demo.db' uvicorn main:app --reload --port 8001") + print(f" Frontend: NEXT_PUBLIC_API_URL=http://localhost:8001 npm run dev -- -p 3001") + +if __name__ == "__main__": + main() diff --git a/backend/main.py b/backend/main.py new file mode 100644 index 0000000..c0cf9c6 --- /dev/null +++ b/backend/main.py @@ -0,0 +1,39 @@ +from fastapi import FastAPI +from routers import auth, shows, venues, songs, social, tours, artists, preferences, reviews, badges, nicknames, moderation, attendance, groups, users, search, performances, notifications, feed, leaderboards + +from fastapi.middleware.cors import CORSMiddleware + +app = FastAPI() + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # In production, set this to the frontend domain + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(auth.router) +app.include_router(shows.router) +app.include_router(venues.router) +app.include_router(songs.router) +app.include_router(social.router) +app.include_router(tours.router) +app.include_router(artists.router) +app.include_router(preferences.router) 
# Remaining feature routers, registered in the original order via a loop
# (each module exposes a `router` attribute, imported at the top of the file).
for _module in (reviews, badges, nicknames, moderation, attendance, groups,
                users, search, performances, notifications, feed, leaderboards):
    app.include_router(_module.router)

@app.get("/")
def read_root():
    """Trivial root endpoint; doubles as a liveness check."""
    return {"Hello": "World"}
def migrate_data():
    """Copy Goose data (venues, tours, songs, shows, performances) from the
    Honkingversion SQLite DB into this app's database.

    Existing show-related rows are wiped first (users are preserved); old→new
    ID maps keep foreign keys consistent across the copy.

    FIX: the raw `DELETE` statements were passed as bare strings to
    `session.exec`, which SQLAlchemy 2.x rejects — textual SQL must be wrapped
    in `text()`.
    """
    # sqlalchemy is a dependency of sqlmodel, so this import is always available.
    from sqlalchemy import text

    print("=" * 60)
    print("MIGRATING REAL GOOSE DATA")
    print("=" * 60)

    # Connect to source DB
    try:
        src_conn = sqlite3.connect(SOURCE_DB)
        src_conn.row_factory = sqlite3.Row  # access columns by name
        src_cur = src_conn.cursor()
        print(f"✓ Connected to source: {SOURCE_DB}")
    except Exception as e:
        print(f"❌ Failed to connect to source DB: {e}")
        return

    with Session(engine) as session:
        # 1. Get or Create Goose Vertical
        vertical = session.exec(select(Vertical).where(Vertical.slug == "goose")).first()
        if not vertical:
            vertical = Vertical(name="Goose", slug="goose", description="Jam band from CT")
            session.add(vertical)
            session.commit()
            session.refresh(vertical)
            print(f"✓ Created Goose vertical (ID: {vertical.id})")
        else:
            print(f"✓ Found Goose vertical (ID: {vertical.id})")

        # 2. Clear existing data (except Users) — children before parents.
        print("Clearing existing show data...")
        session.exec(text("DELETE FROM performance"))
        session.exec(text("DELETE FROM show"))
        session.exec(text("DELETE FROM song"))
        session.exec(text("DELETE FROM tour"))
        session.exec(text("DELETE FROM venue"))
        session.commit()
        print("✓ Cleared existing data")

        # 3. Migrate Venues
        print("Migrating Venues...")
        src_cur.execute("SELECT * FROM venue")
        venues = src_cur.fetchall()
        venue_map = {}  # old_id -> new_id

        for v in venues:
            new_venue = Venue(
                name=v['name'],
                city=v['city'],
                state=v['state'],
                country=v['country']
            )
            session.add(new_venue)
            session.commit()
            session.refresh(new_venue)  # need the new PK for the map
            venue_map[v['id']] = new_venue.id
        print(f"✓ Migrated {len(venues)} venues")

        # 4. Migrate Tours
        print("Migrating Tours...")
        src_cur.execute("SELECT * FROM tour")
        tours = src_cur.fetchall()
        tour_map = {}

        for t in tours:
            # Handle date parsing if needed, assuming strings in sqlite
            start_date = datetime.strptime(t['start_date'], '%Y-%m-%d %H:%M:%S') if t['start_date'] else None
            end_date = datetime.strptime(t['end_date'], '%Y-%m-%d %H:%M:%S') if t['end_date'] else None

            new_tour = Tour(
                name=t['name'],
                start_date=start_date,
                end_date=end_date
            )
            session.add(new_tour)
            session.commit()
            session.refresh(new_tour)
            tour_map[t['id']] = new_tour.id
        print(f"✓ Migrated {len(tours)} tours")

        # 5. Migrate Songs
        print("Migrating Songs...")
        src_cur.execute("SELECT * FROM song")
        songs = src_cur.fetchall()
        song_map = {}

        for s in songs:
            new_song = Song(
                title=s['name'],  # Map 'name' to 'title'
                original_artist=s['original_artist'],
                vertical_id=vertical.id
            )
            session.add(new_song)
            session.commit()
            session.refresh(new_song)
            song_map[s['id']] = new_song.id
        print(f"✓ Migrated {len(songs)} songs")

        # 6. Migrate Shows
        print("Migrating Shows...")
        src_cur.execute("SELECT * FROM show")
        shows = src_cur.fetchall()
        show_map = {}

        for s in shows:
            show_date = datetime.strptime(s['date'], '%Y-%m-%d %H:%M:%S') if s['date'] else None

            new_show = Show(
                date=show_date,
                vertical_id=vertical.id,
                venue_id=venue_map.get(s['venue_id']),
                tour_id=tour_map.get(s['tour_id']),
                notes=s['notes']
            )
            session.add(new_show)
            session.commit()
            session.refresh(new_show)
            show_map[s['id']] = new_show.id
        print(f"✓ Migrated {len(shows)} shows")

        # 7. Migrate Performances
        print("Migrating Performances...")
        src_cur.execute("SELECT * FROM songperformance")
        perfs = src_cur.fetchall()

        for p in perfs:
            # Skip if show or song missing (data integrity)
            if p['show_id'] not in show_map or p['song_id'] not in song_map:
                continue

            new_perf = Performance(
                show_id=show_map[p['show_id']],
                song_id=song_map[p['song_id']],
                position=p['position'],
                set_name=p['set_name'],
                segue=bool(p['segue']),
                notes=p['notes']
            )
            session.add(new_perf)
        # Single commit: performance PKs are not needed afterwards.
        session.commit()
        print(f"✓ Migrated {len(perfs)} performances")

    src_conn.close()
    print("=" * 60)
    print("✓ MIGRATION COMPLETE")
    print("=" * 60)
class ShowArtist(SQLModel, table=True):
    """Link table: an artist who appeared at a show (role in `notes`)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    show_id: int = Field(foreign_key="show.id")
    artist_id: int = Field(foreign_key="artist.id")
    notes: Optional[str] = Field(default=None, description="Role e.g. Guest")

class PerformanceArtist(SQLModel, table=True):
    """Link table: an artist credited on a single song performance."""
    id: Optional[int] = Field(default=None, primary_key=True)
    performance_id: int = Field(foreign_key="performance.id")
    artist_id: int = Field(foreign_key="artist.id")
    notes: Optional[str] = Field(default=None, description="Role e.g. Guest")

class PerformanceNickname(SQLModel, table=True):
    """A user-suggested nickname for a performance, moderated via `status`."""
    id: Optional[int] = Field(default=None, primary_key=True)
    performance_id: int = Field(foreign_key="performance.id")
    nickname: str = Field(index=True)
    description: Optional[str] = Field(default=None)
    status: str = Field(default="pending", index=True)  # pending, approved, rejected
    suggested_by: int = Field(foreign_key="user.id")
    # Naive UTC timestamp (datetime.utcnow), consistent with the other models.
    created_at: datetime = Field(default_factory=datetime.utcnow)

    performance: "Performance" = Relationship(back_populates="nicknames")
    user: "User" = Relationship()

class EntityTag(SQLModel, table=True):
    """Polymorphic tag association keyed by (tag_id, entity_type, entity_id)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    tag_id: int = Field(foreign_key="tag.id")
    entity_type: str = Field(index=True)  # "show", "song", "venue"
    entity_id: int = Field(index=True)
class Venue(SQLModel, table=True):
    """A physical venue; shows reference it via venue_id."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(index=True)
    city: str
    state: Optional[str] = Field(default=None)  # optional: not all countries have states
    country: str
    capacity: Optional[int] = Field(default=None)
    notes: Optional[str] = Field(default=None)

    shows: List["Show"] = Relationship(back_populates="venue")

class Tour(SQLModel, table=True):
    """A named tour grouping shows, optionally bounded by a date range."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(index=True)
    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None
    notes: Optional[str] = Field(default=None)

    shows: List["Show"] = Relationship(back_populates="tour")

class Artist(SQLModel, table=True):
    """An individual performer (linked to shows/performances via link tables)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(index=True)
    instrument: Optional[str] = Field(default=None)
    notes: Optional[str] = Field(default=None)

class Show(SQLModel, table=True):
    """A single concert: a dated event in a vertical, optionally at a venue
    and part of a tour; carries the setlist via `performances`."""
    id: Optional[int] = Field(default=None, primary_key=True)
    date: datetime = Field(index=True)
    vertical_id: int = Field(foreign_key="vertical.id")
    venue_id: Optional[int] = Field(default=None, foreign_key="venue.id")
    tour_id: Optional[int] = Field(default=None, foreign_key="tour.id")
    notes: Optional[str] = Field(default=None)

    vertical: Vertical = Relationship(back_populates="shows")
    venue: Optional[Venue] = Relationship(back_populates="shows")
    tour: Optional[Tour] = Relationship(back_populates="shows")
    attendances: List["Attendance"] = Relationship(back_populates="show")
    performances: List["Performance"] = Relationship(back_populates="show")
class Tag(SQLModel, table=True):
    """A reusable label; attached to entities through EntityTag."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(unique=True, index=True)
    slug: str = Field(unique=True, index=True)

class Attendance(SQLModel, table=True):
    """Records that a user attended a show."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user.id")
    show_id: int = Field(foreign_key="show.id")
    notes: Optional[str] = Field(default=None)
    created_at: datetime = Field(default_factory=datetime.utcnow)

    user: "User" = Relationship(back_populates="attendances")
    show: "Show" = Relationship(back_populates="attendances")

class Comment(SQLModel, table=True):
    """A user comment attached to a show, venue, or song.
    NOTE(review): nothing enforces that exactly one target FK is set."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user.id")
    content: str
    created_at: datetime = Field(default_factory=datetime.utcnow)

    # Polymorphic-ish associations (nullable FKs)
    show_id: Optional[int] = Field(default=None, foreign_key="show.id")
    venue_id: Optional[int] = Field(default=None, foreign_key="venue.id")
    song_id: Optional[int] = Field(default=None, foreign_key="song.id")

    user: "User" = Relationship(back_populates="comments")

class Rating(SQLModel, table=True):
    """A 1-10 score a user gives a show or a song (nullable target FKs)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user.id")
    score: int = Field(ge=1, le=10, description="Rating from 1 to 10")
    created_at: datetime = Field(default_factory=datetime.utcnow)

    show_id: Optional[int] = Field(default=None, foreign_key="show.id")
    song_id: Optional[int] = Field(default=None, foreign_key="song.id")

    user: "User" = Relationship(back_populates="ratings")
str = Field(default="user") # user, moderator, admin + bio: Optional[str] = Field(default=None) + avatar: Optional[str] = Field(default=None) + + # Multi-identity support: A user can have multiple Profiles + profiles: List["Profile"] = Relationship(back_populates="user") + comments: List["Comment"] = Relationship(back_populates="user") + ratings: List["Rating"] = Relationship(back_populates="user") + reviews: List["Review"] = Relationship(back_populates="user") + attendances: List["Attendance"] = Relationship(back_populates="user") + badges: List["UserBadge"] = Relationship(back_populates="user") + preferences: Optional["UserPreferences"] = Relationship(back_populates="user", sa_relationship_kwargs={"uselist": False}) + reports: List["Report"] = Relationship(back_populates="user") + notifications: List["Notification"] = Relationship(back_populates="user") + +class Report(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + user_id: int = Field(foreign_key="user.id") + entity_type: str = Field(index=True) # comment, review, nickname + entity_id: int = Field(index=True) + reason: str + details: str = Field(default="") + status: str = Field(default="pending", index=True) # pending, resolved, dismissed + created_at: datetime = Field(default_factory=datetime.utcnow) + + user: "User" = Relationship(back_populates="reports") + +class Badge(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + name: str = Field(unique=True, index=True) + description: str + icon: str = Field(description="Lucide icon name or image URL") + slug: str = Field(unique=True, index=True) + +class UserBadge(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + user_id: int = Field(foreign_key="user.id") + badge_id: int = Field(foreign_key="badge.id") + awarded_at: datetime = Field(default_factory=datetime.utcnow) + + user: "User" = Relationship(back_populates="badges") + badge: "Badge" = Relationship() + 
class Review(SQLModel, table=True):
    """A long-form review (blurb + content) with a 1-10 score.
    Target is one of the nullable FKs (show/venue/song/performance/tour) or
    `year`; NOTE(review): exclusivity is not enforced at the DB level."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user.id")
    blurb: str = Field(description="One-liner/pullquote")
    content: str = Field(description="Full review text")
    score: int = Field(ge=1, le=10)
    show_id: Optional[int] = Field(default=None, foreign_key="show.id")
    venue_id: Optional[int] = Field(default=None, foreign_key="venue.id")
    song_id: Optional[int] = Field(default=None, foreign_key="song.id")
    performance_id: Optional[int] = Field(default=None, foreign_key="performance.id")
    tour_id: Optional[int] = Field(default=None, foreign_key="tour.id")
    year: Optional[int] = Field(default=None, description="For reviewing a specific year")
    created_at: datetime = Field(default_factory=datetime.utcnow)

    user: "User" = Relationship(back_populates="reviews")

class UserPreferences(SQLModel, table=True):
    """Per-user settings; one row per user (user_id is unique)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user.id", unique=True)
    wiki_mode: bool = Field(default=False, description="Disable social features")
    show_ratings: bool = Field(default=True)
    show_comments: bool = Field(default=True)

    user: User = Relationship(back_populates="preferences")

class Profile(SQLModel, table=True):
    """A user's identity within a specific context or global"""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user.id")
    username: str = Field(index=True)  # indexed but not unique across profiles
    display_name: Optional[str] = Field(default=None)

    user: User = Relationship(back_populates="profiles")
List["GroupMember"] = Relationship(back_populates="group") + posts: List["GroupPost"] = Relationship(back_populates="group") + +class GroupMember(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + group_id: int = Field(foreign_key="group.id") + user_id: int = Field(foreign_key="user.id") + role: str = Field(default="member") # member, admin + joined_at: datetime = Field(default_factory=datetime.utcnow) + + group: Group = Relationship(back_populates="members") + user: User = Relationship() + +class GroupPost(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + group_id: int = Field(foreign_key="group.id") + user_id: int = Field(foreign_key="user.id") + content: str + created_at: datetime = Field(default_factory=datetime.utcnow) + + group: Group = Relationship(back_populates="posts") + user: User = Relationship() + +class Notification(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + user_id: int = Field(foreign_key="user.id", index=True) + type: str = Field(description="reply, mention, system") + title: str + message: str + link: Optional[str] = None + is_read: bool = Field(default=False) + created_at: datetime = Field(default_factory=datetime.utcnow) + + user: User = Relationship(back_populates="notifications") diff --git a/backend/quick_seed.py b/backend/quick_seed.py new file mode 100644 index 0000000..491a1a0 --- /dev/null +++ b/backend/quick_seed.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 +"""Quick demo seeder - creates users and basic data""" +import sys +sys.path.insert(0, '/Users/ten/ANTIGRAVITY/elmeg-demo/backend') + +from datetime import datetime, timedelta +from sqlmodel import Session +from passlib.context import CryptContext +from database import engine +from models import User, Vertical, Venue, Show, Song, Performance, UserPreferences + +pwd_context = CryptContext(schemes=["argon2"], deprecated="auto") + +print("Starting demo seed...") + +with 
Session(engine) as session: + # Create Goose vertical + vertical = Vertical(name="Goose", slug="goose", description="Jam band from CT") + session.add(vertical) + session.commit() + session.refresh(vertical) + print(f"โœ“ Created vertical: {vertical.name}") + + # Create 12 users + users_data = [ + ("archivist@demo.com", "TheArchivist", "user", True), + ("statnerd@demo.com", "StatNerd420", "user", False), + ("reviewer@demo.com", "CriticalListener", "user", False), + ("casual@demo.com", "CasualFan", "user", False), + ("groupleader@demo.com", "NortheastHonkers", "user", False), + ("mod@demo.com", "ModGoose", "moderator", False), + ("admin@demo.com", "AdminBird", "admin", False), + ("newbie@demo.com", "NewToGoose", "user", False), + ("taper@demo.com", "TaperTom", "user", False), + ("tourfollower@demo.com", "RoadWarrior", "user", False), + ("lurker@demo.com", "SilentHonker", "user", True), + ("hype@demo.com", "HypeGoose", "user", False), + ] + + users = [] + for email, username, role, wiki_mode in users_data: + user = User( + email=email, + hashed_password=pwd_context.hash("demo123"), + is_active=True, + is_superuser=(role == "admin"), + role=role + ) + session.add(user) + session.commit() + session.refresh(user) + + prefs = UserPreferences( + user_id=user.id, + wiki_mode=wiki_mode, + show_ratings=not wiki_mode, + show_comments=not wiki_mode + ) + session.add(prefs) + users.append(user) + print(f"โœ“ Created user: {username}") + + session.commit() + + # Create venues + venues = [ + Venue(name="Red Rocks", city="Morrison", state="CO", country="USA"), + Venue(name="Capitol Theatre", city="Port Chester", state="NY", country="USA"), + ] + for v in venues: + session.add(v) + session.commit() + print(f"โœ“ Created {len(venues)} venues") + + # Create songs + songs = [ + Song(title="Hungersite", vertical_id=vertical.id), + Song(title="Arcadia", vertical_id=vertical.id), + Song(title="Hot Tea", vertical_id=vertical.id), + ] + for s in songs: + session.add(s) + session.commit() + 
print(f"โœ“ Created {len(songs)} songs") + + # Create shows + shows = [] + for i in range(5): + show = Show( + date=datetime(2024, 1, 1) + timedelta(days=i*30), + vertical_id=vertical.id, + venue_id=venues[i % len(venues)].id + ) + session.add(show) + shows.append(show) + session.commit() + print(f"โœ“ Created {len(shows)} shows") + + # Create performances + for show in shows: + for pos, song in enumerate(songs, 1): + perf = Performance( + show_id=show.id, + song_id=song.id, + position=pos, + set_name="Set 1" + ) + session.add(perf) + session.commit() + print(f"โœ“ Created performances") + + print("\n" + "="*60) + print("โœ“ DEMO DATA SEEDED!") + print("="*60) + print("\nAll passwords: demo123") + print("\nStart demo server:") + print(" cd /Users/ten/ANTIGRAVITY/elmeg-demo/backend") + print(" DATABASE_URL='sqlite:///./elmeg-demo.db' uvicorn main:app --reload --port 8001") diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000..585c57b --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,11 @@ +fastapi +uvicorn[standard] +sqlmodel +alembic +python-jose[cryptography] +passlib[bcrypt] +python-multipart +pytest +httpx +argon2-cffi +psycopg2-binary diff --git a/backend/routers/artists.py b/backend/routers/artists.py new file mode 100644 index 0000000..fd30e2a --- /dev/null +++ b/backend/routers/artists.py @@ -0,0 +1,37 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Artist, User +from schemas import ArtistCreate, ArtistRead +from auth import get_current_user + +router = APIRouter(prefix="/artists", tags=["artists"]) + +@router.post("/", response_model=ArtistRead) +def create_artist( + artist: ArtistCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + db_artist = Artist.model_validate(artist) + session.add(db_artist) + session.commit() + 
@router.get("/", response_model=List[ArtistRead])
def read_artists(
    offset: int = 0,
    limit: int = Query(default=100, le=100),
    session: Session = Depends(get_session)
):
    """Page through artists (capped at 100 per request)."""
    query = select(Artist).offset(offset).limit(limit)
    return session.exec(query).all()

@router.get("/{artist_id}", response_model=ArtistRead)
def read_artist(artist_id: int, session: Session = Depends(get_session)):
    """Fetch a single artist by primary key."""
    found = session.get(Artist, artist_id)
    if found is None:
        raise HTTPException(status_code=404, detail="Artist not found")
    return found

# --- backend/routers/attendance.py ---
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlmodel import Session, select
from database import get_session
from models import Attendance, User, Show
from schemas import AttendanceCreate, AttendanceRead
from auth import get_current_user

router = APIRouter(prefix="/attendance", tags=["attendance"])

@router.post("/", response_model=AttendanceRead)
def mark_attendance(
    attendance: AttendanceCreate,
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_user)
):
    """Mark the current user as having attended a show (idempotent).

    If the user already marked this show, the notes are updated when provided;
    otherwise the existing record is returned untouched.
    """
    existing = session.exec(
        select(Attendance)
        .where(Attendance.user_id == current_user.id)
        .where(Attendance.show_id == attendance.show_id)
    ).first()

    if existing is not None:
        if attendance.notes:
            existing.notes = attendance.notes
            session.add(existing)
            session.commit()
            session.refresh(existing)
        return existing

    record = Attendance(**attendance.model_dump(), user_id=current_user.id)
    session.add(record)
    session.commit()
    session.refresh(record)
    return record

@router.delete("/{show_id}")
def remove_attendance(
    show_id: int,
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_user)
):
    """Un-mark the current user's attendance for a show."""
    record = session.exec(
        select(Attendance)
        .where(Attendance.user_id == current_user.id)
        .where(Attendance.show_id == show_id)
    ).first()

    if record is None:
        raise HTTPException(status_code=404, detail="Attendance not found")

    session.delete(record)
    session.commit()
    return {"ok": True}

@router.get("/me", response_model=List[AttendanceRead])
def get_my_attendance(
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_user)
):
    """All shows the current user has marked as attended."""
    mine = select(Attendance).where(Attendance.user_id == current_user.id)
    return session.exec(mine).all()

@router.get("/show/{show_id}", response_model=List[AttendanceRead])
def get_show_attendance(
    show_id: int,
    session: Session = Depends(get_session),
    offset: int = 0,
    limit: int = 100
):
    """Paged list of attendance records for one show."""
    query = (
        select(Attendance)
        .where(Attendance.show_id == show_id)
        .offset(offset)
        .limit(limit)
    )
    return session.exec(query).all()

# --- backend/routers/auth.py ---
from datetime import timedelta
from typing import Annotated
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from sqlmodel import Session, select
from database import get_session
from models import User, Profile
from schemas import UserCreate, Token, UserRead
from auth import verify_password, get_password_hash, create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES, get_current_user

router = APIRouter(prefix="/auth", tags=["auth"])

@router.post("/register", response_model=UserRead)
def register(user_in: UserCreate, session: Session = Depends(get_session)):
    """Register a new account and create its default Profile."""
    duplicate = session.exec(select(User).where(User.email == user_in.email)).first()
    if duplicate is not None:
        raise HTTPException(status_code=400, detail="Email already registered")

    # Persist the user first so we have an id for the profile FK.
    new_user = User(email=user_in.email, hashed_password=get_password_hash(user_in.password))
    session.add(new_user)
    session.commit()
    session.refresh(new_user)

    # Default profile: display name mirrors the chosen username.
    session.add(Profile(user_id=new_user.id, username=user_in.username, display_name=user_in.username))
    session.commit()

    return new_user
already registered")

    # Create User
    hashed_password = get_password_hash(user_in.password)
    db_user = User(email=user_in.email, hashed_password=hashed_password)
    session.add(db_user)
    session.commit()
    session.refresh(db_user)

    # Create Default Profile
    # NOTE(review): Profile.username is only indexed, not unique — duplicate
    # usernames can be registered; confirm whether that is intended.
    profile = Profile(user_id=db_user.id, username=user_in.username, display_name=user_in.username)
    session.add(profile)
    session.commit()

    return db_user

@router.post("/token", response_model=Token)
def login_for_access_token(
    form_data: Annotated[OAuth2PasswordRequestForm, Depends()],
    session: Session = Depends(get_session)
):
    # OAuth2 password flow: the form's "username" field carries the email here.
    user = session.exec(select(User).where(User.email == form_data.username)).first()
    if not user or not verify_password(form_data.password, user.hashed_password):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Token subject ("sub") is the user's email; expiry comes from auth config.
    access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(
        data={"sub": user.email}, expires_delta=access_token_expires
    )
    return {"access_token": access_token, "token_type": "bearer"}

@router.get("/users/me", response_model=UserRead)
def read_users_me(current_user: Annotated[User, Depends(get_current_user)]):
    # Echo the authenticated user resolved by the dependency.
    return current_user

# --- backend/routers/badges.py ---
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select
from database import get_session
from models import User, UserBadge, Badge
from schemas import UserBadgeRead
from auth import get_current_user
from services.stats import check_and_award_badges

router = APIRouter(prefix="/badges", tags=["badges"])

@router.get("/me", response_model=List[UserBadgeRead])
def read_my_badges(
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_user)
):
    # Badges are awarded lazily: reading your own badges runs the award check
    # first, so newly-qualified badges appear on this request.
    # Trigger a check (lazy evaluation of badges)
    check_and_award_badges(session, current_user.id)

    # Refresh user to get new badges
    session.refresh(current_user)
    return current_user.badges

@router.get("/{user_id}", response_model=List[UserBadgeRead])
def read_user_badges(
    user_id: int,
    session: Session = Depends(get_session)
):
    # Public view: no lazy award check here, only already-earned badges.
    user = session.get(User, user_id)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")
    return user.badges

# --- backend/routers/feed.py ---
from typing import List, Union
from fastapi import APIRouter, Depends, Query
from sqlmodel import Session, select, desc
from database import get_session
from models import Review, Attendance, GroupPost, User
from schemas import ReviewRead, AttendanceRead, GroupPostRead
from datetime import datetime

router = APIRouter(prefix="/feed", tags=["feed"])

# We need a unified schema for the feed
from pydantic import BaseModel

class FeedItem(BaseModel):
    # Wrapper so heterogeneous activity types share one feed payload shape.
    type: str # review, attendance, post
    timestamp: datetime
    data: Union[ReviewRead, AttendanceRead, GroupPostRead, dict]
    user: dict # Basic user info

@router.get("/", response_model=List[FeedItem])
def get_global_feed(
    limit: int = 20,
    session: Session = Depends(get_session)
):
    # Strategy: take the newest `limit` rows from each of the three sources,
    # merge in Python, sort, and trim. NOTE(review): each item does a per-row
    # User lookup (N+1 queries) — acceptable at demo scale only.
    # Fetch latest reviews
    reviews = session.exec(
        select(Review).order_by(desc(Review.created_at)).limit(limit)
    ).all()

    # Fetch latest attendance
    attendance = session.exec(
        select(Attendance).order_by(desc(Attendance.created_at)).limit(limit)
    ).all()

    # Fetch latest group posts
    posts = session.exec(
        select(GroupPost).order_by(desc(GroupPost.created_at)).limit(limit)
    ).all()

    feed_items = []

    for r in reviews:
        user = session.get(User, r.user_id)
        feed_items.append(FeedItem(
            type="review",
timestamp=r.created_at or datetime.utcnow(), # Handle missing created_at if any + data=r, + user={"id": user.id, "username": user.username, "avatar": user.avatar} if user else {} + )) + + for a in attendance: + user = session.get(User, a.user_id) + feed_items.append(FeedItem( + type="attendance", + timestamp=a.created_at, + data=a, + user={"id": user.id, "username": user.username, "avatar": user.avatar} if user else {} + )) + + for p in posts: + user = session.get(User, p.user_id) + feed_items.append(FeedItem( + type="post", + timestamp=p.created_at, + data=p, + user={"id": user.id, "username": user.username, "avatar": user.avatar} if user else {} + )) + + # Sort by timestamp desc + feed_items.sort(key=lambda x: x.timestamp, reverse=True) + + return feed_items[:limit] diff --git a/backend/routers/groups.py b/backend/routers/groups.py new file mode 100644 index 0000000..b5e4383 --- /dev/null +++ b/backend/routers/groups.py @@ -0,0 +1,128 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select, func +from database import get_session +from models import Group, GroupMember, GroupPost, User +from schemas import GroupCreate, GroupRead, GroupPostCreate, GroupPostRead +from auth import get_current_user + +router = APIRouter(prefix="/groups", tags=["groups"]) + +# --- Groups --- + +@router.post("/", response_model=GroupRead) +def create_group( + group: GroupCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + db_group = Group.model_validate(group) + db_group.created_by = current_user.id + session.add(db_group) + session.commit() + session.refresh(db_group) + + # Auto-join creator as admin + member = GroupMember(group_id=db_group.id, user_id=current_user.id, role="admin") + session.add(member) + session.commit() + + return db_group + +@router.get("/", response_model=List[GroupRead]) +def read_groups( + offset: int = 0, + limit: int = Query(default=100, 
le=100), + session: Session = Depends(get_session) +): + # TODO: Add member count to response + groups = session.exec(select(Group).offset(offset).limit(limit)).all() + return groups + +@router.get("/{group_id}", response_model=GroupRead) +def read_group(group_id: int, session: Session = Depends(get_session)): + group = session.get(Group, group_id) + if not group: + raise HTTPException(status_code=404, detail="Group not found") + return group + +@router.post("/{group_id}/join") +def join_group( + group_id: int, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + group = session.get(Group, group_id) + if not group: + raise HTTPException(status_code=404, detail="Group not found") + + # Check if already member + existing = session.exec( + select(GroupMember) + .where(GroupMember.group_id == group_id) + .where(GroupMember.user_id == current_user.id) + ).first() + + if existing: + raise HTTPException(status_code=400, detail="Already a member") + + member = GroupMember(group_id=group_id, user_id=current_user.id) + session.add(member) + session.commit() + + # Notify Group Owner + from routers.notifications import create_notification + create_notification( + session, + user_id=group.created_by, + type="group_join", + title="New Group Member", + message=f"{current_user.email} joined {group.name}", + link=f"/groups/{group.id}" + ) + + return {"status": "joined"} + +# --- Posts --- + +@router.post("/{group_id}/posts", response_model=GroupPostRead) +def create_post( + group_id: int, + post: GroupPostCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + # Verify membership + member = session.exec( + select(GroupMember) + .where(GroupMember.group_id == group_id) + .where(GroupMember.user_id == current_user.id) + ).first() + + if not member: + raise HTTPException(status_code=403, detail="Must be a member to post") + + db_post = GroupPost.model_validate(post) + db_post.group_id = 
group_id + db_post.user_id = current_user.id + session.add(db_post) + session.commit() + session.refresh(db_post) + return db_post + +@router.get("/{group_id}/posts", response_model=List[GroupPostRead]) +def read_posts( + group_id: int, + offset: int = 0, + limit: int = Query(default=50, le=100), + session: Session = Depends(get_session) +): + # Check if group is private? For now assume public read or check membership if private. + posts = session.exec( + select(GroupPost) + .where(GroupPost.group_id == group_id) + .order_by(GroupPost.created_at.desc()) + .offset(offset) + .limit(limit) + ).all() + return posts diff --git a/backend/routers/leaderboards.py b/backend/routers/leaderboards.py new file mode 100644 index 0000000..8bd2e43 --- /dev/null +++ b/backend/routers/leaderboards.py @@ -0,0 +1,94 @@ +from fastapi import APIRouter, Depends, Query +from sqlmodel import Session, select, func, desc +from typing import List, Optional +from database import get_session +from models import Review, Show, Venue, User, Profile + +router = APIRouter( + prefix="/leaderboards", + tags=["leaderboards"] +) + +@router.get("/shows/top") +def get_top_shows(limit: int = 10, session: Session = Depends(get_session)): + """Get top rated shows based on average review score""" + # Group by show_id, calc avg score, count reviews + # Filter for shows with at least 1 review (or maybe 2 to be significant?) + + # SQLModel doesn't support complex group_by/having easily in pure pythonic way sometimes, + # but we can use session.exec with a direct select. 
    query = (
        select(
            Show,
            func.avg(Review.score).label("avg_score"),
            func.count(Review.id).label("review_count")
        )
        .join(Review, Review.show_id == Show.id)
        .group_by(Show.id)
        .having(func.count(Review.id) >= 1)
        .order_by(desc("avg_score"), desc("review_count"))
        .limit(limit)
    )

    results = session.exec(query).all()

    # Each row is a (Show, avg, count) tuple from the aggregate select.
    return [
        {
            "show": show,
            "avg_score": round(score, 2),
            "review_count": count
        }
        for show, score, count in results
    ]

@router.get("/venues/top")
def get_top_venues(limit: int = 10, session: Session = Depends(get_session)):
    """Get top rated venues"""
    # NOTE(review): unlike /shows/top there is no HAVING threshold here, so a
    # venue with a single 10/10 review tops the board — confirm intended.
    query = (
        select(
            Venue,
            func.avg(Review.score).label("avg_score"),
            func.count(Review.id).label("review_count")
        )
        .join(Review, Review.venue_id == Venue.id)
        .group_by(Venue.id)
        .order_by(desc("avg_score"))
        .limit(limit)
    )

    results = session.exec(query).all()

    return [
        {
            "venue": venue,
            "avg_score": round(score, 2),
            "review_count": count
        }
        for venue, score, count in results
    ]

@router.get("/users/active")
def get_active_users(limit: int = 10, session: Session = Depends(get_session)):
    """Get users with most reviews"""
    # Joins Profile -> User -> Review so the response exposes the public
    # Profile rather than the User account record.
    query = (
        select(
            Profile,
            func.count(Review.id).label("review_count")
        )
        .join(User, User.id == Profile.user_id)
        .join(Review, Review.user_id == User.id)
        .group_by(Profile.id)
        .order_by(desc("review_count"))
        .limit(limit)
    )

    results = session.exec(query).all()

    return [
        {
            "profile": profile,
            "review_count": count
        }
        for profile, count in results
    ]

# --- backend/routers/moderation.py ---
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select
from database import get_session
from models import Report, User, PerformanceNickname
from schemas import ReportCreate, ReportRead, PerformanceNicknameRead
from auth import get_current_user
from dependencies import RoleChecker

router = APIRouter(prefix="/moderation", tags=["moderation"])

# Shared guard: moderator-only endpoints accept either role below.
allow_moderator = RoleChecker(["moderator", "admin"])

@router.post("/reports", response_model=ReportRead)
def create_report(
    report: ReportCreate,
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_user)
):
    # Reporter identity comes from auth, never from the request payload.
    db_report = Report.model_validate(report)
    db_report.user_id = current_user.id
    session.add(db_report)
    session.commit()
    session.refresh(db_report)
    return db_report

@router.get("/queue/nicknames", response_model=List[PerformanceNicknameRead], dependencies=[Depends(allow_moderator)])
def get_pending_nicknames(session: Session = Depends(get_session)):
    # Mod queue: only nicknames still awaiting a decision.
    nicknames = session.exec(
        select(PerformanceNickname).where(PerformanceNickname.status == "pending")
    ).all()
    return nicknames

@router.put("/nicknames/{nickname_id}/{action}", response_model=PerformanceNicknameRead, dependencies=[Depends(allow_moderator)])
def moderate_nickname(
    nickname_id: int,
    action: str, # approve, reject
    session: Session = Depends(get_session)
):
    # `action` is a path segment; anything other than approve/reject is a 400.
    nickname = session.get(PerformanceNickname, nickname_id)
    if not nickname:
        raise HTTPException(status_code=404, detail="Nickname not found")

    if action == "approve":
        nickname.status = "approved"
    elif action == "reject":
        nickname.status = "rejected"
    else:
        raise HTTPException(status_code=400, detail="Invalid action")

    session.add(nickname)
    session.commit()
    session.refresh(nickname)
    return nickname

@router.get("/queue/reports", response_model=List[ReportRead], dependencies=[Depends(allow_moderator)])
def get_pending_reports(session: Session = Depends(get_session)):
    # Mod queue: open reports only.
    reports = session.exec(
        select(Report).where(Report.status == "pending")
    ).all()
    return reports

@router.put("/reports/{report_id}/{action}", response_model=ReportRead, dependencies=[Depends(allow_moderator)])
def moderate_report(
    report_id: int,
    action: str, # resolve, dismiss
    session: Session = Depends(get_session)
):
    # Mirrors moderate_nickname: resolve/dismiss are the only valid actions.
    report = session.get(Report, report_id)
    if not report:
        raise HTTPException(status_code=404, detail="Report not found")

    if action == "resolve":
        report.status = "resolved"
    elif action == "dismiss":
        report.status = "dismissed"
    else:
        raise HTTPException(status_code=400, detail="Invalid action")

    session.add(report)
    session.commit()
    session.refresh(report)
    return report

# --- backend/routers/nicknames.py ---
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select
from database import get_session
from models import PerformanceNickname, User, Performance
from schemas import PerformanceNicknameCreate, PerformanceNicknameRead
from auth import get_current_user

router = APIRouter(prefix="/nicknames", tags=["nicknames"])

@router.post("/", response_model=PerformanceNicknameRead)
def suggest_nickname(
    nickname: PerformanceNicknameCreate,
    session: Session = Depends(get_session),
    current_user: User = Depends(get_current_user)
):
    # Verify performance exists
    perf = session.get(Performance, nickname.performance_id)
    if not perf:
        raise HTTPException(status_code=404, detail="Performance not found")

    # Suggestions always enter the moderation queue, regardless of payload.
    db_nickname = PerformanceNickname.model_validate(nickname)
    db_nickname.suggested_by = current_user.id
    db_nickname.status = "pending" # Default to pending

    session.add(db_nickname)
    session.commit()
    session.refresh(db_nickname)
    return db_nickname

@router.get("/performance/{performance_id}", response_model=List[PerformanceNicknameRead])
def read_performance_nicknames(
    performance_id: int,
    session: Session = Depends(get_session)
):
    # Only show approved nicknames publicly
nicknames publicly + nicknames = session.exec( + select(PerformanceNickname) + .where(PerformanceNickname.performance_id == performance_id) + .where(PerformanceNickname.status == "approved") + ).all() + return nicknames + +# Moderator endpoints would go here (approve/reject) diff --git a/backend/routers/notifications.py b/backend/routers/notifications.py new file mode 100644 index 0000000..d4677ce --- /dev/null +++ b/backend/routers/notifications.py @@ -0,0 +1,88 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select, desc +from database import get_session +from models import Notification, User +from schemas import NotificationRead, NotificationCreate +from auth import get_current_user + +router = APIRouter(prefix="/notifications", tags=["notifications"]) + +@router.get("/", response_model=List[NotificationRead]) +def read_notifications( + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user), + limit: int = 20, + offset: int = 0 +): + notifications = session.exec( + select(Notification) + .where(Notification.user_id == current_user.id) + .order_by(desc(Notification.created_at)) + .offset(offset) + .limit(limit) + ).all() + return notifications + +@router.get("/unread-count") +def get_unread_count( + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + from sqlmodel import func + count = session.exec( + select(func.count(Notification.id)) + .where(Notification.user_id == current_user.id) + .where(Notification.is_read == False) + ).one() + return {"count": count} + +@router.post("/{notification_id}/read") +def mark_as_read( + notification_id: int, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + notification = session.get(Notification, notification_id) + if not notification: + raise HTTPException(status_code=404, detail="Notification not found") + + if 
notification.user_id != current_user.id: + raise HTTPException(status_code=403, detail="Not authorized") + + notification.is_read = True + session.add(notification) + session.commit() + return {"ok": True} + +@router.post("/mark-all-read") +def mark_all_read( + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + notifications = session.exec( + select(Notification) + .where(Notification.user_id == current_user.id) + .where(Notification.is_read == False) + ).all() + + for n in notifications: + n.is_read = True + session.add(n) + + session.commit() + return {"ok": True} + +# Helper function to create notifications (not an endpoint) +def create_notification(session: Session, user_id: int, type: str, title: str, message: str, link: str = None): + notification = Notification( + user_id=user_id, + type=type, + title=title, + message=message, + link=link + ) + session.add(notification) + session.commit() + session.refresh(notification) + return notification diff --git a/backend/routers/performances.py b/backend/routers/performances.py new file mode 100644 index 0000000..000c98a --- /dev/null +++ b/backend/routers/performances.py @@ -0,0 +1,93 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Performance, PerformanceNickname, Tag, EntityTag +from schemas import PerformanceDetailRead, PerformanceNicknameCreate, PerformanceNicknameRead +from auth import get_current_user + +router = APIRouter(prefix="/performances", tags=["performances"]) + +@router.get("/{performance_id}", response_model=PerformanceDetailRead) +def read_performance(performance_id: int, session: Session = Depends(get_session)): + performance = session.get(Performance, performance_id) + if not performance: + raise HTTPException(status_code=404, detail="Performance not found") + + # --- Calculate Stats & Navigation --- + from sqlmodel 
import select, func, desc + from models import Show + + # Get all performances of this song, ordered by date + # We need to join Show to order by date + all_perfs = session.exec( + select(Performance, Show.date) + .join(Show) + .where(Performance.song_id == performance.song_id) + .order_by(Show.date) + ).all() + + # Find current index + # all_perfs is a list of tuples (Performance, date) + current_index = -1 + for i, (p, d) in enumerate(all_perfs): + if p.id == performance_id: + current_index = i + break + + prev_id = None + next_id = None + gap = 0 + times_played = current_index + 1 # 1-based count + + if current_index > 0: + prev_id = all_perfs[current_index - 1][0].id + + # Calculate Gap + # Gap is number of shows between prev performance and this one + prev_date = all_perfs[current_index - 1][1] + current_date = all_perfs[current_index][1] + + gap = session.exec( + select(func.count(Show.id)) + .where(Show.date > prev_date) + .where(Show.date < current_date) + ).one() + + if current_index < len(all_perfs) - 1: + next_id = all_perfs[current_index + 1][0].id + + # Construct response manually to include extra fields + # We need to ensure nested models (show, song) are validated correctly + perf_dict = performance.model_dump() + perf_dict['show'] = performance.show + perf_dict['song'] = performance.song + perf_dict['nicknames'] = performance.nicknames + perf_dict['previous_performance_id'] = prev_id + perf_dict['next_performance_id'] = next_id + perf_dict['gap'] = gap + perf_dict['times_played'] = times_played + + return perf_dict + +@router.post("/{performance_id}/nicknames", response_model=PerformanceNicknameRead) +def suggest_nickname( + performance_id: int, + nickname: PerformanceNicknameCreate, + session: Session = Depends(get_session), + current_user = Depends(get_current_user) +): + # Check if performance exists + perf = session.get(Performance, performance_id) + if not perf: + raise HTTPException(status_code=404, detail="Performance not found") + + 
db_nickname = PerformanceNickname.model_validate(nickname) + db_nickname.performance_id = performance_id + db_nickname.suggested_by = current_user.id + db_nickname.status = "pending" # Default to pending + + session.add(db_nickname) + session.commit() + session.refresh(db_nickname) + return db_nickname diff --git a/backend/routers/preferences.py b/backend/routers/preferences.py new file mode 100644 index 0000000..fd7605a --- /dev/null +++ b/backend/routers/preferences.py @@ -0,0 +1,42 @@ +from fastapi import APIRouter, Depends, HTTPException +from sqlmodel import Session, select +from database import get_session +from models import User, UserPreferences +from schemas import UserPreferencesRead, UserPreferencesUpdate +from auth import get_current_user + +router = APIRouter(prefix="/preferences", tags=["preferences"]) + +@router.get("/", response_model=UserPreferencesRead) +def read_preferences( + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + prefs = current_user.preferences + if not prefs: + # Create default preferences if they don't exist + prefs = UserPreferences(user_id=current_user.id) + session.add(prefs) + session.commit() + session.refresh(prefs) + return prefs + +@router.put("/", response_model=UserPreferencesRead) +def update_preferences( + preferences: UserPreferencesUpdate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + prefs = current_user.preferences + if not prefs: + prefs = UserPreferences(user_id=current_user.id) + session.add(prefs) + + prefs_data = preferences.model_dump(exclude_unset=True) + for key, value in prefs_data.items(): + setattr(prefs, key, value) + + session.add(prefs) + session.commit() + session.refresh(prefs) + return prefs diff --git a/backend/routers/reviews.py b/backend/routers/reviews.py new file mode 100644 index 0000000..2eedcea --- /dev/null +++ b/backend/routers/reviews.py @@ -0,0 +1,51 @@ +from typing import List, Optional 
+from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Review, User +from schemas import ReviewCreate, ReviewRead +from auth import get_current_user + +router = APIRouter(prefix="/reviews", tags=["reviews"]) + +@router.post("/", response_model=ReviewRead) +def create_review( + review: ReviewCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + db_review = Review.model_validate(review) + db_review.user_id = current_user.id + session.add(db_review) + session.commit() + session.refresh(db_review) + return db_review + +@router.get("/", response_model=List[ReviewRead]) +def read_reviews( + show_id: Optional[int] = None, + venue_id: Optional[int] = None, + song_id: Optional[int] = None, + performance_id: Optional[int] = None, + tour_id: Optional[int] = None, + year: Optional[int] = None, + offset: int = 0, + limit: int = Query(default=100, le=100), + session: Session = Depends(get_session) +): + query = select(Review) + if show_id: + query = query.where(Review.show_id == show_id) + if venue_id: + query = query.where(Review.venue_id == venue_id) + if song_id: + query = query.where(Review.song_id == song_id) + if performance_id: + query = query.where(Review.performance_id == performance_id) + if tour_id: + query = query.where(Review.tour_id == tour_id) + if year: + query = query.where(Review.year == year) + + reviews = session.exec(query.offset(offset).limit(limit)).all() + return reviews diff --git a/backend/routers/search.py b/backend/routers/search.py new file mode 100644 index 0000000..45edd32 --- /dev/null +++ b/backend/routers/search.py @@ -0,0 +1,83 @@ +from typing import List, Optional +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select, col +from database import get_session +from models import Show, Song, Venue, Tour, User, Group, Performance, PerformanceNickname, 
Comment, Review +from schemas import ShowRead, SongRead, VenueRead, TourRead, UserRead, GroupRead + +router = APIRouter(prefix="/search", tags=["search"]) + +@router.get("/") +def global_search( + q: str, + session: Session = Depends(get_session), + limit: int = 5 +): + if len(q) < 2: + return {} + + q_str = f"%{q}%" + + # Search Songs + songs = session.exec(select(Song).where(col(Song.title).ilike(q_str)).limit(limit)).all() + + # Search Venues + venues = session.exec(select(Venue).where(col(Venue.name).ilike(q_str)).limit(limit)).all() + + # Search Tours + tours = session.exec(select(Tour).where(col(Tour.name).ilike(q_str)).limit(limit)).all() + + # Search Groups + groups = session.exec(select(Group).where(col(Group.name).ilike(q_str)).limit(limit)).all() + + # Search Users (by username or email) + users = session.exec(select(User).where(col(User.email).ilike(q_str)).limit(limit)).all() + + # Search Nicknames + nicknames = session.exec( + select(PerformanceNickname) + .where(col(PerformanceNickname.nickname).ilike(q_str)) + .where(PerformanceNickname.status == "approved") + .limit(limit) + ).all() + + # Search Performances (by notes, e.g. "unfinished", "slow version") + # We join with Song and Show to provide context in the frontend if needed, + # but for now let's just return the Performance object and let frontend fetch details + # or we can return a custom schema. + # Actually, let's just search notes for now. 
+ performances = session.exec( + select(Performance) + .join(Song) + .where(col(Performance.notes).ilike(q_str)) + .limit(limit) + ).all() + + # Search Reviews + reviews = session.exec( + select(Review) + .where( + (col(Review.blurb).ilike(q_str)) | + (col(Review.content).ilike(q_str)) + ) + .limit(limit) + ).all() + + # Search Comments + comments = session.exec( + select(Comment) + .where(col(Comment.content).ilike(q_str)) + .limit(limit) + ).all() + + return { + "songs": songs, + "venues": venues, + "tours": tours, + "groups": groups, + "users": users, + "nicknames": nicknames, + "performances": performances, + "reviews": reviews, + "comments": comments + } diff --git a/backend/routers/shows.py b/backend/routers/shows.py new file mode 100644 index 0000000..38bc785 --- /dev/null +++ b/backend/routers/shows.py @@ -0,0 +1,88 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Show, Tag, EntityTag +from schemas import ShowCreate, ShowRead, ShowUpdate, TagRead +from auth import get_current_user + +router = APIRouter(prefix="/shows", tags=["shows"]) + +@router.post("/", response_model=ShowRead) +def create_show(show: ShowCreate, session: Session = Depends(get_session), current_user = Depends(get_current_user)): + db_show = Show.model_validate(show) + session.add(db_show) + session.commit() + session.refresh(db_show) + return db_show + +@router.get("/", response_model=List[ShowRead]) +def read_shows( + offset: int = 0, + limit: int = Query(default=100, le=100), + venue_id: int = None, + tour_id: int = None, + year: int = None, + session: Session = Depends(get_session) +): + query = select(Show) + if venue_id: + query = query.where(Show.venue_id == venue_id) + if tour_id: + query = query.where(Show.tour_id == tour_id) + # if year: + # # SQLite/Postgres specific year extraction might differ, + # # but usually we can filter by date range or 
extract year. + # # For simplicity let's skip year for now or use a range if needed. + # pass + + shows = session.exec(query.offset(offset).limit(limit)).all() + return shows + +@router.get("/{show_id}", response_model=ShowRead) +def read_show(show_id: int, session: Session = Depends(get_session)): + show = session.get(Show, show_id) + if not show: + raise HTTPException(status_code=404, detail="Show not found") + + tags = session.exec( + select(Tag) + .join(EntityTag, Tag.id == EntityTag.tag_id) + .where(EntityTag.entity_type == "show") + .where(EntityTag.entity_id == show_id) + ).all() + + # Manually populate performances to ensure nicknames are filtered if needed + # (Though for now we just return all, or filter approved in schema if we had a custom getter) + # The relationship `show.performances` is already loaded if we access it, but we might want to sort. + + # Re-fetch show with relationships if needed, or just rely on lazy loading + validation + # But for nicknames, we only want "approved" ones usually. + # Let's let the frontend filter or do it here. + # Doing it here is safer. 
+ + show_data = ShowRead.model_validate(show) + show_data.tags = tags + + # Sort performances by position + sorted_perfs = sorted(show.performances, key=lambda p: p.position) + + # Filter nicknames for each performance + for perf in sorted_perfs: + perf.nicknames = [n for n in perf.nicknames if n.status == "approved"] + + show_data.performances = sorted_perfs + + return show_data + +@router.patch("/{show_id}", response_model=ShowRead) +def update_show(show_id: int, show: ShowUpdate, session: Session = Depends(get_session), current_user = Depends(get_current_user)): + db_show = session.get(Show, show_id) + if not db_show: + raise HTTPException(status_code=404, detail="Show not found") + show_data = show.model_dump(exclude_unset=True) + db_show.sqlmodel_update(show_data) + session.add(db_show) + session.commit() + session.refresh(db_show) + return db_show diff --git a/backend/routers/social.py b/backend/routers/social.py new file mode 100644 index 0000000..ccb1d4a --- /dev/null +++ b/backend/routers/social.py @@ -0,0 +1,99 @@ +from typing import List, Optional +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select, func +from database import get_session +from models import Comment, Rating, User +from schemas import CommentCreate, CommentRead, RatingCreate, RatingRead +from auth import get_current_user + +router = APIRouter(prefix="/social", tags=["social"]) + +# --- Comments --- + +@router.post("/comments", response_model=CommentRead) +def create_comment( + comment: CommentCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + db_comment = Comment.model_validate(comment) + db_comment.user_id = current_user.id + session.add(db_comment) + session.commit() + session.refresh(db_comment) + + # Notify parent author if reply (TODO: Add parent_id to Comment model) + # For now, let's just log it or skip. 
+ + return db_comment + +@router.get("/comments", response_model=List[CommentRead]) +def read_comments( + show_id: Optional[int] = None, + venue_id: Optional[int] = None, + song_id: Optional[int] = None, + offset: int = 0, + limit: int = Query(default=50, le=100), + session: Session = Depends(get_session) +): + query = select(Comment) + if show_id: + query = query.where(Comment.show_id == show_id) + if venue_id: + query = query.where(Comment.venue_id == venue_id) + if song_id: + query = query.where(Comment.song_id == song_id) + + query = query.order_by(Comment.created_at.desc()).offset(offset).limit(limit) + comments = session.exec(query).all() + return comments + +# --- Ratings --- + +@router.post("/ratings", response_model=RatingRead) +def create_rating( + rating: RatingCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + # Check if user already rated this entity + query = select(Rating).where(Rating.user_id == current_user.id) + if rating.show_id: + query = query.where(Rating.show_id == rating.show_id) + elif rating.song_id: + query = query.where(Rating.song_id == rating.song_id) + else: + raise HTTPException(status_code=400, detail="Must rate a show or song") + + existing_rating = session.exec(query).first() + if existing_rating: + # Update existing + existing_rating.score = rating.score + session.add(existing_rating) + session.commit() + session.refresh(existing_rating) + return existing_rating + + db_rating = Rating.model_validate(rating) + db_rating.user_id = current_user.id + session.add(db_rating) + session.commit() + session.refresh(db_rating) + return db_rating + +@router.get("/ratings/average", response_model=float) +def get_average_rating( + show_id: Optional[int] = None, + song_id: Optional[int] = None, + session: Session = Depends(get_session) +): + query = select(func.avg(Rating.score)) + if show_id: + query = query.where(Rating.show_id == show_id) + elif song_id: + query = 
query.where(Rating.song_id == song_id) + else: + raise HTTPException(status_code=400, detail="Must specify show_id or song_id") + + avg = session.exec(query).first() + return float(avg) if avg else 0.0 diff --git a/backend/routers/songs.py b/backend/routers/songs.py new file mode 100644 index 0000000..233db68 --- /dev/null +++ b/backend/routers/songs.py @@ -0,0 +1,59 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Song, User, Tag, EntityTag +from schemas import SongCreate, SongRead, SongReadWithStats, SongUpdate, TagRead +from auth import get_current_user + +router = APIRouter(prefix="/songs", tags=["songs"]) + +@router.post("/", response_model=SongRead) +def create_song(song: SongCreate, session: Session = Depends(get_session), current_user = Depends(get_current_user)): + db_song = Song.model_validate(song) + session.add(db_song) + session.commit() + session.refresh(db_song) + return db_song + +@router.get("/", response_model=List[SongRead]) +def read_songs(offset: int = 0, limit: int = Query(default=100, le=100), session: Session = Depends(get_session)): + songs = session.exec(select(Song).offset(offset).limit(limit)).all() + return songs + +from services.stats import get_song_stats + +@router.get("/{song_id}", response_model=SongReadWithStats) +def read_song(song_id: int, session: Session = Depends(get_session)): + song = session.get(Song, song_id) + if not song: + raise HTTPException(status_code=404, detail="Song not found") + + stats = get_song_stats(session, song_id) + + tags = session.exec( + select(Tag) + .join(EntityTag, Tag.id == EntityTag.tag_id) + .where(EntityTag.entity_type == "song") + .where(EntityTag.entity_id == song_id) + ).all() + + # Merge song data with stats + song_with_stats = SongReadWithStats( + **song.model_dump(), + **stats + ) + song_with_stats.tags = tags + return song_with_stats + 
+@router.patch("/{song_id}", response_model=SongRead) +def update_song(song_id: int, song: SongUpdate, session: Session = Depends(get_session), current_user = Depends(get_current_user)): + db_song = session.get(Song, song_id) + if not db_song: + raise HTTPException(status_code=404, detail="Song not found") + song_data = song.model_dump(exclude_unset=True) + db_song.sqlmodel_update(song_data) + session.add(db_song) + session.commit() + session.refresh(db_song) + return db_song diff --git a/backend/routers/tours.py b/backend/routers/tours.py new file mode 100644 index 0000000..8a1c279 --- /dev/null +++ b/backend/routers/tours.py @@ -0,0 +1,37 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Tour, User +from schemas import TourCreate, TourRead, TourUpdate +from auth import get_current_user + +router = APIRouter(prefix="/tours", tags=["tours"]) + +@router.post("/", response_model=TourRead) +def create_tour( + tour: TourCreate, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_user) +): + db_tour = Tour.model_validate(tour) + session.add(db_tour) + session.commit() + session.refresh(db_tour) + return db_tour + +@router.get("/", response_model=List[TourRead]) +def read_tours( + offset: int = 0, + limit: int = Query(default=100, le=100), + session: Session = Depends(get_session) +): + tours = session.exec(select(Tour).offset(offset).limit(limit)).all() + return tours + +@router.get("/{tour_id}", response_model=TourRead) +def read_tour(tour_id: int, session: Session = Depends(get_session)): + tour = session.get(Tour, tour_id) + if not tour: + raise HTTPException(status_code=404, detail="Tour not found") + return tour diff --git a/backend/routers/users.py b/backend/routers/users.py new file mode 100644 index 0000000..93e2c90 --- /dev/null +++ b/backend/routers/users.py @@ -0,0 +1,80 @@ +from typing import 
List, Optional +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select, func +from database import get_session +from models import User, Review, Attendance, Group, GroupMember, Show +from schemas import UserRead, ReviewRead, ShowRead, GroupRead +from auth import get_current_user + +router = APIRouter(prefix="/users", tags=["users"]) + +# --- User Stats --- + +@router.get("/{user_id}/stats") +def get_user_stats(user_id: int, session: Session = Depends(get_session)): + # Check if user exists + user = session.get(User, user_id) + if not user: + raise HTTPException(status_code=404, detail="User not found") + + attendance_count = session.exec(select(func.count(Attendance.id)).where(Attendance.user_id == user_id)).one() + review_count = session.exec(select(func.count(Review.id)).where(Review.user_id == user_id)).one() + group_count = session.exec(select(func.count(GroupMember.id)).where(GroupMember.user_id == user_id)).one() + + return { + "attendance_count": attendance_count, + "review_count": review_count, + "group_count": group_count + } + +# --- User Data Lists --- + +@router.get("/{user_id}/attendance", response_model=List[ShowRead]) +def get_user_attendance( + user_id: int, + offset: int = 0, + limit: int = Query(default=50, le=100), + session: Session = Depends(get_session) +): + # Return shows the user attended + shows = session.exec( + select(Show) + .join(Attendance, Show.id == Attendance.show_id) + .where(Attendance.user_id == user_id) + .order_by(Show.date.desc()) + .offset(offset) + .limit(limit) + ).all() + return shows + +@router.get("/{user_id}/reviews", response_model=List[ReviewRead]) +def get_user_reviews( + user_id: int, + offset: int = 0, + limit: int = Query(default=50, le=100), + session: Session = Depends(get_session) +): + reviews = session.exec( + select(Review) + .where(Review.user_id == user_id) + .order_by(Review.created_at.desc()) + .offset(offset) + .limit(limit) + ).all() + return reviews + 
+@router.get("/{user_id}/groups", response_model=List[GroupRead]) +def get_user_groups( + user_id: int, + offset: int = 0, + limit: int = Query(default=50, le=100), + session: Session = Depends(get_session) +): + groups = session.exec( + select(Group) + .join(GroupMember, Group.id == GroupMember.group_id) + .where(GroupMember.user_id == user_id) + .offset(offset) + .limit(limit) + ).all() + return groups diff --git a/backend/routers/venues.py b/backend/routers/venues.py new file mode 100644 index 0000000..b53ec33 --- /dev/null +++ b/backend/routers/venues.py @@ -0,0 +1,41 @@ +from typing import List +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import Session, select +from database import get_session +from models import Venue +from schemas import VenueCreate, VenueRead, VenueUpdate +from auth import get_current_user + +router = APIRouter(prefix="/venues", tags=["venues"]) + +@router.post("/", response_model=VenueRead) +def create_venue(venue: VenueCreate, session: Session = Depends(get_session), current_user = Depends(get_current_user)): + db_venue = Venue.model_validate(venue) + session.add(db_venue) + session.commit() + session.refresh(db_venue) + return db_venue + +@router.get("/", response_model=List[VenueRead]) +def read_venues(offset: int = 0, limit: int = Query(default=100, le=100), session: Session = Depends(get_session)): + venues = session.exec(select(Venue).offset(offset).limit(limit)).all() + return venues + +@router.get("/{venue_id}", response_model=VenueRead) +def read_venue(venue_id: int, session: Session = Depends(get_session)): + venue = session.get(Venue, venue_id) + if not venue: + raise HTTPException(status_code=404, detail="Venue not found") + return venue + +@router.patch("/{venue_id}", response_model=VenueRead) +def update_venue(venue_id: int, venue: VenueUpdate, session: Session = Depends(get_session), current_user = Depends(get_current_user)): + db_venue = session.get(Venue, venue_id) + if not db_venue: + 
raise HTTPException(status_code=404, detail="Venue not found") + venue_data = venue.model_dump(exclude_unset=True) + db_venue.sqlmodel_update(venue_data) + session.add(db_venue) + session.commit() + session.refresh(db_venue) + return db_venue diff --git a/backend/schemas.py b/backend/schemas.py new file mode 100644 index 0000000..09c9e3c --- /dev/null +++ b/backend/schemas.py @@ -0,0 +1,336 @@ +from typing import Optional, List +from sqlmodel import SQLModel +from datetime import datetime + +class UserCreate(SQLModel): + email: str + password: str + username: str + +class UserRead(SQLModel): + id: int + email: str + is_active: bool + is_superuser: bool + +class Token(SQLModel): + access_token: str + token_type: str + +class TokenData(SQLModel): + email: Optional[str] = None + +# --- Venue Schemas --- +class VenueBase(SQLModel): + name: str + city: str + state: Optional[str] = None + country: str + capacity: Optional[int] = None + notes: Optional[str] = None + +class VenueCreate(VenueBase): + pass + +class VenueRead(VenueBase): + id: int + +class VenueUpdate(SQLModel): + name: Optional[str] = None + city: Optional[str] = None + state: Optional[str] = None + country: Optional[str] = None + capacity: Optional[int] = None + notes: Optional[str] = None + +# --- Song Schemas --- +class SongBase(SQLModel): + title: str + original_artist: Optional[str] = None + vertical_id: int + notes: Optional[str] = None + +class SongCreate(SongBase): + pass + +class SongRead(SongBase): + id: int + tags: List["TagRead"] = [] + +class SongReadWithStats(SongRead): + times_played: int + gap: int + last_played: Optional[datetime] = None + +class SongUpdate(SQLModel): + title: Optional[str] = None + original_artist: Optional[str] = None + notes: Optional[str] = None + +# --- Show Schemas --- +class ShowBase(SQLModel): + date: datetime + vertical_id: int + venue_id: Optional[int] = None + tour_id: Optional[int] = None + notes: Optional[str] = None + +class ShowCreate(ShowBase): + pass + +# 
--- Performance Schemas --- +class PerformanceBase(SQLModel): + show_id: int + song_id: int + position: int + set_name: Optional[str] = None + segue: bool = False + notes: Optional[str] = None + +class PerformanceRead(PerformanceBase): + id: int + song: Optional["SongRead"] = None + nicknames: List["PerformanceNicknameRead"] = [] + +class PerformanceDetailRead(PerformanceRead): + show: Optional["ShowRead"] = None + previous_performance_id: Optional[int] = None + next_performance_id: Optional[int] = None + gap: Optional[int] = 0 + times_played: Optional[int] = 0 + +# --- Groups --- +class GroupBase(SQLModel): + name: str + description: Optional[str] = None + privacy: str = "public" + +class GroupCreate(GroupBase): + pass + +class GroupRead(GroupBase): + id: int + created_by: int + created_at: datetime + member_count: Optional[int] = 0 + +class GroupPostBase(SQLModel): + content: str + +class GroupPostCreate(GroupPostBase): + group_id: int + +class GroupPostRead(GroupPostBase): + id: int + group_id: int + user_id: int + created_at: datetime + song: Optional["SongRead"] = None + nicknames: List["PerformanceNicknameRead"] = [] + +class ShowRead(ShowBase): + id: int + venue: Optional["VenueRead"] = None + tour: Optional["TourRead"] = None + tags: List["TagRead"] = [] + performances: List["PerformanceRead"] = [] + +class ShowUpdate(SQLModel): + date: Optional[datetime] = None + venue_id: Optional[int] = None + tour_id: Optional[int] = None + notes: Optional[str] = None + +# --- Tour Schemas --- +class TourBase(SQLModel): + name: str + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + notes: Optional[str] = None + +class TourCreate(TourBase): + pass + +class TourRead(TourBase): + id: int + +class TourUpdate(SQLModel): + name: Optional[str] = None + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + notes: Optional[str] = None + +# --- Artist Schemas --- +class ArtistBase(SQLModel): + name: str + instrument: 
Optional[str] = None + notes: Optional[str] = None + +class ArtistCreate(ArtistBase): + pass + +class ArtistRead(ArtistBase): + id: int + +class ArtistUpdate(SQLModel): + name: Optional[str] = None + instrument: Optional[str] = None + notes: Optional[str] = None + +# --- Attendance Schemas --- +class AttendanceBase(SQLModel): + show_id: int + notes: Optional[str] = None + +class AttendanceCreate(AttendanceBase): + pass + +class AttendanceRead(AttendanceBase): + id: int + user_id: int + created_at: datetime + +# --- Social Schemas --- +class CommentBase(SQLModel): + content: str + show_id: Optional[int] = None + venue_id: Optional[int] = None + song_id: Optional[int] = None + +class CommentCreate(CommentBase): + pass + +class CommentRead(CommentBase): + id: int + user_id: int + created_at: datetime + # We might want to include the username here later + +class RatingBase(SQLModel): + score: int + show_id: Optional[int] = None + song_id: Optional[int] = None + +class RatingCreate(RatingBase): + pass + +class RatingRead(RatingBase): + id: int + user_id: int + created_at: datetime + +class ReviewBase(SQLModel): + blurb: str + content: str + score: int + show_id: Optional[int] = None + venue_id: Optional[int] = None + song_id: Optional[int] = None + performance_id: Optional[int] = None + tour_id: Optional[int] = None + year: Optional[int] = None + +class ReviewCreate(ReviewBase): + pass + +class ReviewRead(ReviewBase): + id: int + user_id: int + created_at: datetime + +# --- Badge Schemas --- +class BadgeBase(SQLModel): + name: str + description: str + icon: str + slug: str + +class BadgeCreate(BadgeBase): + pass + +class BadgeRead(BadgeBase): + id: int + +class UserBadgeRead(SQLModel): + id: int + user_id: int + badge: BadgeRead + awarded_at: datetime + +# --- Nickname Schemas --- +class PerformanceNicknameBase(SQLModel): + performance_id: int + nickname: str + description: Optional[str] = None + +class PerformanceNicknameCreate(PerformanceNicknameBase): + pass + +class 
PerformanceNicknameRead(PerformanceNicknameBase): + id: int + status: str + suggested_by: int + created_at: datetime + +# --- Report Schemas --- +class ReportBase(SQLModel): + target_type: str + target_id: int + reason: str + +class ReportCreate(ReportBase): + pass + +class ReportRead(ReportBase): + id: int + user_id: int + status: str + created_at: datetime + +# --- User Preferences Schemas --- +class UserPreferencesBase(SQLModel): + wiki_mode: bool = False + show_ratings: bool = True + show_comments: bool = True + +class UserPreferencesCreate(UserPreferencesBase): + pass + +class UserPreferencesUpdate(SQLModel): + wiki_mode: Optional[bool] = None + show_ratings: Optional[bool] = None + show_comments: Optional[bool] = None + +class UserPreferencesRead(UserPreferencesBase): + user_id: int + +# --- Notification Schemas --- +class NotificationBase(SQLModel): + type: str + title: str + message: str + link: Optional[str] = None + is_read: bool = False + +class NotificationCreate(NotificationBase): + user_id: int + +class NotificationRead(NotificationBase): + id: int + created_at: datetime + +# --- Tag Schemas --- +class TagBase(SQLModel): + name: str + slug: str + +class TagCreate(TagBase): + pass + +class TagRead(TagBase): + id: int + + +# Circular refs +ShowRead.model_rebuild() +PerformanceDetailRead.model_rebuild() diff --git a/backend/seed.py b/backend/seed.py new file mode 100644 index 0000000..2c29568 --- /dev/null +++ b/backend/seed.py @@ -0,0 +1,52 @@ +from sqlmodel import Session, select +from database import engine, create_db_and_tables +from models import Vertical, Venue, Show, Song, Performance, User +from datetime import datetime + +def seed_data(): + with Session(engine) as session: + # Check if data exists + existing_vertical = session.exec(select(Vertical)).first() + if existing_vertical: + print("Data already seeded.") + return + + # Create Vertical + phish = Vertical(name="Phish", slug="phish", description="The band Phish.") + session.add(phish) + 
session.commit() + session.refresh(phish) + + # Create Venue + msg = Venue(name="Madison Square Garden", city="New York", state="NY", country="USA", capacity=20000) + session.add(msg) + session.commit() + session.refresh(msg) + + # Create Show + show = Show(date=datetime(2023, 12, 31), vertical_id=phish.id, venue_id=msg.id) + session.add(show) + session.commit() + session.refresh(show) + + # Create Songs + song1 = Song(title="Gamehendge Time Phactory", vertical_id=phish.id) + song2 = Song(title="Auld Lang Syne", vertical_id=phish.id) + session.add(song1) + session.add(song2) + session.commit() + session.refresh(song1) + session.refresh(song2) + + # Create Performances + p1 = Performance(show_id=show.id, song_id=song1.id, position=1, set_name="Set 1") + p2 = Performance(show_id=show.id, song_id=song2.id, position=2, set_name="Set 1", segue=True) + session.add(p1) + session.add(p2) + session.commit() + + print("Seeding complete!") + +if __name__ == "__main__": + # create_db_and_tables() # Alembic handles this now + seed_data() diff --git a/backend/seed_activity.py b/backend/seed_activity.py new file mode 100644 index 0000000..168ca82 --- /dev/null +++ b/backend/seed_activity.py @@ -0,0 +1,399 @@ +""" +El Goose Activity Seeder (Enhanced) +Populates the demo environment with 36 distinct user personas and realistic activity. +Generates attendance, ratings, reviews (Shows, Venues, Tours), and comments. +Includes "Wiki-Style" linking logic [[Entity:ID|Label]]. +""" +import random +import re +from datetime import datetime, timedelta +from sqlmodel import Session, select +from database import engine +from models import User, UserPreferences, Show, Song, Venue, Performance, Review, Attendance, Comment, Tour, Profile +from passlib.context import CryptContext + +pwd_context = CryptContext(schemes=["argon2"], deprecated="auto") + +# --- 1. 
Define 36 Distinct Personas --- +PERSONAS = [ + # Original 12 + {"username": "TheArchivist", "email": "archivist@demo.com", "role": "user", "style": "factual", "bio": "Tracking every tease since 2016."}, + {"username": "StatNerd420", "email": "statnerd@demo.com", "role": "user", "style": "analytical", "bio": "It's all about the gap times."}, + {"username": "CriticalListener", "email": "reviewer@demo.com", "role": "user", "style": "critical", "bio": "Honest takes only. 3.5 stars is a good rating."}, + {"username": "CasualFan", "email": "casual@demo.com", "role": "user", "style": "casual", "bio": "Just here for a good time."}, + {"username": "NortheastHonkers", "email": "groupleader@demo.com", "role": "user", "style": "community", "bio": "Rep the Northeast flock!"}, + {"username": "ModGoose", "email": "mod@demo.com", "role": "moderator", "style": "official", "bio": "Play nice in the comments."}, + {"username": "AdminBird", "email": "admin@demo.com", "role": "admin", "style": "official", "bio": "System Admin."}, + {"username": "NewToGoose", "email": "newbie@demo.com", "role": "user", "style": "excited_noob", "bio": "Just discovered them last week!"}, + {"username": "TaperTom", "email": "taper@demo.com", "role": "user", "style": "technical", "bio": "MK4 > V3 > 24/96"}, + {"username": "RoadWarrior", "email": "tourfollower@demo.com", "role": "user", "style": "traveler", "bio": "100+ shows and counting."}, + {"username": "SilentHonker", "email": "lurker@demo.com", "role": "user", "style": "quiet", "bio": "..."}, + {"username": "HypeGoose", "email": "hype@demo.com", "role": "user", "style": "hype", "bio": "BEST BAND EVER LFG!!!"}, + + # New 24 + {"username": "VinylJunkie", "email": "vinyl@demo.com", "role": "user", "style": "collector", "bio": "Spinning wax only. 
ISO Shenanigans variants."}, + {"username": "CouchTourCapt", "email": "couch@demo.com", "role": "user", "style": "streamer", "bio": "Streaming from the living room every night."}, + {"username": "RailRider", "email": "rail@demo.com", "role": "user", "style": "intense", "bio": "If you're not on the rail, were you even there?"}, + {"username": "PosterNutbag", "email": "poster@demo.com", "role": "user", "style": "collector", "bio": "Here for the foil prints."}, + {"username": "SetlistPredictor", "email": "predict@demo.com", "role": "user", "style": "analytical", "bio": "Called the opener 3 nights in a row."}, + {"username": "JamFlowMan", "email": "jam@demo.com", "role": "user", "style": "vibey", "bio": "Surrender to the flow."}, + {"username": "TedHead", "email": "ted@demo.com", "role": "user", "style": "insider", "bio": "Ted Tapes or bust."}, + {"username": "HonkIfUrHorny", "email": "honk@demo.com", "role": "user", "style": "meme", "bio": "HONK HONK"}, + {"username": "PeterSide", "email": "peter@demo.com", "role": "user", "style": "fanboy", "bio": "Mustache appreciation society."}, + {"username": "RicksPick", "email": "rick@demo.com", "role": "user", "style": "fanboy", "bio": "That tone though."}, + {"username": "TrevorBassFace", "email": "trevor@demo.com", "role": "user", "style": "fanboy", "bio": "T-Bone holds it down."}, + {"username": "SpudsMcKenzie", "email": "spuds@demo.com", "role": "user", "style": "fanboy", "bio": "Drums > Space"}, + {"username": "JeffsPercussion", "email": "jeff@demo.com", "role": "user", "style": "fanboy", "bio": "More gong please."}, + {"username": "CotterPin", "email": "cotter@demo.com", "role": "user", "style": "fanboy", "bio": "New era is here."}, + {"username": "OreboloFan", "email": "acoustic@demo.com", "role": "user", "style": "acoustic", "bio": "Acoustic sets are superior."}, + {"username": "IndieGroove", "email": "indie@demo.com", "role": "user", "style": "hipster", "bio": "I liked them when they played bars."}, + {"username": 
"PhishConvert", "email": "phish@demo.com", "role": "user", "style": "comparative", "bio": "3.0 vet giving this a shot."}, + {"username": "DeadheadConvert", "email": "dead@demo.com", "role": "user", "style": "old_school", "bio": "Searching for the sound."}, + {"username": "DiscoGoose", "email": "disco@demo.com", "role": "user", "style": "dancer", "bio": "Here to dance."}, + {"username": "ProgRockFan", "email": "prog@demo.com", "role": "user", "style": "technical", "bio": "Time signatures are my love language."}, + {"username": "FunkyUncle", "email": "funky@demo.com", "role": "user", "style": "vibey", "bio": "Keep it funky."}, + {"username": "WysteriaLocal", "email": "local@demo.com", "role": "user", "style": "local", "bio": "CT born and raised."}, + {"username": "FactoryFiction", "email": "factory@demo.com", "role": "user", "style": "intense", "bio": "Chasing that Factory Fiction."}, + {"username": "Shenanigans", "email": "squad@demo.com", "role": "user", "style": "party", "bio": "Squad up!"}, +] + +# --- 2. Content Generation Templates --- + +SHOW_REVIEWS = { + "hype": [ + "ABSOLUTELY INSANE SHOW! That [[Song:{song_id}|{song}]] jam changed my life.", + "Can we talk about [[Song:{song_id}|{song}]]?! [[Venue:{venue_id}|{venue}]] was shaking!", + "Best show of the tour hands down. The energy during [[Song:{song_id}|{song}]] was unreal.", + "LFG!!! [[Song:{song_id}|{song}]] > [[Song:{song2_id}|{song2}]] was the highlight for me.", + "I have no words. [[Song:{song_id}|{song}]] was a heater." + ], + "critical": [ + "Solid show, but the mix was a bit muddy during [[Song:{song_id}|{song}]].", + "First set was slow, but they picked it up with [[Song:{song_id}|{song}]] in the second.", + "Standard version of [[Song:{song_id}|{song}]], nothing special. [[Song:{song2_id}|{song2}]] was the improvisational peak.", + "Good energy at [[Venue:{venue_id}|{venue}]], but I've heard better versions of [[Song:{song_id}|{song}]].", + "3.5/5. 
[[Song:{song_id}|{song}]] went deep but didn't quite stick the landing." + ], + "analytical": [ + "First time [[Song:{song_id}|{song}]] has opened a second set since 2019.", + "The jam in [[Song:{song_id}|{song}]] clocked in at 22 minutes. Major key modulation at the 12-minute mark.", + "Interesting placement for [[Song:{song_id}|{song}]]. [[Venue:{venue_id}|{venue}]] always gets unique setlists.", + "Gap bust! Haven't played [[Song:{song_id}|{song}]] in 45 shows.", + "Statistically an above-average show. [[Song:{song_id}|{song}]] jam density was high." + ], + "casual": [ + "Had a blast at [[Venue:{venue_id}|{venue}]]! [[Song:{song_id}|{song}]] was my favorite.", + "Great vibes tonight. Loved hearing [[Song:{song_id}|{song}]].", + "So much fun dancing to [[Song:{song_id}|{song}]]. Can't wait for the next one.", + "Took my friend to their first show and they loved [[Song:{song_id}|{song}]].", + "Good times. [[Song:{song_id}|{song}]] was cool." + ], + "technical": [ + "Rick's tone on [[Song:{song_id}|{song}]] was dialed in perfectly.", + "Peter's clav work on [[Song:{song_id}|{song}]] was funky as hell.", + "The lighting rig looked amazing during [[Song:{song_id}|{song}]].", + "Trevor was locked in for [[Song:{song_id}|{song}]]. Great low end.", + "Spuds and Jeff were driving the bus on [[Song:{song_id}|{song}]]." + ], + "comparative": [ + "Getting major 97 Phish vibes from that [[Song:{song_id}|{song}]] jam.", + "That [[Song:{song_id}|{song}]] transition reminded me of a Dead segue.", + "This version of [[Song:{song_id}|{song}]] rivals the one from last month.", + "They are really finding their own sound on [[Song:{song_id}|{song}]].", + "Better than the studio version of [[Song:{song_id}|{song}]] for sure." + ] +} + +VENUE_REVIEWS = { + "hype": [ + "[[Venue:{venue_id}|{venue}]] IS THE CHURCH! Best place to see them.", + "The energy in this room is unmatched. I love [[Venue:{venue_id}|{venue}]]!", + "Always a party at [[Venue:{venue_id}|{venue}]]. 
Can't wait to go back." + ], + "critical": [ + "Sightlines at [[Venue:{venue_id}|{venue}]] are terrible if you're not on the floor.", + "Security at [[Venue:{venue_id}|{venue}]] was a nightmare. Great show otherwise.", + "Sound was bouncy in the balcony. [[Venue:{venue_id}|{venue}]] needs acoustic treatment." + ], + "casual": [ + "Nice venue, easy parking. [[Venue:{venue_id}|{venue}]] is a solid spot.", + "Had a good time at [[Venue:{venue_id}|{venue}]]. Beers were expensive though.", + "Beautiful theatre. [[Venue:{venue_id}|{venue}]] has great architecture." + ], + "traveler": [ + "Drove 6 hours to get to [[Venue:{venue_id}|{venue}]]. Worth it.", + "Checked [[Venue:{venue_id}|{venue}]] off my bucket list. 10/10.", + "One of my favorite stops on tour. [[Venue:{venue_id}|{venue}]] never disappoints." + ] +} + +TOUR_REVIEWS = { + "hype": [ + "[[Tour:{tour_id}|{tour}]] was their best tour yet! Every night was fire.", + "They leveled up on [[Tour:{tour_id}|{tour}]]. New songs are sounding great.", + "Take me back to [[Tour:{tour_id}|{tour}]]! What a run." + ], + "analytical": [ + "[[Tour:{tour_id}|{tour}]] featured the most 20+ minute jams of any era.", + "Setlist variety on [[Tour:{tour_id}|{tour}]] was at an all-time high.", + "Interesting evolution of the sound during [[Tour:{tour_id}|{tour}]]." + ], + "critical": [ + "[[Tour:{tour_id}|{tour}]] started strong but fizzled out at the end.", + "Too many repeats on [[Tour:{tour_id}|{tour}]]. Need to dig deeper.", + "Not my favorite era. [[Tour:{tour_id}|{tour}]] felt a bit disjointed." + ] +} + +# --- 3. 
# Helper functions: pick a persona-appropriate template set and fill wiki links.

def get_random_show_review(style, song1, song2, venue):
    """Generate a show review with wiki links"""
    # Fold the many persona styles into the template categories above.
    # Unmapped styles fall back to their own category, or "casual".
    aliases = {
        "excited_noob": "hype", "intense": "hype", "party": "hype", "fanboy": "hype",
        "factual": "technical", "technical": "technical", "collector": "technical",
        "old_school": "comparative", "hipster": "comparative",
        "quiet": "casual", "streamer": "casual",
        "analytical": "analytical", "insider": "analytical",
    }
    category = aliases.get(style, style if style in SHOW_REVIEWS else "casual")
    template = random.choice(SHOW_REVIEWS[category])
    return template.format(
        song=song1.title, song_id=song1.id,
        song2=song2.title, song2_id=song2.id,
        venue=venue.name, venue_id=venue.id,
    )


def get_random_venue_review(style, venue):
    """Generate a venue review with wiki links"""
    aliases = {
        "excited_noob": "hype", "intense": "hype", "party": "hype",
        "critical": "critical", "technical": "critical",
        "traveler": "traveler", "collector": "traveler",
    }
    category = aliases.get(style, style if style in VENUE_REVIEWS else "casual")
    template = random.choice(VENUE_REVIEWS[category])
    return template.format(venue=venue.name, venue_id=venue.id)


def get_random_tour_review(style, tour):
    """Generate a tour review with wiki links"""
    aliases = {
        "excited_noob": "hype", "intense": "hype", "party": "hype",
        "critical": "critical", "old_school": "critical",
    }
    # Tours default to the "analytical" voice rather than "casual".
    category = aliases.get(style, style if style in TOUR_REVIEWS else "analytical")
    template = random.choice(TOUR_REVIEWS[category])
    return template.format(tour=tour.name, tour_id=tour.id)


def seed_users(session):
    """Create all 36 users if they don't exist"""
    print("๐Ÿ‘ฅ Seeding Users...")
    users = {}
    for p in PERSONAS:
        user = session.exec(select(User).where(User.email == p["email"])).first()
if not user: + user = User( + email=p["email"], + hashed_password=pwd_context.hash("demo123"), + role=p["role"], + bio=p["bio"], + is_active=True + ) + session.add(user) + session.commit() + session.refresh(user) + + # Create Profile + profile = Profile( + user_id=user.id, + username=p["username"], + display_name=p["username"] + ) + session.add(profile) + + # Create preferences + prefs = UserPreferences( + user_id=user.id, + wiki_mode=(p["style"] == "factual" or p["style"] == "quiet"), + show_ratings=True, + show_comments=True + ) + session.add(prefs) + session.commit() + print(f" + Created {p['username']}") + else: + # Update bio if missing + if not user.bio: + user.bio = p["bio"] + session.add(user) + session.commit() + + # Ensure profile exists + profile = session.exec(select(Profile).where(Profile.user_id == user.id)).first() + if not profile: + profile = Profile( + user_id=user.id, + username=p["username"], + display_name=p["username"] + ) + session.add(profile) + session.commit() + + users[p["username"]] = user + return users + +def seed_activity(session, users): + """Generate attendance, reviews, and ratings""" + print("\n๐ŸŽฒ Generating Activity...") + + # Fetch data + shows = session.exec(select(Show).order_by(Show.date.desc())).all() + venues = session.exec(select(Venue)).all() + tours = session.exec(select(Tour)).all() + + if not shows: + print("โŒ No shows found! Run import_elgoose.py first.") + return + + print(f" Found {len(shows)} shows, {len(venues)} venues, {len(tours)} tours.") + + count_attendance = 0 + count_reviews = 0 + + # 1. 
Show Activity (Attendance + Reviews) + target_shows = shows[:50] + random.sample(shows[50:], min(len(shows)-50, 50)) + + for show in target_shows: + venue = session.get(Venue, show.venue_id) + if not venue: continue + + performances = session.exec(select(Performance).where(Performance.show_id == show.id)).all() + if not performances: continue + + songs = [session.get(Song, p.song_id) for p in performances] + if not songs: continue + + attendees = random.sample(list(users.values()), k=random.randint(3, 15)) + + for user in attendees: + # Attendance + exists = session.exec(select(Attendance).where( + Attendance.user_id == user.id, + Attendance.show_id == show.id + )).first() + + if not exists: + att = Attendance( + user_id=user.id, + show_id=show.id, + created_at=show.date + timedelta(days=random.randint(1, 5)) + ) + session.add(att) + count_attendance += 1 + + # Show Review (30% chance) + if random.random() < 0.3: + persona = next((p for p in PERSONAS if p["email"] == user.email), None) + style = persona["style"] if persona else "casual" + + song1 = random.choice(songs) + song2 = random.choice(songs) + + review_text = get_random_show_review(style, song1, song2, venue) + + rating = random.randint(3, 5) + if style == "critical": rating = random.randint(2, 4) + if style == "hype": rating = 5 + + rev_exists = session.exec(select(Review).where( + Review.user_id == user.id, + Review.show_id == show.id + )).first() + + if not rev_exists: + blurb = review_text.split('.')[0] + "." + if len(blurb) > 100: blurb = blurb[:97] + "..." + + review = Review( + user_id=user.id, + show_id=show.id, + score=rating, + content=review_text, + blurb=blurb, + created_at=show.date + timedelta(days=random.randint(1, 10)) + ) + session.add(review) + count_reviews += 1 + + # 2. 
Venue Reviews + target_venues = random.sample(venues, min(len(venues), 30)) + for venue in target_venues: + reviewers = random.sample(list(users.values()), k=random.randint(1, 5)) + for user in reviewers: + persona = next((p for p in PERSONAS if p["email"] == user.email), None) + style = persona["style"] if persona else "casual" + + review_text = get_random_venue_review(style, venue) + rating = random.randint(3, 5) + + rev_exists = session.exec(select(Review).where( + Review.user_id == user.id, + Review.venue_id == venue.id + )).first() + + if not rev_exists: + blurb = review_text.split('.')[0] + "." + if len(blurb) > 100: blurb = blurb[:97] + "..." + + review = Review( + user_id=user.id, + venue_id=venue.id, + score=rating, + content=review_text, + blurb=blurb, + created_at=datetime.utcnow() - timedelta(days=random.randint(1, 365)) + ) + session.add(review) + count_reviews += 1 + + # 3. Tour Reviews + target_tours = random.sample(tours, min(len(tours), 10)) + for tour in target_tours: + reviewers = random.sample(list(users.values()), k=random.randint(1, 3)) + for user in reviewers: + persona = next((p for p in PERSONAS if p["email"] == user.email), None) + style = persona["style"] if persona else "casual" + + review_text = get_random_tour_review(style, tour) + rating = random.randint(3, 5) + + rev_exists = session.exec(select(Review).where( + Review.user_id == user.id, + Review.tour_id == tour.id + )).first() + + if not rev_exists: + blurb = review_text.split('.')[0] + "." + if len(blurb) > 100: blurb = blurb[:97] + "..." 
+ + review = Review( + user_id=user.id, + tour_id=tour.id, + score=rating, + content=review_text, + blurb=blurb, + created_at=datetime.utcnow() - timedelta(days=random.randint(1, 365)) + ) + session.add(review) + count_reviews += 1 + + session.commit() + print(f"\nโœ… Activity Generation Complete!") + print(f" + {count_attendance} Attendance records") + print(f" + {count_reviews} Reviews generated (Shows, Venues, Tours)") + +def main(): + with Session(engine) as session: + users_map = seed_users(session) + seed_activity(session, users_map) + +if __name__ == "__main__": + main() diff --git a/backend/seed_demo.py b/backend/seed_demo.py new file mode 100644 index 0000000..06b8d18 --- /dev/null +++ b/backend/seed_demo.py @@ -0,0 +1,300 @@ +""" +Comprehensive Demo Data Seeder for Elmeg +Creates 12 diverse user personas and populates with Goose data +""" +import sys +from datetime import datetime, timedelta +from sqlmodel import Session, select +from passlib.context import CryptContext +from database import engine +from models import ( + User, Vertical, Venue, Tour, Show, Song, Performance, Artist, + Attendance, Comment, Rating, Review, Group, GroupMember, GroupPost, + PerformanceNickname, UserPreferences, Badge, UserBadge, Tag, EntityTag +) + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + +# User Personas +USERS = [ + {"email": "archivist@demo.com", "username": "TheArchivist", "role": "user", "wiki_mode": True, "bio": "Pure data, no noise"}, + {"email": "statnerd@demo.com", "username": "StatNerd420", "role": "user", "wiki_mode": False, "bio": "Gap charts are life"}, + {"email": "reviewer@demo.com", "username": "CriticalListener", "role": "user", "wiki_mode": False, "bio": "Every show deserves analysis"}, + {"email": "casual@demo.com", "username": "CasualFan", "role": "user", "wiki_mode": False, "bio": "Just here for the vibes"}, + {"email": "groupleader@demo.com", "username": "NortheastHonkers", "role": "user", "wiki_mode": False, "bio": 
"NYC/Boston crew organizer"}, + {"email": "mod@demo.com", "username": "ModGoose", "role": "moderator", "wiki_mode": False, "bio": "Keeping it clean"}, + {"email": "admin@demo.com", "username": "AdminBird", "role": "admin", "wiki_mode": False, "bio": "Platform steward"}, + {"email": "newbie@demo.com", "username": "NewToGoose", "role": "user", "wiki_mode": False, "bio": "Just discovered them!"}, + {"email": "taper@demo.com", "username": "TaperTom", "role": "user", "wiki_mode": False, "bio": "Recording quality matters"}, + {"email": "tourfollower@demo.com", "username": "RoadWarrior", "role": "user", "wiki_mode": False, "bio": "50+ shows and counting"}, + {"email": "lurker@demo.com", "username": "SilentHonker", "role": "user", "wiki_mode": True, "bio": "Observer mode"}, + {"email": "hype@demo.com", "username": "HypeGoose", "role": "user", "wiki_mode": False, "bio": "EVERYTHING IS FIRE ๐Ÿ”ฅ"}, +] + +def create_users(session: Session): + """Create 12 diverse user personas""" + print("Creating users...") + users = [] + for user_data in USERS: + user = User( + email=user_data["email"], + hashed_password=pwd_context.hash("demo123"), + is_active=True, + is_superuser=(user_data["role"] == "admin"), + role=user_data["role"] + ) + session.add(user) + session.commit() + session.refresh(user) + + # Set preferences + prefs = UserPreferences( + user_id=user.id, + wiki_mode=user_data["wiki_mode"], + show_ratings=not user_data["wiki_mode"], + show_comments=not user_data["wiki_mode"] + ) + session.add(prefs) + + users.append(user) + + session.commit() + print(f"โœ“ Created {len(users)} users") + return users + +def create_vertical(session: Session): + """Create Goose vertical""" + print("Creating Goose vertical...") + vertical = Vertical( + name="Goose", + slug="goose", + description="Goose is a jam band from Connecticut" + ) + session.add(vertical) + session.commit() + session.refresh(vertical) + print("โœ“ Created Goose vertical") + return vertical + +def create_sample_data(session: 
Session, vertical: Vertical, users: list): + """Create comprehensive sample data""" + print("Creating sample venues, tours, and shows...") + + # Venues + venues = [ + Venue(name="Red Rocks Amphitheatre", city="Morrison", state="CO", country="USA", capacity=9525), + Venue(name="The Capitol Theatre", city="Port Chester", state="NY", country="USA", capacity=1800), + Venue(name="Radio City Music Hall", city="New York", state="NY", country="USA", capacity=6015), + Venue(name="The Gorge Amphitheatre", city="George", state="WA", country="USA", capacity=27500), + Venue(name="Brooklyn Bowl", city="Brooklyn", state="NY", country="USA", capacity=600), + ] + for v in venues: + session.add(v) + session.commit() + + # Tours + tours = [ + Tour(name="Fall 2023 Tour", start_date=datetime(2023, 10, 1), end_date=datetime(2023, 11, 15)), + Tour(name="Summer 2024 Tour", start_date=datetime(2024, 6, 1), end_date=datetime(2024, 8, 31)), + ] + for t in tours: + session.add(t) + session.commit() + + # Songs + songs = [ + Song(title="Hungersite", vertical_id=vertical.id, original_artist=None), + Song(title="Arcadia", vertical_id=vertical.id, original_artist=None), + Song(title="Hot Tea", vertical_id=vertical.id, original_artist=None), + Song(title="Tumble", vertical_id=vertical.id, original_artist=None), + Song(title="Seekers on the Ridge", vertical_id=vertical.id, original_artist=None), + Song(title="Arrow", vertical_id=vertical.id, original_artist=None), + Song(title="Empress of Organos", vertical_id=vertical.id, original_artist=None), + Song(title="Rockdale", vertical_id=vertical.id, original_artist=None), + ] + for s in songs: + session.add(s) + session.commit() + + # Shows + shows = [] + for i in range(10): + show = Show( + date=datetime(2024, 1, 1) + timedelta(days=i*30), + vertical_id=vertical.id, + venue_id=venues[i % len(venues)].id, + tour_id=tours[i % len(tours)].id if i < 8 else None, + notes=f"Night {i+1} of tour" if i < 5 else None + ) + session.add(show) + shows.append(show) 
+ session.commit() + + # Performances (setlists) + for show in shows: + for pos, song in enumerate(songs[:5], 1): + perf = Performance( + show_id=show.id, + song_id=song.id, + position=pos, + set_name="Set 1" if pos <= 3 else "Set 2", + segue=(pos % 2 == 0), + notes="Extended jam" if pos == 3 else None + ) + session.add(perf) + session.commit() + + print(f"โœ“ Created {len(venues)} venues, {len(tours)} tours, {len(songs)} songs, {len(shows)} shows") + return venues, tours, songs, shows + +def seed_user_activity(session: Session, users: list, shows: list, songs: list): + """Seed diverse user activity based on personas""" + print("Seeding user activity...") + + # Archivist - No social activity + # StatNerd - Attendance tracking + stat_nerd = users[1] + for show in shows[:8]: + att = Attendance(user_id=stat_nerd.id, show_id=show.id, notes="Stats logged") + session.add(att) + + # Reviewer - Detailed reviews + reviewer = users[2] + for show in shows[:3]: + review = Review( + user_id=reviewer.id, + show_id=show.id, + blurb="A masterclass in improvisation", + content="The Hungersite opener set the tone perfectly. 
The segue into Arcadia was seamless...", + score=9, + created_at=show.date + timedelta(days=1) + ) + session.add(review) + + # Casual Fan - Occasional comments + casual = users[3] + perfs = session.exec(select(Performance).limit(5)).all() + for perf in perfs[:2]: + comment = Comment( + user_id=casual.id, + show_id=perf.show_id, + content="This was so good live!", + created_at=datetime.utcnow() - timedelta(days=10) + ) + session.add(comment) + + # Group Leader - Creates group + leader = users[4] + group = Group( + name="Northeast Honkers", + description="Goose fans from NYC and Boston area", + privacy="public", + created_by=leader.id + ) + session.add(group) + session.commit() + + # Add members + for user in users[:6]: + member = GroupMember(group_id=group.id, user_id=user.id, role="admin" if user.id == leader.id else "member") + session.add(member) + + # Group posts + post = GroupPost( + group_id=group.id, + user_id=leader.id, + content="Who's going to the Capitol show next month?" + ) + session.add(post) + + # Taper - Notes on recording quality + taper = users[8] + for show in shows[:4]: + comment = Comment( + user_id=taper.id, + show_id=show.id, + content="AUD recording available - great soundboard quality", + created_at=show.date + timedelta(hours=12) + ) + session.add(comment) + + # Road Warrior - Massive attendance + warrior = users[9] + for show in shows: + att = Attendance(user_id=warrior.id, show_id=show.id, notes="On the road!") + session.add(att) + + # Hype Person - Rates everything 10/10 + hype = users[11] + for show in shows[:5]: + rating = Rating(user_id=hype.id, show_id=show.id, score=10) + review = Review( + user_id=hype.id, + show_id=show.id, + blurb="ABSOLUTE HEATER ๐Ÿ”ฅ๐Ÿ”ฅ๐Ÿ”ฅ", + content="This show SLAPPED. Every song was FIRE. 
Best show ever!!!", + score=10, + created_at=show.date + timedelta(hours=6) + ) + session.add(rating) + session.add(review) + + # Performance Nicknames + mod = users[5] + perfs = session.exec(select(Performance).limit(3)).all() + for perf in perfs: + nick = PerformanceNickname( + performance_id=perf.id, + nickname="Red Rocks Hungersite" if perf.position == 1 else "Epic Jam", + description="Legendary version", + status="approved", + suggested_by=mod.id + ) + session.add(nick) + + session.commit() + print("โœ“ Seeded comprehensive user activity") + +def main(): + print("=" * 60) + print("ELMEG DEMO DATA SEEDER") + print("=" * 60) + + with Session(engine) as session: + # Check if already seeded + existing = session.exec(select(User)).first() + if existing: + print("โš  Database already contains data!") + response = input("Clear and reseed? (yes/no): ") + if response.lower() != "yes": + print("Aborted.") + return + + # Clear all data (in reverse dependency order) + from sqlalchemy import text + for model in [GroupPost, GroupMember, Group, UserBadge, Badge, + PerformanceNickname, EntityTag, Tag, Attendance, + Comment, Rating, Review, Performance, Show, Tour, + Song, Venue, Artist, UserPreferences, User, Vertical]: + session.exec(text(f"DELETE FROM {model.__tablename__}")) + session.commit() + print("โœ“ Cleared existing data") + + # Seed + users = create_users(session) + vertical = create_vertical(session) + venues, tours, songs, shows = create_sample_data(session, vertical, users) + seed_user_activity(session, users, shows, songs) + + print("=" * 60) + print("โœ“ DEMO DATA SEEDED SUCCESSFULLY!") + print("=" * 60) + print("\nLogin credentials (all passwords: demo123):") + for user_data in USERS: + print(f" {user_data['email']:30} ({user_data['username']})") + print("\nStart the demo server:") + print(" cd /Users/ten/ANTIGRAVITY/elmeg-demo/backend") + print(" DATABASE_URL='sqlite:///./elmeg-demo.db' uvicorn main:app --reload --port 8001") + +if __name__ == "__main__": + 
main() diff --git a/backend/seed_output.txt b/backend/seed_output.txt new file mode 100644 index 0000000..c48c36b --- /dev/null +++ b/backend/seed_output.txt @@ -0,0 +1,72 @@ +Starting demo seed... +2025-12-03 17:37:58,631 INFO sqlalchemy.engine.Engine BEGIN (implicit) +2025-12-03 17:37:58,634 INFO sqlalchemy.engine.Engine INSERT INTO vertical (name, slug, description) VALUES (?, ?, ?) +2025-12-03 17:37:58,635 INFO sqlalchemy.engine.Engine [generated in 0.00053s] ('Goose', 'goose', 'Jam band from CT') +2025-12-03 17:37:58,648 INFO sqlalchemy.engine.Engine ROLLBACK +Traceback (most recent call last): + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1967, in _exec_single_context + self.dialect.do_execute( + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/default.py", line 951, in do_execute + cursor.execute(statement, parameters) +sqlite3.IntegrityError: UNIQUE constraint failed: vertical.slug + +The above exception was the direct cause of the following exception: + +Traceback (most recent call last): + File "/Users/ten/ANTIGRAVITY/elmeg-demo/backend/quick_seed.py", line 20, in + session.commit() + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 2030, in commit + trans.commit(_to_root=True) + File "", line 2, in commit + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py", line 137, in _go + ret_value = fn(self, *arg, **kw) + ^^^^^^^^^^^^^^^^^^^^ + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 1311, in commit + self._prepare_impl() + File "", line 2, in _prepare_impl + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/state_changes.py", line 137, in _go + ret_value = fn(self, *arg, 
**kw) + ^^^^^^^^^^^^^^^^^^^^ + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 1286, in _prepare_impl + self.session.flush() + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 4331, in flush + self._flush(objects) + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 4466, in _flush + with util.safe_reraise(): + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/util/langhelpers.py", line 224, in __exit__ + raise exc_value.with_traceback(exc_tb) + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/session.py", line 4427, in _flush + flush_context.execute() + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py", line 466, in execute + rec.execute(self) + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/unitofwork.py", line 642, in execute + util.preloaded.orm_persistence.save_obj( + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py", line 93, in save_obj + _emit_insert_statements( + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/orm/persistence.py", line 1233, in _emit_insert_statements + result = connection.execute( + ^^^^^^^^^^^^^^^^^^^ + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1419, in execute + return meth( + ^^^^^ + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/sql/elements.py", line 526, in _execute_on_connection + return connection._execute_clauseelement( + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File 
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1641, in _execute_clauseelement + ret = self._execute_context( + ^^^^^^^^^^^^^^^^^^^^^^ + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1846, in _execute_context + return self._exec_single_context( + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1986, in _exec_single_context + self._handle_dbapi_exception( + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 2355, in _handle_dbapi_exception + raise sqlalchemy_exception.with_traceback(exc_info[2]) from e + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/base.py", line 1967, in _exec_single_context + self.dialect.do_execute( + File "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sqlalchemy/engine/default.py", line 951, in do_execute + cursor.execute(statement, parameters) +sqlalchemy.exc.IntegrityError: (sqlite3.IntegrityError) UNIQUE constraint failed: vertical.slug +[SQL: INSERT INTO vertical (name, slug, description) VALUES (?, ?, ?)] +[parameters: ('Goose', 'goose', 'Jam band from CT')] +(Background on this error at: https://sqlalche.me/e/20/gkpj) diff --git a/backend/services/stats.py b/backend/services/stats.py new file mode 100644 index 0000000..2485a50 --- /dev/null +++ b/backend/services/stats.py @@ -0,0 +1,71 @@ +from sqlmodel import Session, select, func, desc +from models import Performance, Show, Song, Attendance, UserBadge, Badge +from datetime import datetime + +def get_song_stats(session: Session, song_id: int): + """Calculate times played and gap for a song.""" + # Times Played + times_played = session.exec( + 
select(func.count(Performance.id)).where(Performance.song_id == song_id) + ).one() + + # Last Played + last_performance = session.exec( + select(Show) + .join(Performance) + .where(Performance.song_id == song_id) + .order_by(desc(Show.date)) + .limit(1) + ).first() + + gap = 0 + if last_performance: + # Calculate gap: number of shows since last_performance + # This is a bit heavy if we count all shows. + # For now, let's just return the date of last played. + # To calculate true gap, we'd need: count(shows where date > last_performance.date) + gap = session.exec( + select(func.count(Show.id)).where(Show.date > last_performance.date) + ).one() + + return { + "times_played": times_played, + "last_played": last_performance.date if last_performance else None, + "gap": gap + } + +def check_and_award_badges(session: Session, user_id: int): + """Check for badge milestones and award them.""" + # Example: 10 Shows Attended + attendance_count = session.exec( + select(func.count(Attendance.id)).where(Attendance.user_id == user_id) + ).one() + + if attendance_count >= 10: + award_badge(session, user_id, "10-shows", "10 Shows Club", "Awarded for attending 10 shows.", "ticket") + + # Example: First Review + # (Need Review model import if implemented, assuming it is from previous steps) + # ... 
+ +def award_badge(session: Session, user_id: int, slug: str, name: str, description: str, icon: str): + """Award a badge if not already owned.""" + # Check if badge exists, create if not (for system badges) + badge = session.exec(select(Badge).where(Badge.slug == slug)).first() + if not badge: + badge = Badge(name=name, description=description, icon=icon, slug=slug) + session.add(badge) + session.commit() + session.refresh(badge) + + # Check if user has it + user_badge = session.exec( + select(UserBadge) + .where(UserBadge.user_id == user_id) + .where(UserBadge.badge_id == badge.id) + ).first() + + if not user_badge: + user_badge = UserBadge(user_id=user_id, badge_id=badge.id) + session.add(user_badge) + session.commit() diff --git a/backend/test_seed.py b/backend/test_seed.py new file mode 100644 index 0000000..218647f --- /dev/null +++ b/backend/test_seed.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 +import sys +print("Starting seed script...") +sys.stdout.flush() + +from database import engine +from sqlmodel import Session +print("Imported database...") +sys.stdout.flush() + +with Session(engine) as session: + print("Session created successfully!") + sys.stdout.flush() diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 0000000..c826213 --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,56 @@ +import pytest +from fastapi.testclient import TestClient +from sqlmodel import Session, SQLModel, create_engine +from sqlmodel.pool import StaticPool + +from main import app +from database import get_session +from models import User, Vertical + +# Use in-memory SQLite for testing +sqlite_file_name = "database.db" +sqlite_url = f"sqlite://" + +engine = create_engine( + sqlite_url, + connect_args={"check_same_thread": False}, + poolclass=StaticPool +) + +@pytest.fixture(name="session") +def session_fixture(): + engine = create_engine( + "sqlite://", + connect_args={"check_same_thread": False}, + poolclass=StaticPool + ) + 
SQLModel.metadata.create_all(engine) + with Session(engine) as session: + yield session + +@pytest.fixture(name="client") +def client_fixture(session: Session): + def get_session_override(): + return session + + app.dependency_overrides[get_session] = get_session_override + client = TestClient(app) + yield client + app.dependency_overrides.clear() + +@pytest.fixture(name="test_user_token") +def test_user_token_fixture(client: TestClient): + # Create a user + client.post("/auth/register", json={ + "email": "test@example.com", + "password": "password123", + "username": "testuser" + }) + + # Login to get token + response = client.post("/auth/token", data={ + "username": "test@example.com", + "password": "password123" + }) + token = response.json()["access_token"] + return token diff --git a/backend/tests/test_shows.py b/backend/tests/test_shows.py new file mode 100644 index 0000000..e48c724 --- /dev/null +++ b/backend/tests/test_shows.py @@ -0,0 +1,70 @@ +from fastapi.testclient import TestClient +from sqlmodel import Session +from models import Vertical, Show + +def test_create_show(client: TestClient, session: Session, test_user_token: str): + # 1. Create a Vertical first (needed for FK) + vertical = Vertical(name="Phish", slug="phish") + session.add(vertical) + session.commit() + + # 2. 
Create Show + response = client.post( + "/shows/", + json={ + "date": "2023-12-31T00:00:00", + "vertical_id": vertical.id, + "notes": "New Year's Eve" + }, + headers={"Authorization": f"Bearer {test_user_token}"} + ) + + assert response.status_code == 200 + data = response.json() + assert data["notes"] == "New Year's Eve" + assert data["id"] is not None + +from datetime import datetime + +def test_read_shows(client: TestClient, session: Session): + # Setup data + vertical = Vertical(name="Goose", slug="goose") + session.add(vertical) + session.commit() + + show = Show(date=datetime(2024, 1, 1), vertical_id=vertical.id, notes="Test Show") + session.add(show) + session.commit() + + # Test Read + response = client.get("/shows/") + assert response.status_code == 200 + data = response.json() + assert len(data) > 0 + assert data[0]["notes"] == "Test Show" + +def test_attendance(client: TestClient, session: Session, test_user_token: str): + # Setup + vertical = Vertical(name="Test Band", slug="test-band") + session.add(vertical) + session.commit() + show = Show(date=datetime(2024, 1, 1), vertical_id=vertical.id) + session.add(show) + session.commit() + + # Mark Attendance + response = client.post( + "/attendance/", + json={"show_id": show.id}, + headers={"Authorization": f"Bearer {test_user_token}"} + ) + assert response.status_code == 200 + assert response.json()["show_id"] == show.id + + # Verify in "My Attendance" + response = client.get( + "/attendance/me", + headers={"Authorization": f"Bearer {test_user_token}"} + ) + assert response.status_code == 200 + assert len(response.json()) == 1 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..4ead806 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,42 @@ +version: '3.8' + +services: + backend: + build: ./backend + ports: + - "8000:8000" + volumes: + - ./backend:/app + environment: + - DATABASE_URL=postgresql://user:password@db:5432/elmeg_db + depends_on: + - db + + frontend: + build: 
+ context: ./frontend + dockerfile: Dockerfile + ports: + - "3000:3000" + volumes: + - ./frontend:/app + - /app/node_modules + environment: + - NEXT_PUBLIC_API_URL=http://localhost:8000 + - INTERNAL_API_URL=http://backend:8000 + depends_on: + - backend + + db: + image: postgres:15-alpine + ports: + - "5432:5432" + environment: + - POSTGRES_USER=user + - POSTGRES_PASSWORD=password + - POSTGRES_DB=elmeg_db + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: diff --git a/docs/API.md b/docs/API.md new file mode 100644 index 0000000..1b42b7b --- /dev/null +++ b/docs/API.md @@ -0,0 +1,26 @@ +# Elmeg API Documentation + +The Elmeg API is built with **FastAPI**, which automatically generates interactive documentation. + +## Accessing the Docs + +When the backend server is running locally: + +- **Swagger UI**: [http://localhost:8000/docs](http://localhost:8000/docs) - Test endpoints directly in the browser. +- **ReDoc**: [http://localhost:8000/redoc](http://localhost:8000/redoc) - Alternative documentation view. + +## Authentication + +Most write operations (POST, PUT, DELETE) require a Bearer Token. + +1. Use the `/auth/token` endpoint (or login via Frontend) to get a token. +2. In Swagger UI, click the **Authorize** button and enter the token. + +## Key Endpoints + +- `/shows`: CRUD for Shows. +- `/songs`: CRUD for Songs and Stats. +- `/attendance`: Manage user attendance. +- `/social`: Comments and Ratings. +- `/moderation`: Moderator tools. +- `/preferences`: User preferences (Wiki Mode). diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md new file mode 100644 index 0000000..b75af4c --- /dev/null +++ b/docs/CHANGELOG.md @@ -0,0 +1,67 @@ +# Changelog + +## [Unreleased] - 2025-12-03 + +### Added + +- **Advanced Content (Nicknames)**: + - `PerformanceNickname` model and API. + - "Suggest Nickname" dialog on Show Detail page. + - Display of approved nicknames (e.g., "Tahoe Tweezer") on setlists. 
+- **Review System**: + - `Review` model supporting multiple entity types (Show, Venue, Song, Performance, Tour, Year). + - `EntityReviews` component for generic review functionality. + - Integrated reviews into Show Detail page. +- **Social Features**: + - `CommentSection` and `EntityRating` components. + - Integrated Comments and Ratings into Show, Song, Venue, and Tour pages. +- **New Pages**: + - `VenueDetailPage`: Venue info, shows list, social features. + - `TourDetailPage`: Tour info, shows list, social features. + - `GroupsPage`: List of communities. + - `GroupDetailPage`: Feed and members. +- **Groups / Communities**: + - `Group`, `GroupMember`, `GroupPost` models. + - Users can create, join, and post to groups. +- **User Profile**: + - Enhanced Profile page with Tabs (Overview, Attendance, Reviews, Groups). + - Added Stats (counts for attendance, reviews, groups). + - New components: `UserAttendanceList`, `UserReviewsList`, `UserGroupsList`. +- **Global Search**: + - Implemented `Cmd+K` global search dialog. + - Searches across Songs, Venues, Tours, Groups, Users, Nicknames, and Performances. +- **Performance Pages**: + - Dedicated `/performances/[id]` page for deep-diving into a specific version of a song. + - Includes comments, reviews, stats (Gap, Times Played), and navigation to Previous/Next versions. +- **Notifications**: + - Notification system for group joins (and future replies/mentions). + - Real-time(ish) bell icon in Navbar with unread count. +- **Wiki Mode**: + - User preference to hide all social features for a pure archive experience. + - Settings page at `/settings` with toggles for Wiki Mode, Show Ratings, and Show Comments. +- **Moderation Dashboard**: + - Admin interface at `/admin` for managing pending nicknames and reports. + - Approve/Reject workflows for community-submitted content. +- **Activity Feed**: + - Global feed on home page showing recent reviews, attendance, and group posts. + - Real-time pulse of community activity. 
+- **Testing**: + - Backend `pytest` setup with fixtures. + - Frontend `jest` setup with component tests. +- **Documentation**: + - `USER_GUIDE.md`, `DEVELOPER.md`, `API.md`, and `DEPLOY.md`. + +### Changed + +- **Authentication**: + - Refactored `auth` router to use consistent `get_current_user`. + - Switched password hashing to `argon2`. +- **Frontend Architecture**: + - Implemented `getApiUrl` helper for Docker-compatible SSR fetching. + - Refactored `ShowReviews` to generic `EntityReviews`. + +### Fixed + +- **Database**: + - Added `psycopg2-binary` for PostgreSQL support. + - Fixed `Attendance` creation logic. diff --git a/docs/DEVELOPER.md b/docs/DEVELOPER.md new file mode 100644 index 0000000..d948ce0 --- /dev/null +++ b/docs/DEVELOPER.md @@ -0,0 +1,118 @@ +# Elmeg Developer Guide + +## Tech Stack + +- **Backend**: Python (FastAPI), SQLModel, Alembic, SQLite. +- **Frontend**: TypeScript (Next.js 15), Tailwind CSS 4, Shadcn UI. +- **Containerization**: Docker. + +## Getting Started + +### Prerequisites + +- Python 3.10+ +- Node.js 18+ +- Docker (optional but recommended) + +### Backend Setup + +1. Navigate to `backend/`: + + ```bash + cd backend + ``` + +2. Create virtual environment: + + ```bash + python -m venv venv + source venv/bin/activate + ``` + +3. Install dependencies: + + ```bash + pip install -r requirements.txt + ``` + +4. Run Migrations: + + ```bash + alembic upgrade head + ``` + +5. Seed Data (Optional): + + ```bash + python seed.py + ``` + +6. Start Server: + + ```bash + uvicorn main:app --reload + ``` + + API will be available at `http://localhost:8000`. + Swagger Docs at `http://localhost:8000/docs`. + +### Frontend Setup + +1. Navigate to `frontend/`: + + ```bash + cd frontend + ``` + +2. Install dependencies: + + ```bash + npm install + ``` + +3. Start Dev Server: + + ```bash + npm run dev + ``` + + App will be available at `http://localhost:3000`. + +## Project Structure + +### Backend (`/backend`) + +- `main.py`: Entry point. 
+- `models.py`: Database models (SQLModel). +- `routers/`: API route handlers (split by feature). +- `services/`: Business logic (e.g., stats calculation). +- `alembic/`: Database migrations. + +### Frontend (`/frontend`) + +- `app/`: Next.js App Router pages. +- `components/`: Reusable UI components. + - `ui/`: Shadcn UI primitives. + - `social/`, `shows/`, `profile/`: Feature-specific components. +- `contexts/`: React Contexts (e.g., Preferences). +- `lib/`: Utilities. + +## Key Workflows + +### Adding a New Model + +1. Define model in `backend/models.py`. +2. Generate migration: `alembic revision --autogenerate -m "add model"`. +3. Apply migration: `alembic upgrade head`. +4. Create CRUD router in `backend/routers/`. + +### Adding a New Page + +1. Create folder in `frontend/app/` (e.g., `my-feature`). +2. Add `page.tsx`. +3. Fetch data from API (use `fetch` in Server Components or `useEffect` in Client Components). + +## Testing + +- Backend: `pytest` suite in `backend/tests/` (fixtures in `conftest.py`); frontend: `jest` component tests in `frontend/__tests__/`. +- Future: Add `playwright` for end-to-end coverage. diff --git a/docs/ROADMAP.md b/docs/ROADMAP.md new file mode 100644 index 0000000..2b76d24 --- /dev/null +++ b/docs/ROADMAP.md @@ -0,0 +1,72 @@ +# Future Roadmap & Implementation Plan + +## 1. Cross-Vertical "Fandom Federation" (Future Feature) + +**Concept**: Enable cross-pollination between different band/fandom instances (Verticals). +**Use Case**: A user mentions `@Phish` in the `Goose` instance, or a guest artist like "Trey Anastasio" links to his stats in the Phish vertical. +**Implementation Strategy**: + +* **Federated Identity**: A single `User` account works across all verticals (already partially supported by our schema). +* **Universal Resolver**: A service that resolves links like `elmeg://phish/shows/123` or `@phish:user_123`. +* **Shared Artist Database**: A global table of Artists that links to specific performances across all verticals. + +--- + +## 2. 
Immediate Implementation Plan (V1.1 Polish) + +We will tackle the following gaps to round out the V1 experience: + +### Phase A: Personalization & "Wiki Mode" + +**Goal**: Allow users to customize their experience, specifically enabling the "pure archive" feel. + +1. **Settings Page**: Create `/settings` route. +2. **Preferences UI**: Toggles for: + * `Wiki Mode` (Hides comments, ratings, social noise). + * `Show Ratings` (Toggle visibility of 1-10 scores). + * `Show Comments` (Toggle visibility of discussion sections). +3. **Frontend Logic**: Wrap social components in a context provider that respects these flags. + +### Phase B: Moderation Dashboard + +**Goal**: Empower admins to maintain data quality and community standards. + +1. **Admin Route**: Create `/admin` (protected by `is_superuser` or `role=admin`). +2. **Nickname Queue**: List `pending` nicknames with Approve/Reject actions. +3. **Report Queue**: List reported content with Dismiss/Delete actions. +4. **User Management**: Basic list of users with Ban/Promote options. + +### Phase C: Activity Feed (The "Pulse") + +**Goal**: Make the platform feel alive and aid discovery. + +1. **Global Feed**: Aggregated stream of: + * New Reviews + * New Show Attendance + * New Groups created + * Rare stats/milestones (e.g., "User X attended their 100th show") +2. **Home Page Widget**: Replace static content on Home with this dynamic feed. + +### Phase D: Visualizations & Deep Stats + +**Goal**: Provide the "crunchy" data fans love. + +1. **Gap Chart**: A visual bar chart on Song Pages showing the gap between performances. +2. **Heatmaps**: "Shows by Year" or "Shows by State" maps on Artist/Band pages. +3. **Graph View**: (Mind Map precursor) Simple node-link diagram of related songs/shows. + +### Phase E: Glossary (Wiki-Style Knowledge Base) + +**Goal**: Build a community-curated glossary of fandom terms. + +1. 
**Glossary Entry Model**: Term, definition, example, category, status. +2. **Edit History**: Track suggested edits with approval workflow. +3. **Public Pages**: `/glossary` index and `/glossary/[term]` detail pages. +4. **Moderation**: Admin queue for approving/rejecting entries and edits. +5. **Integration**: Include in global search, auto-link in comments. + +## 3. Execution Order + +4. **Phase D (Stats)**: "Nice to have" polish. diff --git a/docs/USER_GUIDE.md b/docs/USER_GUIDE.md new file mode 100644 index 0000000..9df4ca0 --- /dev/null +++ b/docs/USER_GUIDE.md @@ -0,0 +1,59 @@ +# Elmeg User Guide + +Welcome to **Elmeg**, the ultimate platform for fandom communities. This guide will help you navigate the features of the site. + +## Core Features + +### 1. The Archive + +Explore the massive database of shows, songs, and venues. + +- **Shows**: Browse by date, tour, or venue. View setlists and notes. +- **Songs**: See every time a song has been played, gap charts, and history. +- **Tours**: View shows grouped by specific tours (e.g., "Fall 2023"). + +### 2. Attendance Tracking ("I Was There") + +Track your stats by marking shows you attended. + +- Go to any **Show Detail** page. +- Click the **"I Was There"** button. +- Your profile will update with your total show count. + +### 3. Social Features + +Interact with the community: + +- **Comments**: Discuss shows and songs with other fans. +- **Ratings**: Rate shows and songs on a 1-10 scale. +- **Reviews**: Write in-depth reviews for shows you've attended. + +### 4. Wiki Mode + +Prefer a pure data experience? Enable **Wiki Mode**. + +- Go to **Settings** (via your Profile). +- Toggle **Wiki Mode** to **ON**. +- All social features (comments, ratings, reviews) will be hidden, giving you a clean, distraction-free archive. + +### 5. Gamification & Badges + +Earn badges for your engagement! + +- **10 Shows Club**: Attend 10 shows. +- **First Review**: Write your first review. 
+- Check your **Profile** to see your earned badges and stats. + +### 6. Contribution + +Help build the archive: + +- **Nicknames**: Suggest "City Song" nicknames for specific performances (e.g., "Tahoe Tweezer"). +- **Tags**: Tag shows and songs with descriptors (e.g., "Jam", "Tease"). + +## For Moderators + +If you have moderator privileges, access the **Moderator Dashboard** via the `/mod` route. + +- **Queue**: Approve or Reject suggested nicknames. +- **Reports**: Review user reports (Coming Soon). diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..5ef6a52 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,41 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env* + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..80e87a6 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,11 @@ +FROM node:18-alpine + +WORKDIR /app + +COPY package*.json ./ + +RUN npm install + +COPY . . + +CMD ["npm", "run", "dev"] diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..e215bc4 --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,36 @@ +This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app). + +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +# or +pnpm dev +# or +bun dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 
+ +You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. + +This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome! + +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details. 
diff --git a/frontend/__tests__/badge-list.test.tsx b/frontend/__tests__/badge-list.test.tsx new file mode 100644 index 0000000..ead4e7f --- /dev/null +++ b/frontend/__tests__/badge-list.test.tsx @@ -0,0 +1,18 @@ +import { render, screen } from '@testing-library/react' +import { BadgeList } from '../components/profile/badge-list' + +describe('BadgeList', () => { + it('renders "No badges" message when list is empty', () => { + render() + expect(screen.getByText(/No badges earned yet/i)).toBeInTheDocument() + }) + + it('renders badges when provided', () => { + const badges = [ + { id: 1, name: 'Test Badge', description: 'A test badge', icon: 'star', slug: 'test' } + ] + render() + expect(screen.getByText('Test Badge')).toBeInTheDocument() + expect(screen.getByText('A test badge')).toBeInTheDocument() + }) +}) diff --git a/frontend/app/admin/layout.tsx b/frontend/app/admin/layout.tsx new file mode 100644 index 0000000..1b9228e --- /dev/null +++ b/frontend/app/admin/layout.tsx @@ -0,0 +1,64 @@ +"use client" + +import Link from "next/link" +import { usePathname } from "next/navigation" +import { cn } from "@/lib/utils" +import { Button } from "@/components/ui/button" +import { LayoutDashboard, MessageSquare, ShieldAlert, Users } from "lucide-react" + +export default function AdminLayout({ + children, +}: { + children: React.ReactNode +}) { + const pathname = usePathname() + + const navItems = [ + { + title: "Dashboard", + href: "/admin", + icon: LayoutDashboard + }, + { + title: "Nicknames", + href: "/admin/nicknames", + icon: MessageSquare + }, + { + title: "Reports", + href: "/admin/reports", + icon: ShieldAlert + }, + { + title: "Users", + href: "/admin/users", + icon: Users + } + ] + + return ( +
+ +
{children}
+
+ ) +} diff --git a/frontend/app/admin/nicknames/page.tsx b/frontend/app/admin/nicknames/page.tsx new file mode 100644 index 0000000..5dc028b --- /dev/null +++ b/frontend/app/admin/nicknames/page.tsx @@ -0,0 +1,12 @@ +"use client" + +import { NicknameQueue } from "@/components/admin/nickname-queue" + +export default function NicknamesPage() { + return ( +
+

Nicknames

+ +
+ ) +} diff --git a/frontend/app/admin/page.tsx b/frontend/app/admin/page.tsx new file mode 100644 index 0000000..92a8ab0 --- /dev/null +++ b/frontend/app/admin/page.tsx @@ -0,0 +1,30 @@ +"use client" + +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card" + +export default function AdminDashboard() { + return ( +
+

Admin Dashboard

+
+ + + Pending Nicknames + + +
--
+
+
+ + + Pending Reports + + +
--
+
+
+
+

Select a category from the sidebar to manage content.

+
+ ) +} diff --git a/frontend/app/admin/reports/page.tsx b/frontend/app/admin/reports/page.tsx new file mode 100644 index 0000000..4a04183 --- /dev/null +++ b/frontend/app/admin/reports/page.tsx @@ -0,0 +1,12 @@ +"use client" + +import { ReportQueue } from "@/components/admin/report-queue" + +export default function ReportsPage() { + return ( +
+

Reports

+ +
+ ) +} diff --git a/frontend/app/archive/page.tsx b/frontend/app/archive/page.tsx new file mode 100644 index 0000000..c8437cd --- /dev/null +++ b/frontend/app/archive/page.tsx @@ -0,0 +1,33 @@ +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card" +import Link from "next/link" + +// Mock data for now - will fetch from API later +const recentShows = [ + { id: 1, date: "2023-12-31", venue: "Madison Square Garden", location: "New York, NY", band: "Phish" }, + { id: 2, date: "2023-12-30", venue: "Madison Square Garden", location: "New York, NY", band: "Phish" }, + { id: 3, date: "2023-12-29", venue: "Madison Square Garden", location: "New York, NY", band: "Phish" }, +] + +export default function ArchivePage() { + return ( +
+

Archive

+
+ {recentShows.map((show) => ( + + + + {show.date} + + +

{show.band}

+

{show.venue}

+

{show.location}

+
+
+ + ))} +
+
+ ) +} diff --git a/frontend/app/favicon.ico b/frontend/app/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..718d6fea4835ec2d246af9800eddb7ffb276240c GIT binary patch literal 25931 zcmeHv30#a{`}aL_*G&7qml|y<+KVaDM2m#dVr!KsA!#An?kSQM(q<_dDNCpjEux83 zLb9Z^XxbDl(w>%i@8hT6>)&Gu{h#Oeyszu?xtw#Zb1mO{pgX9699l+Qppw7jXaYf~-84xW z)w4x8?=youko|}Vr~(D$UXIbiXABHh`p1?nn8Po~fxRJv}|0e(BPs|G`(TT%kKVJAdg5*Z|x0leQq0 zkdUBvb#>9F()jo|T~kx@OM8$9wzs~t2l;K=woNssA3l6|sx2r3+kdfVW@e^8e*E}v zA1y5{bRi+3Z`uD3{F7LgFJDdvm;nJilkzDku>BwXH(8ItVCXk*-lSJnR?-2UN%hJ){&rlvg`CDTj z)Bzo!3v7Ou#83zEDEFcKt(f1E0~=rqeEbTnMvWR#{+9pg%7G8y>u1OVRUSoox-ovF z2Ydma(;=YuBY(eI|04{hXzZD6_f(v~H;C~y5=DhAC{MMS>2fm~1H_t2$56pc$NH8( z5bH|<)71dV-_oCHIrzrT`2s-5w_+2CM0$95I6X8p^r!gHp+j_gd;9O<1~CEQQGS8) zS9Qh3#p&JM-G8rHekNmKVewU;pJRcTAog68KYo^dRo}(M>36U4Us zfgYWSiHZL3;lpWT=zNAW>Dh#mB!_@Lg%$ms8N-;aPqMn+C2HqZgz&9~Eu z4|Kp<`$q)Uw1R?y(~S>ePdonHxpV1#eSP1B;Ogo+-Pk}6#0GsZZ5!||ev2MGdh}_m z{DeR7?0-1^zVs&`AV6Vt;r3`I`OI_wgs*w=eO%_#7Kepl{B@xiyCANc(l zzIyd4y|c6PXWq9-|KM8(zIk8LPk(>a)zyFWjhT!$HJ$qX1vo@d25W<fvZQ2zUz5WRc(UnFMKHwe1| zWmlB1qdbiA(C0jmnV<}GfbKtmcu^2*P^O?MBLZKt|As~ge8&AAO~2K@zbXelK|4T<{|y4`raF{=72kC2Kn(L4YyenWgrPiv z@^mr$t{#X5VuIMeL!7Ab6_kG$&#&5p*Z{+?5U|TZ`B!7llpVmp@skYz&n^8QfPJzL z0G6K_OJM9x+Wu2gfN45phANGt{7=C>i34CV{Xqlx(fWpeAoj^N0Biu`w+MVcCUyU* zDZuzO0>4Z6fbu^T_arWW5n!E45vX8N=bxTVeFoep_G#VmNlQzAI_KTIc{6>c+04vr zx@W}zE5JNSU>!THJ{J=cqjz+4{L4A{Ob9$ZJ*S1?Ggg3klFp!+Y1@K+pK1DqI|_gq z5ZDXVpge8-cs!o|;K73#YXZ3AShj50wBvuq3NTOZ`M&qtjj#GOFfgExjg8Gn8>Vq5 z`85n+9|!iLCZF5$HJ$Iu($dm?8~-ofu}tEc+-pyke=3!im#6pk_Wo8IA|fJwD&~~F zc16osQ)EBo58U7XDuMexaPRjU@h8tXe%S{fA0NH3vGJFhuyyO!Uyl2^&EOpX{9As0 zWj+P>{@}jxH)8|r;2HdupP!vie{sJ28b&bo!8`D^x}TE$%zXNb^X1p@0PJ86`dZyj z%ce7*{^oo+6%&~I!8hQy-vQ7E)0t0ybH4l%KltWOo~8cO`T=157JqL(oq_rC%ea&4 z2NcTJe-HgFjNg-gZ$6!Y`SMHrlj}Etf7?r!zQTPPSv}{so2e>Fjs1{gzk~LGeesX%r(Lh6rbhSo_n)@@G-FTQy93;l#E)hgP@d_SGvyCp0~o(Y;Ee8{ zdVUDbHm5`2taPUOY^MAGOw*>=s7=Gst=D+p+2yON!0%Hk` 
zz5mAhyT4lS*T3LS^WSxUy86q&GnoHxzQ6vm8)VS}_zuqG?+3td68_x;etQAdu@sc6 zQJ&5|4(I?~3d-QOAODHpZ=hlSg(lBZ!JZWCtHHSj`0Wh93-Uk)_S%zsJ~aD>{`A0~ z9{AG(e|q3g5B%wYKRxiL2Y$8(4w6bzchKuloQW#e&S3n+P- z8!ds-%f;TJ1>)v)##>gd{PdS2Oc3VaR`fr=`O8QIO(6(N!A?pr5C#6fc~Ge@N%Vvu zaoAX2&(a6eWy_q&UwOhU)|P3J0Qc%OdhzW=F4D|pt0E4osw;%<%Dn58hAWD^XnZD= z>9~H(3bmLtxpF?a7su6J7M*x1By7YSUbxGi)Ot0P77`}P3{)&5Un{KD?`-e?r21!4vTTnN(4Y6Lin?UkSM z`MXCTC1@4A4~mvz%Rh2&EwY))LeoT=*`tMoqcEXI>TZU9WTP#l?uFv+@Dn~b(>xh2 z;>B?;Tz2SR&KVb>vGiBSB`@U7VIWFSo=LDSb9F{GF^DbmWAfpms8Sx9OX4CnBJca3 zlj9(x!dIjN?OG1X4l*imJNvRCk}F%!?SOfiOq5y^mZW)jFL@a|r-@d#f7 z2gmU8L3IZq0ynIws=}~m^#@&C%J6QFo~Mo4V`>v7MI-_!EBMMtb%_M&kvAaN)@ZVw z+`toz&WG#HkWDjnZE!6nk{e-oFdL^$YnbOCN}JC&{$#$O27@|Tn-skXr)2ml2~O!5 zX+gYoxhoc7qoU?C^3~&!U?kRFtnSEecWuH0B0OvLodgUAi}8p1 zrO6RSXHH}DMc$&|?D004DiOVMHV8kXCP@7NKB zgaZq^^O<7PoKEp72kby@W0Z!Y*Ay{&vfg#C&gG@YVR9g?FEocMUi1gSN$+V+ayF45{a zuDZDTN}mS|;BO%gEf}pjBfN2-gIrU#G5~cucA;dokXW89%>AyXJJI z9X4UlIWA|ZYHgbI z5?oFk@A=Ik7lrEQPDH!H+b`7_Y~aDb_qa=B2^Y&Ow41cU=4WDd40dp5(QS-WMN-=Y z9g;6_-JdNU;|6cPwf$ak*aJIcwL@1n$#l~zi{c{EW?T;DaW*E8DYq?Umtz{nJ&w-M zEMyTDrC&9K$d|kZe2#ws6)L=7K+{ zQw{XnV6UC$6-rW0emqm8wJoeZK)wJIcV?dST}Z;G0Arq{dVDu0&4kd%N!3F1*;*pW zR&qUiFzK=@44#QGw7k1`3t_d8&*kBV->O##t|tonFc2YWrL7_eqg+=+k;!F-`^b8> z#KWCE8%u4k@EprxqiV$VmmtiWxDLgnGu$Vs<8rppV5EajBXL4nyyZM$SWVm!wnCj-B!Wjqj5-5dNXukI2$$|Bu3Lrw}z65Lc=1G z^-#WuQOj$hwNGG?*CM_TO8Bg-1+qc>J7k5c51U8g?ZU5n?HYor;~JIjoWH-G>AoUP ztrWWLbRNqIjW#RT*WqZgPJXU7C)VaW5}MiijYbABmzoru6EmQ*N8cVK7a3|aOB#O& zBl8JY2WKfmj;h#Q!pN%9o@VNLv{OUL?rixHwOZuvX7{IJ{(EdPpuVFoQqIOa7giLVkBOKL@^smUA!tZ1CKRK}#SSM)iQHk)*R~?M!qkCruaS!#oIL1c z?J;U~&FfH#*98^G?i}pA{ z9Jg36t4=%6mhY(quYq*vSxptes9qy|7xSlH?G=S@>u>Ebe;|LVhs~@+06N<4CViBk zUiY$thvX;>Tby6z9Y1edAMQaiH zm^r3v#$Q#2T=X>bsY#D%s!bhs^M9PMAcHbCc0FMHV{u-dwlL;a1eJ63v5U*?Q_8JO zT#50!RD619#j_Uf))0ooADz~*9&lN!bBDRUgE>Vud-i5ck%vT=r^yD*^?Mp@Q^v+V zG#-?gKlr}Eeqifb{|So?HM&g91P8|av8hQoCmQXkd?7wIJwb z_^v8bbg`SAn{I*4bH$u(RZ6*xUhuA~hc=8czK8SHEKTzSxgbwi~9(OqJB&gwb^l4+m`k*Q;_?>Y-APi1{k 
zAHQ)P)G)f|AyjSgcCFps)Fh6Bca*Xznq36!pV6Az&m{O8$wGFD? zY&O*3*J0;_EqM#jh6^gMQKpXV?#1?>$ml1xvh8nSN>-?H=V;nJIwB07YX$e6vLxH( zqYwQ>qxwR(i4f)DLd)-$P>T-no_c!LsN@)8`e;W@)-Hj0>nJ-}Kla4-ZdPJzI&Mce zv)V_j;(3ERN3_@I$N<^|4Lf`B;8n+bX@bHbcZTopEmDI*Jfl)-pFDvo6svPRoo@(x z);_{lY<;);XzT`dBFpRmGrr}z5u1=pC^S-{ce6iXQlLGcItwJ^mZx{m$&DA_oEZ)B{_bYPq-HA zcH8WGoBG(aBU_j)vEy+_71T34@4dmSg!|M8Vf92Zj6WH7Q7t#OHQqWgFE3ARt+%!T z?oLovLVlnf?2c7pTc)~cc^($_8nyKwsN`RA-23ed3sdj(ys%pjjM+9JrctL;dy8a( z@en&CQmnV(()bu|Y%G1-4a(6x{aLytn$T-;(&{QIJB9vMox11U-1HpD@d(QkaJdEb zG{)+6Dos_L+O3NpWo^=gR?evp|CqEG?L&Ut#D*KLaRFOgOEK(Kq1@!EGcTfo+%A&I z=dLbB+d$u{sh?u)xP{PF8L%;YPPW53+@{>5W=Jt#wQpN;0_HYdw1{ksf_XhO4#2F= zyPx6Lx2<92L-;L5PD`zn6zwIH`Jk($?Qw({erA$^bC;q33hv!d!>%wRhj# zal^hk+WGNg;rJtb-EB(?czvOM=H7dl=vblBwAv>}%1@{}mnpUznfq1cE^sgsL0*4I zJ##!*B?=vI_OEVis5o+_IwMIRrpQyT_Sq~ZU%oY7c5JMIADzpD!Upz9h@iWg_>>~j zOLS;wp^i$-E?4<_cp?RiS%Rd?i;f*mOz=~(&3lo<=@(nR!_Rqiprh@weZlL!t#NCc zO!QTcInq|%#>OVgobj{~ixEUec`E25zJ~*DofsQdzIa@5^nOXj2T;8O`l--(QyU^$t?TGY^7#&FQ+2SS3B#qK*k3`ye?8jUYSajE5iBbJls75CCc(m3dk{t?- zopcER9{Z?TC)mk~gpi^kbbu>b-+a{m#8-y2^p$ka4n60w;Sc2}HMf<8JUvhCL0B&Btk)T`ctE$*qNW8L$`7!r^9T+>=<=2qaq-;ll2{`{Rg zc5a0ZUI$oG&j-qVOuKa=*v4aY#IsoM+1|c4Z)<}lEDvy;5huB@1RJPquU2U*U-;gu z=En2m+qjBzR#DEJDO`WU)hdd{Vj%^0V*KoyZ|5lzV87&g_j~NCjwv0uQVqXOb*QrQ zy|Qn`hxx(58c70$E;L(X0uZZ72M1!6oeg)(cdKO ze0gDaTz+ohR-#d)NbAH4x{I(21yjwvBQfmpLu$)|m{XolbgF!pmsqJ#D}(ylp6uC> z{bqtcI#hT#HW=wl7>p!38sKsJ`r8}lt-q%Keqy%u(xk=yiIJiUw6|5IvkS+#?JTBl z8H5(Q?l#wzazujH!8o>1xtn8#_w+397*_cy8!pQGP%K(Ga3pAjsaTbbXJlQF_+m+-UpUUent@xM zg%jqLUExj~o^vQ3Gl*>wh=_gOr2*|U64_iXb+-111aH}$TjeajM+I20xw(((>fej-@CIz4S1pi$(#}P7`4({6QS2CaQS4NPENDp>sAqD z$bH4KGzXGffkJ7R>V>)>tC)uax{UsN*dbeNC*v}#8Y#OWYwL4t$ePR?VTyIs!wea+ z5Urmc)X|^`MG~*dS6pGSbU+gPJoq*^a=_>$n4|P^w$sMBBy@f*Z^Jg6?n5?oId6f{ z$LW4M|4m502z0t7g<#Bx%X;9<=)smFolV&(V^(7Cv2-sxbxopQ!)*#ZRhTBpx1)Fc zNm1T%bONzv6@#|dz(w02AH8OXe>kQ#1FMCzO}2J_mST)+ExmBr9cva-@?;wnmWMOk z{3_~EX_xadgJGv&H@zK_8{(x84`}+c?oSBX*Ge3VdfTt&F}yCpFP?CpW+BE^cWY0^ 
zb&uBN!Ja3UzYHK-CTyA5=L zEMW{l3Usky#ly=7px648W31UNV@K)&Ub&zP1c7%)`{);I4b0Q<)B}3;NMG2JH=X$U zfIW4)4n9ZM`-yRj67I)YSLDK)qfUJ_ij}a#aZN~9EXrh8eZY2&=uY%2N0UFF7<~%M zsB8=erOWZ>Ct_#^tHZ|*q`H;A)5;ycw*IcmVxi8_0Xk}aJA^ath+E;xg!x+As(M#0=)3!NJR6H&9+zd#iP(m0PIW8$ z1Y^VX`>jm`W!=WpF*{ioM?C9`yOR>@0q=u7o>BP-eSHqCgMDj!2anwH?s%i2p+Q7D zzszIf5XJpE)IG4;d_(La-xenmF(tgAxK`Y4sQ}BSJEPs6N_U2vI{8=0C_F?@7<(G; zo$~G=8p+076G;`}>{MQ>t>7cm=zGtfbdDXm6||jUU|?X?CaE?(<6bKDYKeHlz}DA8 zXT={X=yp_R;HfJ9h%?eWvQ!dRgz&Su*JfNt!Wu>|XfU&68iRikRrHRW|ZxzRR^`eIGt zIeiDgVS>IeExKVRWW8-=A=yA`}`)ZkWBrZD`hpWIxBGkh&f#ijr449~m`j6{4jiJ*C!oVA8ZC?$1RM#K(_b zL9TW)kN*Y4%^-qPpMP7d4)o?Nk#>aoYHT(*g)qmRUb?**F@pnNiy6Fv9rEiUqD(^O zzyS?nBrX63BTRYduaG(0VVG2yJRe%o&rVrLjbxTaAFTd8s;<<@Qs>u(<193R8>}2_ zuwp{7;H2a*X7_jryzriZXMg?bTuegABb^87@SsKkr2)0Gyiax8KQWstw^v#ix45EVrcEhr>!NMhprl$InQMzjSFH54x5k9qHc`@9uKQzvL4ihcq{^B zPrVR=o_ic%Y>6&rMN)hTZsI7I<3&`#(nl+3y3ys9A~&^=4?PL&nd8)`OfG#n zwAMN$1&>K++c{^|7<4P=2y(B{jJsQ0a#U;HTo4ZmWZYvI{+s;Td{Yzem%0*k#)vjpB zia;J&>}ICate44SFYY3vEelqStQWFihx%^vQ@Do(sOy7yR2@WNv7Y9I^yL=nZr3mb zXKV5t@=?-Sk|b{XMhA7ZGB@2hqsx}4xwCW!in#C zI@}scZlr3-NFJ@NFaJlhyfcw{k^vvtGl`N9xSo**rDW4S}i zM9{fMPWo%4wYDG~BZ18BD+}h|GQKc-g^{++3MY>}W_uq7jGHx{mwE9fZiPCoxN$+7 zrODGGJrOkcPQUB(FD5aoS4g~7#6NR^ma7-!>mHuJfY5kTe6PpNNKC9GGRiu^L31uG z$7v`*JknQHsYB!Tm_W{a32TM099djW%5e+j0Ve_ct}IM>XLF1Ap+YvcrLV=|CKo6S zb+9Nl3_YdKP6%Cxy@6TxZ>;4&nTneadr z_ES90ydCev)LV!dN=#(*f}|ZORFdvkYBni^aLbUk>BajeWIOcmHP#8S)*2U~QKI%S zyrLmtPqb&TphJ;>yAxri#;{uyk`JJqODDw%(Z=2`1uc}br^V%>j!gS)D*q*f_-qf8&D;W1dJgQMlaH5er zN2U<%Smb7==vE}dDI8K7cKz!vs^73o9f>2sgiTzWcwY|BMYHH5%Vn7#kiw&eItCqa zIkR2~Q}>X=Ar8W|^Ms41Fm8o6IB2_j60eOeBB1Br!boW7JnoeX6Gs)?7rW0^5psc- zjS16yb>dFn>KPOF;imD}e!enuIniFzv}n$m2#gCCv4jM#ArwlzZ$7@9&XkFxZ4n!V zj3dyiwW4Ki2QG{@i>yuZXQizw_OkZI^-3otXC{!(lUpJF33gI60ak;Uqitp74|B6I zgg{b=Iz}WkhCGj1M=hu4#Aw173YxIVbISaoc z-nLZC*6Tgivd5V`K%GxhBsp@SUU60-rfc$=wb>zdJzXS&-5(NRRodFk;Kxk!S(O(a0e7oY=E( zAyS;Ow?6Q&XA+cnkCb{28_1N8H#?J!*$MmIwLq^*T_9-z^&UE@A(z9oGYtFy6EZef LrJugUA?W`A8`#=m literal 
0 HcmV?d00001 diff --git a/frontend/app/globals.css b/frontend/app/globals.css new file mode 100644 index 0000000..97afb5e --- /dev/null +++ b/frontend/app/globals.css @@ -0,0 +1,122 @@ +@import "tailwindcss"; +@import "tw-animate-css"; + +@custom-variant dark (&:is(.dark *)); + +@theme inline { + --color-background: var(--background); + --color-foreground: var(--foreground); + --font-sans: var(--font-geist-sans); + --font-mono: var(--font-geist-mono); + --color-sidebar-ring: var(--sidebar-ring); + --color-sidebar-border: var(--sidebar-border); + --color-sidebar-accent-foreground: var(--sidebar-accent-foreground); + --color-sidebar-accent: var(--sidebar-accent); + --color-sidebar-primary-foreground: var(--sidebar-primary-foreground); + --color-sidebar-primary: var(--sidebar-primary); + --color-sidebar-foreground: var(--sidebar-foreground); + --color-sidebar: var(--sidebar); + --color-chart-5: var(--chart-5); + --color-chart-4: var(--chart-4); + --color-chart-3: var(--chart-3); + --color-chart-2: var(--chart-2); + --color-chart-1: var(--chart-1); + --color-ring: var(--ring); + --color-input: var(--input); + --color-border: var(--border); + --color-destructive: var(--destructive); + --color-accent-foreground: var(--accent-foreground); + --color-accent: var(--accent); + --color-muted-foreground: var(--muted-foreground); + --color-muted: var(--muted); + --color-secondary-foreground: var(--secondary-foreground); + --color-secondary: var(--secondary); + --color-primary-foreground: var(--primary-foreground); + --color-primary: var(--primary); + --color-popover-foreground: var(--popover-foreground); + --color-popover: var(--popover); + --color-card-foreground: var(--card-foreground); + --color-card: var(--card); + --radius-sm: calc(var(--radius) - 4px); + --radius-md: calc(var(--radius) - 2px); + --radius-lg: var(--radius); + --radius-xl: calc(var(--radius) + 4px); +} + +:root { + --radius: 0.625rem; + --background: oklch(1 0 0); + --foreground: oklch(0.141 0.005 285.823); 
+ --card: oklch(1 0 0); + --card-foreground: oklch(0.141 0.005 285.823); + --popover: oklch(1 0 0); + --popover-foreground: oklch(0.141 0.005 285.823); + --primary: oklch(0.21 0.006 285.885); + --primary-foreground: oklch(0.985 0 0); + --secondary: oklch(0.967 0.001 286.375); + --secondary-foreground: oklch(0.21 0.006 285.885); + --muted: oklch(0.967 0.001 286.375); + --muted-foreground: oklch(0.552 0.016 285.938); + --accent: oklch(0.967 0.001 286.375); + --accent-foreground: oklch(0.21 0.006 285.885); + --destructive: oklch(0.577 0.245 27.325); + --border: oklch(0.92 0.004 286.32); + --input: oklch(0.92 0.004 286.32); + --ring: oklch(0.705 0.015 286.067); + --chart-1: oklch(0.646 0.222 41.116); + --chart-2: oklch(0.6 0.118 184.704); + --chart-3: oklch(0.398 0.07 227.392); + --chart-4: oklch(0.828 0.189 84.429); + --chart-5: oklch(0.769 0.188 70.08); + --sidebar: oklch(0.985 0 0); + --sidebar-foreground: oklch(0.141 0.005 285.823); + --sidebar-primary: oklch(0.21 0.006 285.885); + --sidebar-primary-foreground: oklch(0.985 0 0); + --sidebar-accent: oklch(0.967 0.001 286.375); + --sidebar-accent-foreground: oklch(0.21 0.006 285.885); + --sidebar-border: oklch(0.92 0.004 286.32); + --sidebar-ring: oklch(0.705 0.015 286.067); +} + +.dark { + --background: oklch(0.141 0.005 285.823); + --foreground: oklch(0.985 0 0); + --card: oklch(0.21 0.006 285.885); + --card-foreground: oklch(0.985 0 0); + --popover: oklch(0.21 0.006 285.885); + --popover-foreground: oklch(0.985 0 0); + --primary: oklch(0.92 0.004 286.32); + --primary-foreground: oklch(0.21 0.006 285.885); + --secondary: oklch(0.274 0.006 286.033); + --secondary-foreground: oklch(0.985 0 0); + --muted: oklch(0.274 0.006 286.033); + --muted-foreground: oklch(0.705 0.015 286.067); + --accent: oklch(0.274 0.006 286.033); + --accent-foreground: oklch(0.985 0 0); + --destructive: oklch(0.704 0.191 22.216); + --border: oklch(1 0 0 / 10%); + --input: oklch(1 0 0 / 15%); + --ring: oklch(0.552 0.016 285.938); + --chart-1: 
oklch(0.488 0.243 264.376); + --chart-2: oklch(0.696 0.17 162.48); + --chart-3: oklch(0.769 0.188 70.08); + --chart-4: oklch(0.627 0.265 303.9); + --chart-5: oklch(0.645 0.246 16.439); + --sidebar: oklch(0.21 0.006 285.885); + --sidebar-foreground: oklch(0.985 0 0); + --sidebar-primary: oklch(0.488 0.243 264.376); + --sidebar-primary-foreground: oklch(0.985 0 0); + --sidebar-accent: oklch(0.274 0.006 286.033); + --sidebar-accent-foreground: oklch(0.985 0 0); + --sidebar-border: oklch(1 0 0 / 10%); + --sidebar-ring: oklch(0.552 0.016 285.938); +} + +@layer base { + * { + @apply border-border outline-ring/50; + } + body { + @apply bg-background text-foreground; + } +} diff --git a/frontend/app/groups/[id]/page.tsx b/frontend/app/groups/[id]/page.tsx new file mode 100644 index 0000000..b97fac5 --- /dev/null +++ b/frontend/app/groups/[id]/page.tsx @@ -0,0 +1,83 @@ +import { Button } from "@/components/ui/button" +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card" +import { ArrowLeft, Users, Lock, Globe } from "lucide-react" +import Link from "next/link" +import { notFound } from "next/navigation" +import { getApiUrl } from "@/lib/api-config" +import { GroupFeed } from "@/components/groups/group-feed" +import { JoinGroupButton } from "@/components/groups/join-group-button" + +async function getGroup(id: string) { + try { + const res = await fetch(`${getApiUrl()}/groups/${id}`, { cache: 'no-store' }) + if (!res.ok) return null + return res.json() + } catch (e) { + return null + } +} + +async function getGroupPosts(id: string) { + try { + const res = await fetch(`${getApiUrl()}/groups/${id}/posts`, { cache: 'no-store' }) + if (!res.ok) return [] + return res.json() + } catch (e) { + return [] + } +} + +export default async function GroupDetailPage({ params }: { params: Promise<{ id: string }> }) { + const { id } = await params + const group = await getGroup(id) + const posts = await getGroupPosts(id) + + if (!group) { + notFound() + } + + 
return ( +
+
+
+ + + +
+

+ {group.name} + {group.privacy === 'private' ? : } +

+

{group.description}

+
+
+ +
+ +
+
+ +
+ +
+ + + About + + +
+ + {group.member_count || 0} members +
+
+ Created {new Date(group.created_at).toLocaleDateString()} +
+
+
+
+
+
+ ) +} diff --git a/frontend/app/groups/create/page.tsx b/frontend/app/groups/create/page.tsx new file mode 100644 index 0000000..c5f9bc2 --- /dev/null +++ b/frontend/app/groups/create/page.tsx @@ -0,0 +1,104 @@ +"use client" + +import { Button } from "@/components/ui/button" +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card" +import { Input } from "@/components/ui/input" +import { Label } from "@/components/ui/label" +import { Textarea } from "@/components/ui/textarea" +import { useState } from "react" +import { useRouter } from "next/navigation" +import { getApiUrl } from "@/lib/api-config" + +export default function CreateGroupPage() { + const router = useRouter() + const [loading, setLoading] = useState(false) + const [formData, setFormData] = useState({ + name: "", + description: "", + privacy: "public" + }) + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault() + setLoading(true) + + const token = localStorage.getItem("token") + if (!token) { + alert("Please log in first") + setLoading(false) + return + } + + try { + const res = await fetch(`${getApiUrl()}/groups/`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}` + }, + body: JSON.stringify(formData) + }) + + if (res.ok) { + const group = await res.json() + router.push(`/groups/${group.id}`) + } else { + throw new Error("Failed to create group") + } + } catch (err) { + console.error(err) + alert("Error creating group") + } finally { + setLoading(false) + } + } + + return ( +
+ + + Create a New Group + + +
+
+ + setFormData({ ...formData, name: e.target.value })} + required + /> +
+ +
+ +