diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..32319724 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,72 @@ +# Git +.git +.gitignore +.github + +# Python +__pycache__ +*.py[cod] +*$py.class +*.so +.Python +env/ +venv/ +ENV/ +.venv +*.egg-info/ +.eggs/ +dist/ +build/ +.pytest_cache/ +.python-version + +# Node +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.npm +.yarn +frontend/node_modules/ +frontend/dist/ +frontend/.vite/ +test-client/node_modules/ +test-client/dist/ + +# Environment +.env +.env.local +.env.*.local +.flaskenv + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Logs +*.log +logs/ + +# Database +*.db +*.sqlite +*.sqlite3 +instance/ + +# Testing +.coverage +htmlcov/ +.tox/ + +# Documentation +*.md +!README.md + +# Misc +*.bak +*.tmp +.yamllint diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..1a4939eb --- /dev/null +++ b/.env.example @@ -0,0 +1,62 @@ +# ============================================================================= +# SHUBBLE ENVIRONMENT CONFIGURATION +# ============================================================================= +# Copy this file to .env and update with your values + +# ============================================================================= +# SERVICE PORTS (Docker) +# ============================================================================= +# Configure which ports services are exposed on the host machine +FRONTEND_PORT=3000 +BACKEND_PORT=8000 +POSTGRES_PORT=5432 +REDIS_PORT=6379 +TEST_FRONTEND_PORT=5174 +TEST_BACKEND_PORT=4000 + +# ============================================================================= +# SERVICE URLS +# ============================================================================= +# Configure URLs for all services +# Format: http://host:port (do not include trailing slash) + +# Main application URLs +FRONTEND_URL=http://localhost:3000 +VITE_FRONTEND_URL=http://localhost:3000 +VITE_BACKEND_URL=http://localhost:8000 + +# Test/Mock service URLs (for development/testing) +TEST_FRONTEND_URL=http://localhost:5174 +VITE_TEST_FRONTEND_URL=http://localhost:5174 +VITE_TEST_BACKEND_URL=http://localhost:4000 + +# ============================================================================= +# DATABASE +# ============================================================================= +# PostgreSQL credentials +POSTGRES_DB=shubble +POSTGRES_USER=shubble +POSTGRES_PASSWORD=shubble + +# PostgreSQL connection string +DATABASE_URL=postgresql://shubble:shubble@localhost:5432/shubble + +# ============================================================================= +# REDIS CACHE +# ============================================================================= +# Redis connection string +REDIS_URL=redis://localhost:6379/0 + +# ============================================================================= +# FASTAPI CONFIGURATION +# ============================================================================= +ENV=development +DEBUG=true +LOG_LEVEL=info + +# ============================================================================= +# SAMSARA API (Optional - for production) +# ============================================================================= +# Leave empty to use Mock Samsara API (test-server) in development +API_KEY= +SAMSARA_SECRET_BASE64= diff --git a/.env.prod.example b/.env.prod.example new file mode 100644 index 00000000..6fceaeb4 --- /dev/null +++ b/.env.prod.example @@ -0,0 +1,27 @@ +# postgres +POSTGRES_DB=shubble 
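+# NOTE: these credentials are embedded verbatim in DATABASE_URL; keep the two in sync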
+POSTGRES_USER=shubble +POSTGRES_PASSWORD=shubble +POSTGRES_PORT=5432 + +# python env variable +DATABASE_URL=postgresql://shubble:shubble@postgres:5432/shubble +DEBUG=false +LOG_LEVEL=info + +# Backend Docker +FRONTEND_URL=http://localhost:3000 +BACKEND_PORT=8000 + +# Secrets +API_KEY= +SAMSARA_SECRET= + +# redis +REDIS_PORT=6379 +REDIS_URL=redis://redis:6379/0 + +# Frontend Docker +FRONTEND_PORT=3000 +# Vite +VITE_BACKEND_URL=http://localhost:8000 diff --git a/.flaskenv b/.flaskenv deleted file mode 100644 index 9c8e6d28..00000000 --- a/.flaskenv +++ /dev/null @@ -1,2 +0,0 @@ -FLASK_APP=server:create_app -FLASK_ENV=development diff --git a/.github/workflows/build-client.yml b/.github/workflows/build-client.yml index 7bca2e1c..3eb356cf 100644 --- a/.github/workflows/build-client.yml +++ b/.github/workflows/build-client.yml @@ -18,7 +18,9 @@ jobs: node-version: '22' - name: Install dependencies + working-directory: frontend run: npm install - name: Run build + working-directory: frontend run: npm run build diff --git a/.github/workflows/schedule-lint.yml b/.github/workflows/schedule-lint.yml index 0954a638..37446bbb 100644 --- a/.github/workflows/schedule-lint.yml +++ b/.github/workflows/schedule-lint.yml @@ -4,7 +4,7 @@ on: workflow_dispatch: pull_request: paths: - - 'data/schedule.json' + - 'shared/schedule.json' jobs: validate-schedule: @@ -50,4 +50,4 @@ jobs: else "✅ Validation Passed: No duplicate bus schedules found" end - ' data/schedule.json + ' shared/schedule.json diff --git a/.gitignore b/.gitignore index 9330de37..99d5bc74 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,8 @@ -__pycache__/ +**__pycache__/ node_modules/ .env -client/dist +frontend/dist .DS_Store .vscode/ diff --git a/.python-version b/.python-version deleted file mode 100644 index 24ee5b1b..00000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.13 diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..09dbb22d --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,495 @@ +# Shubble - RPI Shuttle Tracker + +## Overview + +Shubble is a real-time shuttle tracking application for Rensselaer Polytechnic Institute (RPI). The system provides live GPS tracking, route information, and schedules through a modern web interface. 
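+
+As a quick orientation, the sketch below shows one hypothetical way a client could poll the tracker (the `/api/locations` response shape and the `X-Data-Age-Seconds` header are documented under "API Endpoints" below and in `backend/flask/routes.py`; the base URL and the use of `requests` are illustrative assumptions, not part of this repo):
+
+```python
+# Hypothetical polling client for the Shubble locations endpoint.
+import time
+import requests
+
+BACKEND_URL = "http://localhost:8000"  # assumption; the frontend reads this from VITE_BACKEND_URL
+
+def poll_locations() -> None:
+    resp = requests.get(f"{BACKEND_URL}/api/locations", timeout=10)
+    resp.raise_for_status()
+    # The endpoint returns a dict keyed by Samsara vehicle ID.
+    for vehicle_id, loc in resp.json().items():
+        print(vehicle_id, loc["name"], loc["latitude"], loc["longitude"], loc["route_name"])
+    # Data freshness is reported via a custom response header.
+    print("data age (s):", resp.headers.get("X-Data-Age-Seconds"))
+
+if __name__ == "__main__":
+    while True:
+        poll_locations()
+        time.sleep(60)  # matches the backend's 60 s cache TTL for this endpoint
+```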
+ +**Tech Stack:** +- Backend: FastAPI (Python 3.13) +- Frontend: React 19 + TypeScript + Vite +- Database: PostgreSQL 17 +- Cache: Redis 7 +- Maps: Apple MapKit JS +- GPS Data: Samsara API (with mock test server) + +--- + +## Project Structure + +``` +shuttletracker-new/ +├── backend/ +│ ├── __init__.py # Package exports (app, models, utils) +│ ├── config.py # Pydantic settings (DB, Redis, Samsara API) - shared +│ ├── database.py # Async SQLAlchemy engine/session - shared +│ ├── models.py # ORM models (5 tables) - shared +│ ├── utils.py # Database query helpers - shared +│ ├── time_utils.py # Timezone utilities - shared +│ │ +│ ├── flask/ # FastAPI backend application +│ │ ├── __init__.py # App factory, CORS, Redis setup +│ │ └── routes.py # API endpoints +│ │ +│ └── worker/ # Background worker package +│ ├── __init__.py # Package exports +│ ├── __main__.py # Module entry point +│ └── worker.py # Background GPS polling worker +│ +├── frontend/ # React frontend application +│ ├── src/ # Source code +│ │ ├── main.tsx # Entry point +│ │ ├── App.tsx # Router setup, main layout +│ │ ├── types/ # TypeScript interfaces +│ │ ├── components/ # Shared components (Navigation, ErrorBoundary) +│ │ ├── locations/ # Live map page + MapKit components +│ │ ├── schedule/ # Schedule page +│ │ ├── dashboard/ # Data analytics page +│ │ ├── about/ # About page +│ │ └── utils/ # Config, logger, map utilities +│ ├── package.json # Frontend dependencies and scripts +│ ├── vite.config.ts # Vite build configuration +│ ├── tsconfig.json # TypeScript configuration +│ └── eslint.config.js # ESLint configuration +│ +├── shared/ # Shared resources (routes, schedules, utilities) +│ ├── routes.json # Route polylines, stops, colors (39.5 KB) +│ ├── schedule.json # Schedule by day/route/time (26.5 KB) +│ ├── aggregated_schedule.json # Compiled schedule (16.7 KB) +│ ├── announcements.json # System announcements +│ ├── stops.py # Route matching logic (haversine distance) +│ └── schedules.py # Schedule analysis with scipy +│ +├── alembic/ # Database migrations +│ ├── env.py # Async migration config +│ └── versions/ # 3 migrations (initial, indices, constraints) +│ +├── docker/ # Container configurations +│ ├── Dockerfile.backend +│ ├── Dockerfile.worker +│ ├── Dockerfile.frontend +│ ├── Dockerfile.test-server +│ └── Dockerfile.test-client +│ +├── test-server/ # Mock Samsara API for dev +│ ├── server.py # FastAPI mock server +│ └── shuttle.py # Shuttle simulation +│ +├── test-client/ # Test frontend setup +├── .github/workflows/ # CI/CD pipelines +├── shubble.py # FastAPI entry point +└── docker-compose.yml # Multi-service orchestration +``` + +--- + +## Database Schema + +**5 Tables (PostgreSQL with async SQLAlchemy):** + +1. **`vehicles`** + - PK: `id` (Samsara vehicle ID) + - Fields: name, asset_type, license_plate, VIN, gateway info + - Relationships: locations, geofence_events, driver_assignments + +2. **`vehicle_locations`** + - PK: `id` (auto-increment) + - Fields: vehicle_id (FK), timestamp, lat, lon, heading, speed, address + - Unique constraint: (vehicle_id, timestamp) + - Index: (vehicle_id, timestamp) + +3. **`geofence_events`** + - PK: `id` (Samsara event ID) + - Fields: vehicle_id (FK), event_type, event_time, location + - Index: (vehicle_id, event_time) + - Tracks when vehicles enter/exit service area + +4. **`drivers`** + - PK: `id` (Samsara driver ID) + - Fields: name + +5. 
**`driver_vehicle_assignments`** + - Fields: driver_id (FK), vehicle_id (FK), assignment_start, assignment_end + - Links drivers to vehicles over time + +--- + +## API Endpoints + +**Backend (`routes.py`):** + +- `GET /api/locations` - Latest location for each shuttle in geofence + - Cache: 60 seconds + - Returns: VehicleLocationData with route name, vehicle/driver metadata + - Includes data latency calculation + +- `GET /api/schedule` - Route schedules +- `GET /api/routes` - Route definitions +- `GET /api/stops` - Stop locations +- `POST /api/webhook/geofence` - Samsara geofence events +- `POST /api/webhook/stats` - Samsara location updates + +**Frontend Routes (`App.tsx`):** + +- `/` - Live location map (default) +- `/schedule` - Schedule view +- `/about` - About page +- `/data` - Data dashboard +- `/map` - Fullscreen map +- `/generate-static-routes` - Route generation utility + +--- + +## Data Flow + +``` +┌─────────────────┐ +│ React Frontend │ HTTP GET /api/locations +│ (port 3000) │────────────────────────┐ +└─────────────────┘ │ + ▼ +┌─────────────────────────────────────────────────┐ +│ FastAPI Backend (port 8000) │ +│ - routes.py serves API │ +│ - Redis cache (60s TTL for locations) │ +│ - Queries PostgreSQL (async) │ +└─────────────────────┬───────────────────────────┘ + │ + ▼ +┌──────────────────────────────────────────────────┐ +│ PostgreSQL (port 5432) │ +│ - vehicle_locations (GPS data) │ +│ - geofence_events (in/out service area) │ +└──────────────────────────────────────────────────┘ + ▲ + │ +┌─────────────────────┴────────────────────────────┐ +│ Background Worker (separate container) │ +│ - backend/worker polls Samsara API every N secs │ +│ - Fetches GPS for vehicles in geofence │ +│ - Inserts to vehicle_locations table │ +└─────────────────────┬────────────────────────────┘ + │ + ▼ +┌──────────────────────────────────────────────────┐ +│ Samsara API (Production) │ +│ OR Mock Test Server (Development - port 4000) │ +│ - GET /fleet/vehicles/stats │ +└──────────────────────────────────────────────────┘ +``` + +--- + +## Key Components + +### Backend + +**`backend/config.py`** - Environment-based settings (shared by flask and worker) +- Database URL (postgres:// → postgresql+asyncpg://) +- Redis URL +- Samsara API credentials (base64 decoded) +- Timezone: America/New_York +- Modes: development, staging, production + +**`backend/database.py`** - Database infrastructure (shared) +- `Base`: SQLAlchemy declarative base for all models +- `create_async_db_engine()`: Creates async PostgreSQL+asyncpg engine +- `create_session_factory()`: Creates async session maker +- `get_db()`: FastAPI dependency injection for database sessions +- Uses connection pooling and pre-ping for reliability + +**`backend/models.py`** - SQLAlchemy ORM models (shared) +- 5 database models: Vehicle, GeofenceEvent, VehicleLocation, Driver, DriverVehicleAssignment +- Async-compatible relationships +- Indexes for performance (vehicle_id, timestamp) + +**`backend/utils.py`** - Database query helpers (shared) +- `get_vehicles_in_geofence_query()`: Subquery for active vehicles +- `get_vehicles_in_geofence()`: Cached version (900s TTL) + +**`backend/time_utils.py`** - Timezone utilities (shared) +- `get_campus_start_of_day()`: Campus timezone midnight to UTC conversion +- Uses America/New_York timezone + +**`backend/flask/routes.py`** - FastAPI endpoints +- CORS configured via middleware +- Cache decorator for frequently accessed data +- Webhook signature verification + +**`backend/worker/worker.py`** - Background 
task +- Async location polling from Samsara +- Pagination handling +- Duplicate location filtering +- Environment-aware (test-server vs production) + +### Frontend (`frontend/src/`) + +**`App.tsx`** - Main component +- React Router setup +- Selected route state management +- Git revision tracking +- Analytics integration + +**`locations/LiveLocation.tsx`** - Live tracking +- Fetches from `/api/locations` endpoint +- Real-time vehicle position display +- MapKit integration + +**`locations/MapKitMap.tsx`** - Map component +- Apple MapKit JS wrapper +- Shuttle markers with custom icons +- Route polyline rendering +- Data age indicator +- Fullscreen and embedded modes + +**`components/Navigation.tsx`** - App navigation +- Header/footer with route selection +- Responsive design + +**`utils/config.ts`** - Runtime config +- Backend URL from `VITE_BACKEND_URL` +- Staging vs production detection +- Analytics configuration + +### Shared Resources (`shared/`) + +**`stops.py`** - Route matching +- Loads `routes.json` polylines +- `get_closest_point(origin_point)` - Haversine distance to find route +- Returns: (distance, coords, route_name, polyline_index) +- Handles ambiguous proximity (returns None if routes too similar) + +**`schedules.py`** - Schedule analysis +- Redis caching for coordinate lookups (24h TTL) +- scipy's linear_sum_assignment for optimization +- Labels vehicle locations with stop info + +**`routes.json`** - Route definitions +- Per-route: COLOR, STOPS, POLYLINE_STOPS, ROUTES +- Used by backend (route matching) and frontend (map rendering) + +--- + +## Docker Services + +**docker-compose.yml profiles:** + +**Backend Profile:** +- `postgres`: PostgreSQL 17 with persistent volume +- `redis`: Redis 7 with AOF persistence +- `backend`: FastAPI server (2 uvicorn workers) +- `worker`: Background GPS poller + +**Frontend Profile:** +- `frontend`: Nginx serving React build + +**Test Profile:** +- `test-server`: Mock Samsara API (port 4000) +- `test-client`: Test frontend (port 5174) + +**Health Checks:** +- Backend: HTTP GET /api/locations +- Worker: Process running check +- Postgres/Redis: Native health checks + +--- + +## Environment Variables + +**Key variables (from `.env.example`):** + +```bash +# Service URLs +FRONTEND_URL=http://localhost:3000 +VITE_BACKEND_URL=http://localhost:8000 + +# Database +DATABASE_URL=postgresql://shubble:shubble@postgres:5432/shubble + +# Cache +REDIS_URL=redis://redis:6379 + +# Samsara API +API_KEY=sms_live_... +SAMSARA_SECRET_BASE64=... 
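+# Leave both empty in development to use the mock Samsara test-server instead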
+ +# Environment +ENVIRONMENT=development +``` + +--- + +## Development Workflow + +**Start all services:** +```bash +docker-compose --profile backend --profile frontend up +``` + +**Start with test server:** +```bash +docker-compose --profile test --profile backend up +``` + +**Frontend development:** +```bash +cd frontend +npm install +npm run dev # Parses schedule, copies data, runs Vite +``` + +**Backend development:** +```bash +pip install -r requirements.txt +alembic upgrade head +uvicorn shubble:app --reload +``` + +**Database migrations:** +```bash +alembic revision --autogenerate -m "description" +alembic upgrade head +``` + +--- + +## Important Files + +| File | Purpose | +|------|---------| +| `shubble.py` | FastAPI app entry point | +| `backend/config.py` | Shared configuration (settings) | +| `backend/database.py` | Shared database infrastructure | +| `backend/models.py` | Shared ORM models (database schema) | +| `backend/utils.py` | Shared database query utilities | +| `backend/time_utils.py` | Shared timezone utilities | +| `backend/flask/__init__.py` | App factory, middleware, Redis | +| `backend/flask/routes.py` | API endpoints | +| `backend/worker/worker.py` | GPS polling worker | +| `frontend/src/App.tsx` | Frontend router/layout | +| `frontend/src/locations/LiveLocation.tsx` | Live tracking page | +| `frontend/package.json` | Frontend dependencies and scripts | +| `frontend/vite.config.ts` | Frontend build config | +| `shared/routes.json` | Route polylines/stops/colors | +| `shared/stops.py` | Route matching algorithm | +| `docker-compose.yml` | Service orchestration | +| `alembic.ini` | Migration config | + +--- + +## Testing + +**Mock API Server (`test-server/`):** +- Simulates Samsara API for development +- Provides realistic vehicle movement +- No external API keys needed +- Reads real route polylines from `shared/` + +**Test Client (`test-client/`):** +- Separate Vite frontend for testing +- Uses test-server backend (port 4000) + +**CI/CD (`.github/workflows/`):** +- Build validation +- Schedule data linting +- YAML validation +- Deployment pipelines (staging, production, Dokku) + +--- + +## Security + +**Authentication:** +- No user authentication currently implemented +- Webhook signature verification using `SAMSARA_SECRET_BASE64` + +**CORS:** +- Configured to whitelist `FRONTEND_URL` only +- Set in `backend/flask/__init__.py` + +**Database:** +- Async SQLAlchemy prevents SQL injection +- Pydantic models validate API responses +- Connection pooling with pre-ping health checks + +**Secrets:** +- Stored in `.env` files (not committed) +- Base64 encoding for webhook secrets +- Separate test/production API keys + +--- + +## Key Algorithms + +**Route Matching (`shared/stops.py`):** +1. Load all route polylines from `routes.json` +2. For each polyline coordinate, calculate haversine distance to GPS point +3. Find minimum distance across all routes +4. Return route name if distance is unambiguous +5. Return None if multiple routes are too close (ambiguous) + +**Schedule Assignment (`shared/schedules.py`):** +1. Load vehicle locations from database +2. Load scheduled stops from `schedule.json` +3. Use scipy's `linear_sum_assignment` to optimize vehicle-to-stop matching +4. Cache results in Redis (24-hour TTL) + +**Location Caching (`backend/flask/routes.py`):** +1. Check Redis for cached location data (60s TTL) +2. If miss, query PostgreSQL for latest locations +3. Join with geofence events to filter active vehicles +4. Join with driver assignments +5. 
Calculate route name using `stops.py` +6. Cache result and return + +--- + +## Deployment Architecture + +**Production:** +- Multiple Docker containers on single host +- Nginx reverse proxy (frontend container) +- FastAPI backend (2 uvicorn workers) +- Separate worker container for GPS polling +- PostgreSQL + Redis with persistent volumes + +**Startup Sequence:** +1. PostgreSQL starts, waits for health check +2. Redis starts, waits for health check +3. Backend runs `alembic upgrade head`, then starts uvicorn +4. Worker starts after backend is healthy +5. Frontend nginx serves static files + +**Environment-based Configuration:** +- Development: Uses test-server, detailed logs +- Staging: Real Samsara API, verbose logs +- Production: Real Samsara API, minimal logs + +--- + +## Notes + +**Branch Status:** +- Current branch: `split-worker` +- Many client/ files deleted (legacy frontend being removed) +- New structure: `backend/` and `frontend/` directories +- Git status shows migration in progress + +**Timezone Handling:** +- Campus timezone: America/New_York +- Backend stores all timestamps in UTC +- `time_utils.py` provides conversion helpers +- Schedule queries use campus midnight in UTC + +**Performance Optimizations:** +- Redis caching (60s for locations, 900s for geofence queries) +- Database indexes on (vehicle_id, timestamp) and (vehicle_id, event_time) +- Unique constraint prevents duplicate location records +- Async database queries throughout +- Connection pooling with pre-ping + +**Future Considerations:** +- User authentication system +- Admin interface for announcements/schedules +- Real-time WebSocket updates +- Push notifications for delays +- Historical analytics dashboard diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ed0ae60e..0f319383 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -20,7 +20,8 @@ cd shubble ```bash # Python dependencies pip install -r requirements.txt -# Node.js dependencies +# Node.js dependencies (frontend) +cd frontend npm install ``` @@ -87,22 +88,24 @@ replacing port with your actual port # Running the frontend -To run the frontend, `cd` to the project root and run: +To run the frontend, `cd` to the `frontend` directory and run: ```bash +cd frontend npm run dev ``` This will start the development server and open the frontend in your default web browser. The frontend will automatically reload when you make changes to the source files. Note: `npm run dev` is for development only. It serves dynamic files and will not work with the backend. You should only use `npm run dev` when you are developing a purely frontend change. -To build the frontend for the backend to use, run: +To build the frontend for the backend to use, run from the `frontend` directory: ```bash +cd frontend npm run build ``` -This will create a static build of the frontend in the `/client/dist` directory, which the backend can serve. **You must build the frontend before you run the backend**. +This will create a static build of the frontend in the `/frontend/dist` directory, which the backend can serve. **You must build the frontend before you run the backend**. # Running the backend @@ -116,7 +119,7 @@ To run the backend, you need to run the _server_ and the _worker_. They must be flask run --port 8000 ``` -This will start the Flask development server on port 8000. The backend will serve the built frontend files from the `/client/dist` directory. +This will start the Flask development server on port 8000. 
The backend will serve the built frontend files from the `/frontend/dist` directory. #### Option 2. Run the backend using `gunicorn`, which is what Shubble's production server runs: @@ -127,10 +130,10 @@ gunicorn shubble:app #### To run the worker, `cd` to the project root and run: ```bash -python -m server.worker +python -m backend.worker ``` -This will start the worker process that handles background tasks, such as updating vehicle locations. It's important that you run it using `python -m server.worker` (as a python package) so that it can find its local imports. +This will start the worker process that handles background tasks, such as updating vehicle locations. It's important that you run it using `python -m backend.worker` (as a python package) so that it can find its local imports. # Testing the backend diff --git a/Procfile b/Procfile deleted file mode 100644 index 46620a96..00000000 --- a/Procfile +++ /dev/null @@ -1,3 +0,0 @@ -release: flask --app server:create_app db upgrade -web: gunicorn shubble:app --bind 0.0.0.0:$PORT --log-level $LOG_LEVEL -worker: python -m server.worker diff --git a/backend/__init__.py b/backend/__init__.py new file mode 100644 index 00000000..ca771e4c --- /dev/null +++ b/backend/__init__.py @@ -0,0 +1,24 @@ +"""Backend package - re-exports for convenient imports.""" +from backend.flask import app +from backend.config import settings +from backend.database import Base, create_async_db_engine, create_session_factory, get_db +from backend.models import Vehicle, GeofenceEvent, VehicleLocation, Driver, DriverVehicleAssignment +from backend.time_utils import get_campus_start_of_day +from backend.utils import get_vehicles_in_geofence_query, get_vehicles_in_geofence + +__all__ = [ + "app", + "settings", + "Base", + "create_async_db_engine", + "create_session_factory", + "get_db", + "Vehicle", + "GeofenceEvent", + "VehicleLocation", + "Driver", + "DriverVehicleAssignment", + "get_campus_start_of_day", + "get_vehicles_in_geofence_query", + "get_vehicles_in_geofence", +] diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 00000000..72919315 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,142 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +# sqlalchemy.url = driver://user:pass@localhost/dbname +# Note: Database URL is loaded from .env file in env.py + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. 
+[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/README b/backend/alembic/README new file mode 100644 index 00000000..98e4f9c4 --- /dev/null +++ b/backend/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 00000000..e27cd3f2 --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,106 @@ +"""Alembic environment configuration for async SQLAlchemy.""" +import asyncio +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context + +# Load application config to get DATABASE_URL +from backend.config import settings +from backend.database import Base + +# Import all models to ensure they're registered with Base +from backend.models import ( + Vehicle, + GeofenceEvent, + VehicleLocation, + Driver, + DriverVehicleAssignment, +) + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Set database URL from settings +database_url = settings.DATABASE_URL +if database_url.startswith("postgresql://"): + database_url = database_url.replace("postgresql://", "postgresql+asyncpg://") +elif database_url.startswith("postgres://"): + database_url = database_url.replace("postgres://", "postgresql+asyncpg://") + +config.set_main_option("sqlalchemy.url", database_url) + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + """Run migrations with the given connection.""" + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """Run migrations in 'online' mode with async support.""" + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 00000000..480b130d --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/backend/alembic/versions/4f42c8d834fa_initial.py b/backend/alembic/versions/4f42c8d834fa_initial.py new file mode 100644 index 00000000..667fd0cd --- /dev/null +++ b/backend/alembic/versions/4f42c8d834fa_initial.py @@ -0,0 +1,99 @@ +"""initial + +Revision ID: 4f42c8d834fa +Revises: +Create Date: 2025-12-24 23:11:50.160351 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '4f42c8d834fa' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('drivers', + sa.Column('id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('vehicles', + sa.Column('id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('asset_type', sa.String(), nullable=False), + sa.Column('license_plate', sa.String(), nullable=True), + sa.Column('vin', sa.String(), nullable=True), + sa.Column('maintenance_id', sa.String(), nullable=True), + sa.Column('gateway_model', sa.String(), nullable=True), + sa.Column('gateway_serial', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('driver_vehicle_assignments', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('driver_id', sa.String(), nullable=False), + sa.Column('vehicle_id', sa.String(), nullable=False), + sa.Column('assignment_start', sa.DateTime(timezone=True), nullable=False), + sa.Column('assignment_end', sa.DateTime(timezone=True), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(['driver_id'], ['drivers.id'], ), + sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_driver_vehicle_assignments_driver_id'), 'driver_vehicle_assignments', ['driver_id'], unique=False) + op.create_index(op.f('ix_driver_vehicle_assignments_vehicle_id'), 'driver_vehicle_assignments', ['vehicle_id'], unique=False) + op.create_table('geofence_events', + sa.Column('id', sa.String(), nullable=False), + sa.Column('vehicle_id', sa.String(), nullable=False), + sa.Column('event_type', sa.String(), nullable=False), + sa.Column('event_time', sa.DateTime(timezone=True), nullable=False), + sa.Column('address_name', sa.String(), nullable=True), + sa.Column('address_formatted', sa.String(), nullable=True), + sa.Column('latitude', sa.Float(), nullable=True), + sa.Column('longitude', sa.Float(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('vehicle_locations', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('vehicle_id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False), + sa.Column('latitude', sa.Float(), nullable=False), + sa.Column('longitude', sa.Float(), nullable=False), + sa.Column('heading_degrees', sa.Float(), nullable=True), + sa.Column('speed_mph', sa.Float(), nullable=True), + sa.Column('is_ecu_speed', sa.Boolean(), nullable=False), + sa.Column('formatted_location', sa.String(), nullable=True), + sa.Column('address_id', sa.String(), nullable=True), + sa.Column('address_name', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_vehicle_locations_vehicle_id'), 'vehicle_locations', ['vehicle_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_vehicle_locations_vehicle_id'), table_name='vehicle_locations') + op.drop_table('vehicle_locations') + op.drop_table('geofence_events') + op.drop_index(op.f('ix_driver_vehicle_assignments_vehicle_id'), table_name='driver_vehicle_assignments') + op.drop_index(op.f('ix_driver_vehicle_assignments_driver_id'), table_name='driver_vehicle_assignments') + op.drop_table('driver_vehicle_assignments') + op.drop_table('vehicles') + op.drop_table('drivers') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/648b513fafc7_add_composite_indices.py b/backend/alembic/versions/648b513fafc7_add_composite_indices.py new file mode 100644 index 00000000..05bb29d2 --- /dev/null +++ b/backend/alembic/versions/648b513fafc7_add_composite_indices.py @@ -0,0 +1,36 @@ +"""add composite indices + +Revision ID: 648b513fafc7 +Revises: 4f42c8d834fa +Create Date: 2025-12-25 12:55:12.028445 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '648b513fafc7' +down_revision: Union[str, None] = '4f42c8d834fa' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_index('ix_geofence_events_vehicle_time', 'geofence_events', ['vehicle_id', 'event_time'], unique=False) + op.drop_index(op.f('ix_vehicle_locations_vehicle_id'), table_name='vehicle_locations') + op.create_index('ix_vehicle_locations_vehicle_timestamp', 'vehicle_locations', ['vehicle_id', 'timestamp'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('ix_vehicle_locations_vehicle_timestamp', table_name='vehicle_locations') + op.create_index(op.f('ix_vehicle_locations_vehicle_id'), 'vehicle_locations', ['vehicle_id'], unique=False) + op.drop_index('ix_geofence_events_vehicle_time', table_name='geofence_events') + # ### end Alembic commands ### diff --git a/backend/alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py b/backend/alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py new file mode 100644 index 00000000..8f881353 --- /dev/null +++ b/backend/alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py @@ -0,0 +1,32 @@ +"""enforce uniqueness constraint of vehicle and timestamp + +Revision ID: ac296168d213 +Revises: 648b513fafc7 +Create Date: 2025-12-25 16:06:42.170853 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'ac296168d213' +down_revision: Union[str, None] = '648b513fafc7' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_unique_constraint('uq_vehicle_locations_vehicle_timestamp', 'vehicle_locations', ['vehicle_id', 'timestamp']) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint('uq_vehicle_locations_vehicle_timestamp', 'vehicle_locations', type_='unique') + # ### end Alembic commands ### diff --git a/backend/config.py b/backend/config.py new file mode 100644 index 00000000..811fe7f2 --- /dev/null +++ b/backend/config.py @@ -0,0 +1,56 @@ +"""Configuration using Pydantic BaseSettings.""" +import base64 +from typing import Optional +from zoneinfo import ZoneInfo +from pydantic import field_validator +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + """Application settings loaded from environment variables.""" + + model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore") + + # Hosting settings + DEBUG: bool = True + ENV: str = "development" + LOG_LEVEL: str = "INFO" + + # CORS settings + FRONTEND_URL: str = "http://localhost:3000" + TEST_FRONTEND_URL: str = "http://localhost:5174" + + # Database settings + DATABASE_URL: str + + # Redis settings + REDIS_URL: str = "redis://localhost:6379/0" + + # Samsara API secret (base64 encoded) + # for webhook signature verification + SAMSARA_SECRET_BASE64: Optional[str] = None + + # Samsara API key + API_KEY: Optional[str] = None + + # Shubble settings + CAMPUS_TZ: ZoneInfo = ZoneInfo("America/New_York") + + @field_validator("DATABASE_URL") + @classmethod + def fix_database_url(cls, v: str) -> str: + """Convert postgres:// to postgresql:// for SQLAlchemy compatibility.""" + if v.startswith("postgres://"): + return v.replace("postgres://", "postgresql://", 1) + return v + + @property + def SAMSARA_SECRET(self) -> Optional[bytes]: + """Decode base64 Samsara secret.""" + if self.SAMSARA_SECRET_BASE64: + return base64.b64decode(self.SAMSARA_SECRET_BASE64.encode("utf-8")) + return None + + +# Global settings instance +settings = Settings() diff --git a/backend/database.py b/backend/database.py new file mode 100644 index 00000000..b24cf384 --- /dev/null +++ b/backend/database.py @@ -0,0 +1,71 @@ +"""Async database configuration for FastAPI.""" +from fastapi import Request +from typing import AsyncGenerator, TYPE_CHECKING +from sqlalchemy.ext.asyncio import ( + create_async_engine, + AsyncSession, + async_sessionmaker, + AsyncEngine, +) +from sqlalchemy.orm import declarative_base + +if TYPE_CHECKING: + from fastapi import Request + +Base = declarative_base() + + +def create_async_db_engine(database_url: str, echo: bool = False) -> AsyncEngine: + """ + Create an async database engine. + + Args: + database_url: Database connection URL + echo: Whether to log SQL statements + + Returns: + AsyncEngine instance + """ + # Convert postgresql:// to postgresql+asyncpg:// + if database_url.startswith("postgresql://"): + database_url = database_url.replace("postgresql://", "postgresql+asyncpg://") + elif database_url.startswith("postgres://"): + database_url = database_url.replace("postgres://", "postgresql+asyncpg://") + + return create_async_engine(database_url, echo=echo, pool_pre_ping=True) + + +def create_session_factory(engine: AsyncEngine): + """ + Create an async session factory. + + Args: + engine: AsyncEngine instance + + Returns: + async_sessionmaker instance + """ + return async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, + autocommit=False, + autoflush=False, + ) + + +async def get_db(request: Request) -> AsyncGenerator[AsyncSession, None]: + """ + FastAPI dependency for getting async database sessions. 
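+
+    Typical usage in a route handler is `db: AsyncSession = Depends(get_db)`.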
+ + Accesses the session_factory from app.state which is initialized + during application startup in the lifespan context manager. + + Args: + request: FastAPI Request object (injected automatically) + + Yields: + AsyncSession instance + """ + async with request.app.state.session_factory() as session: + yield session diff --git a/server/.gitignore b/backend/flask/.gitignore similarity index 69% rename from server/.gitignore rename to backend/flask/.gitignore index e80d409a..43ae0e2a 100644 --- a/server/.gitignore +++ b/backend/flask/.gitignore @@ -1,3 +1,2 @@ __pycache__/ *.py[cod] -config.py diff --git a/backend/flask/__init__.py b/backend/flask/__init__.py new file mode 100644 index 00000000..386b25a4 --- /dev/null +++ b/backend/flask/__init__.py @@ -0,0 +1,81 @@ +"""FastAPI application factory.""" +import logging +from contextlib import asynccontextmanager +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi_cache import FastAPICache +from fastapi_cache.backends.redis import RedisBackend +from redis import asyncio as aioredis + +from backend.config import settings +from backend.database import create_async_db_engine, create_session_factory + + +# Configure logging +numeric_level = logging._nameToLevel.get(settings.LOG_LEVEL.upper(), logging.INFO) +logging.basicConfig( + level=numeric_level, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", +) + +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Lifespan context manager for startup and shutdown events.""" + # Startup + logger.info("Starting up FastAPI application...") + + # Initialize database engine and session factory + app.state.db_engine = create_async_db_engine( + settings.DATABASE_URL, echo=settings.DEBUG + ) + app.state.session_factory = create_session_factory(app.state.db_engine) + logger.info("Database engine and session factory initialized") + + # Initialize Redis cache + app.state.redis = await aioredis.from_url( + settings.REDIS_URL, + encoding="utf-8", + decode_responses=False, + ) + FastAPICache.init(RedisBackend(app.state.redis), prefix="fastapi-cache") + logger.info("Redis cache initialized") + + yield + + # Shutdown + logger.info("Shutting down FastAPI application...") + await app.state.redis.close() + await app.state.db_engine.dispose() + logger.info("Database connections closed") + + +def create_app() -> FastAPI: + """Create and configure the FastAPI application.""" + app = FastAPI( + title="Shubble API", + description="Shuttle tracking API for Shubble", + version="2.0.0", + lifespan=lifespan, + ) + + # Configure CORS + app.add_middleware( + CORSMiddleware, + allow_origins=[settings.FRONTEND_URL], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + # Register routes + from .routes import router + app.include_router(router) + + return app + + +# Create app instance +app = create_app() diff --git a/backend/flask/routes.py b/backend/flask/routes.py new file mode 100644 index 00000000..b2b23670 --- /dev/null +++ b/backend/flask/routes.py @@ -0,0 +1,403 @@ +"""FastAPI routes for the Shubble API.""" +import logging +import hmac +from hashlib import sha256 +from datetime import datetime, timezone +from pathlib import Path + +from fastapi import APIRouter, Request, Depends, HTTPException, Response +from fastapi.responses import FileResponse, JSONResponse +from fastapi_cache import FastAPICache +from fastapi_cache.decorator import cache +from sqlalchemy import func, and_, select +from 
sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.dialects import postgresql +from sqlalchemy.orm import selectinload + +from backend.database import get_db +from backend.models import Vehicle, GeofenceEvent, VehicleLocation, DriverVehicleAssignment +from backend.config import settings +from backend.time_utils import get_campus_start_of_day +from backend.utils import get_vehicles_in_geofence_query +from shared.stops import Stops +# from shared.schedules import Schedule + +logger = logging.getLogger(__name__) + +router = APIRouter() + + +@router.get("/api/locations") +@cache(expire=60, namespace="locations") +async def get_locations(response: Response, db: AsyncSession = Depends(get_db)): + """ + Returns the latest location for each vehicle currently inside the geofence. + The vehicle is considered inside the geofence if its latest geofence event + today is a 'geofenceEntry'. + """ + # Get query for vehicles in geofence and convert to subquery + geofence_entries = get_vehicles_in_geofence_query().subquery() + + # Subquery: latest vehicle location per vehicle + latest_locations = ( + select( + VehicleLocation.vehicle_id, + func.max(VehicleLocation.timestamp).label("latest_time"), + ) + .where(VehicleLocation.vehicle_id.in_(select(geofence_entries.c.vehicle_id))) + .group_by(VehicleLocation.vehicle_id) + .subquery() + ) + + # Join to get full location and vehicle info for vehicles in geofence + query = ( + select(VehicleLocation) + .join( + latest_locations, + and_( + VehicleLocation.vehicle_id == latest_locations.c.vehicle_id, + VehicleLocation.timestamp == latest_locations.c.latest_time, + ), + ) + .options(selectinload(VehicleLocation.vehicle)) + ) + + result = await db.execute(query) + results = result.scalars().all() + + # Get current driver assignments for all vehicles in results + vehicle_ids = [loc.vehicle_id for loc in results] + current_assignments = {} + if vehicle_ids: + assignments_query = ( + select(DriverVehicleAssignment) + .where( + DriverVehicleAssignment.vehicle_id.in_(vehicle_ids), + DriverVehicleAssignment.assignment_end.is_(None), + ) + ).options(selectinload(DriverVehicleAssignment.driver)) + assignments_result = await db.execute(assignments_query) + assignments = assignments_result.scalars().all() + for assignment in assignments: + current_assignments[assignment.vehicle_id] = assignment + + # Format response + response_data = {} + oldest_timestamp = None + for loc in results: + vehicle = loc.vehicle + # Track oldest data point for latency calculation + if oldest_timestamp is None or loc.timestamp < oldest_timestamp: + oldest_timestamp = loc.timestamp + # Get closest loop + closest_distance, _, closest_route_name, polyline_index = Stops.get_closest_point( + (loc.latitude, loc.longitude) + ) + if closest_distance is None: + route_name = "UNCLEAR" + else: + route_name = closest_route_name if closest_distance < 0.050 else None + + # Get current driver info + driver_info = None + assignment = current_assignments.get(loc.vehicle_id) + if assignment and assignment.driver: + driver_info = { + "id": assignment.driver.id, + "name": assignment.driver.name, + } + + response_data[loc.vehicle_id] = { + "name": loc.name, + "latitude": loc.latitude, + "longitude": loc.longitude, + "timestamp": loc.timestamp.isoformat(), + "heading_degrees": loc.heading_degrees, + "speed_mph": loc.speed_mph, + "route_name": route_name, + "polyline_index": polyline_index, + "is_ecu_speed": loc.is_ecu_speed, + "formatted_location": loc.formatted_location, + "address_id": loc.address_id, + 
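+            # Samsara address metadata, then static vehicle fields and the current driver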
"address_name": loc.address_name, + "license_plate": vehicle.license_plate, + "vin": vehicle.vin, + "asset_type": vehicle.asset_type, + "gateway_model": vehicle.gateway_model, + "gateway_serial": vehicle.gateway_serial, + "driver": driver_info, + } + + # Add timing metadata as HTTP headers to help frontend synchronize with Samsara API + now = datetime.now(timezone.utc) + data_age = (now - oldest_timestamp).total_seconds() if oldest_timestamp else None + + response.headers['X-Server-Time'] = now.isoformat() + response.headers['X-Oldest-Data-Time'] = oldest_timestamp.isoformat() if oldest_timestamp else '' + response.headers['X-Data-Age-Seconds'] = str(data_age) if data_age is not None else '' + + return response_data + + +@router.post("/api/webhook") +async def webhook(request: Request, db: AsyncSession = Depends(get_db)): + """ + Handles incoming webhook events for geofence entries/exits. + Expects JSON payload with event details. + """ + # Verify webhook signature if secret is configured + if secret := settings.SAMSARA_SECRET: + try: + timestamp = request.headers["X-Samsara-Timestamp"] + signature = request.headers["X-Samsara-Signature"] + + # Read request body + body = await request.body() + + prefix = f"v1:{timestamp}:" + message = bytes(prefix, "utf-8") + body + h = hmac.new(secret, message, sha256) + expected_signature = "v1=" + h.hexdigest() + + if expected_signature != signature: + return JSONResponse( + {"status": "error", "message": "Failed to authenticate request."}, + status_code=401, + ) + except KeyError as e: + return JSONResponse({"status": "error", "message": str(e)}, status_code=400) + + # Parse JSON payload + try: + data = await request.json() + except Exception: + logger.error(f"Invalid JSON received") + return JSONResponse( + {"status": "error", "message": "Invalid JSON"}, status_code=400 + ) + + if not data: + return JSONResponse( + {"status": "error", "message": "Empty payload"}, status_code=400 + ) + + try: + # Parse top-level event details + event_id = data.get("eventId") + event_time = datetime.fromisoformat( + data.get("eventTime").replace("Z", "+00:00") + ) + event_data = data.get("data", {}) + + # Parse condition details + conditions = event_data.get("conditions", []) + if not conditions: + logger.error(f"No conditions found in webhook data: {data}") + return JSONResponse( + {"status": "error", "message": "Missing conditions"}, status_code=400 + ) + + for condition in conditions: + details = condition.get("details", {}) + # Determine if entry or exit + if "geofenceEntry" in details: + geofence_event = details.get("geofenceEntry", {}) + else: + geofence_event = details.get("geofenceExit", {}) + + vehicle_data = geofence_event.get("vehicle") + if not vehicle_data: + continue # Skip conditions with no vehicle + + address = geofence_event.get("address", {}) + geofence = address.get("geofence", {}) + polygon = geofence.get("polygon", {}) + vertices = polygon.get("vertices", []) + latitude = vertices[0].get("latitude") if vertices else None + longitude = vertices[0].get("longitude") if vertices else None + + # Extract vehicle info + vehicle_id = vehicle_data.get("id") + vehicle_name = vehicle_data.get("name") + + if not (vehicle_id and vehicle_name): + continue # Skip invalid entries + + # Find or create vehicle + vehicle_query = select(Vehicle).where(Vehicle.id == vehicle_id) + result = await db.execute(vehicle_query) + vehicle = result.scalar_one_or_none() + + if not vehicle: + vehicle = Vehicle( + id=vehicle_id, + name=vehicle_name, + 
asset_type=vehicle_data.get("assetType", "vehicle"), + license_plate=vehicle_data.get("licensePlate"), + vin=vehicle_data.get("vin"), + maintenance_id=vehicle_data.get("externalIds", {}).get( + "maintenanceId" + ), + gateway_model=vehicle_data.get("gateway", {}).get("model"), + gateway_serial=vehicle_data.get("gateway", {}).get("serial"), + ) + db.add(vehicle) + await db.flush() # Ensure vehicle.id is available + + # Insert geofence event (using PostgreSQL upsert) + insert_stmt = postgresql.insert(GeofenceEvent).values( + id=event_id, + vehicle_id=vehicle_id, + event_type=( + "geofenceEntry" if "geofenceEntry" in details else "geofenceExit" + ), + event_time=event_time, + address_name=address.get("name"), + address_formatted=address.get("formattedAddress"), + latitude=latitude, + longitude=longitude, + ) + insert_stmt = insert_stmt.on_conflict_do_nothing() + await db.execute(insert_stmt) + + await db.commit() + + # Invalidate cache for vehicles in geofence + await FastAPICache.clear(namespace="vehicles_in_geofence") + + return {"status": "success"} + + except Exception as e: + await db.rollback() + logger.exception(f"Error processing webhook data: {e}") + return JSONResponse({"status": "error", "message": str(e)}, status_code=500) + + +@router.get("/api/today") +async def data_today(db: AsyncSession = Depends(get_db)): + """Get all location data and geofence events for today.""" + now = datetime.now(timezone.utc) + start_of_day = get_campus_start_of_day() + + # Query locations today + locations_query = ( + select(VehicleLocation) + .where( + and_( + VehicleLocation.timestamp >= start_of_day, + VehicleLocation.timestamp <= now, + ) + ) + .order_by(VehicleLocation.timestamp.asc()) + ) + locations_result = await db.execute(locations_query) + locations_today = locations_result.scalars().all() + + # Query events today + events_query = ( + select(GeofenceEvent) + .where( + and_( + GeofenceEvent.event_time >= start_of_day, + GeofenceEvent.event_time <= now, + ) + ) + .order_by(GeofenceEvent.event_time.asc()) + ) + events_result = await db.execute(events_query) + events_today = events_result.scalars().all() + + # Build response dict + locations_today_dict = {} + for location in locations_today: + vehicle_location = { + "latitude": location.latitude, + "longitude": location.longitude, + "timestamp": location.timestamp, + "speed_mph": location.speed_mph, + "heading_degrees": location.heading_degrees, + "address_id": location.address_id, + } + if location.vehicle_id in locations_today_dict: + locations_today_dict[location.vehicle_id]["data"].append(vehicle_location) + else: + locations_today_dict[location.vehicle_id] = { + "entry": None, + "exit": None, + "data": [vehicle_location], + } + + for geofence_event in events_today: + if geofence_event.event_type == "geofenceEntry": + if ( + "entry" not in locations_today_dict[geofence_event.vehicle_id] + ): # First entry + locations_today_dict[geofence_event.vehicle_id]["entry"] = ( + geofence_event.event_time + ) + elif geofence_event.event_type == "geofenceExit": + if ( + "entry" in locations_today_dict[geofence_event.vehicle_id] + ): # Makes sure that the vehicle already entered + locations_today_dict[geofence_event.vehicle_id]["exit"] = ( + geofence_event.event_time + ) + + return locations_today_dict + + +@router.get("/api/routes") +async def get_shuttle_routes(): + """Serve routes.json file.""" + root_dir = Path(__file__).parent.parent + routes_file = root_dir / "data" / "routes.json" + if routes_file.exists(): + return FileResponse(routes_file) + 
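+    # Fall through to a 404 when routes.json is absent (e.g. data files not bundled)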
raise HTTPException(status_code=404, detail="Routes file not found") + + +@router.get("/api/schedule") +async def get_shuttle_schedule(): + """Serve schedule.json file.""" + root_dir = Path(__file__).parent.parent + schedule_file = root_dir / "shared" / "schedule.json" + if schedule_file.exists(): + return FileResponse(schedule_file) + raise HTTPException(status_code=404, detail="Schedule file not found") + + +@router.get("/api/aggregated-schedule") +async def get_aggregated_shuttle_schedule(): + """Serve aggregated_schedule.json file.""" + root_dir = Path(__file__).parent.parent + aggregated_file = root_dir / "shared" / "aggregated_schedule.json" + if aggregated_file.exists(): + return FileResponse(aggregated_file) + raise HTTPException(status_code=404, detail="Aggregated schedule file not found") + + +@router.get("/api/matched-schedules") +@cache(expire=3600, namespace="matched_schedules") +async def get_matched_shuttle_schedules(force_recompute: bool = False): + """ + Return cached matched schedules unless force_recompute=true, + in which case recompute and update the cache. + """ + try: + # Note: With fastapi-cache2, the @cache decorator handles caching automatically + # The force_recompute parameter would need custom cache invalidation logic + # For now, we compute fresh data if requested + + matched = {} # TODO: port Schedule.match_shuttles_to_schedules() to the async backend + + return { + "status": "success", + "matchedSchedules": matched, + "source": "recomputed" if force_recompute else "computed", + } + + except Exception as e: + logger.exception(f"Error in matched schedule endpoint: {e}") + return JSONResponse( + {"status": "error", "message": str(e)}, status_code=500 + ) diff --git a/backend/models.py b/backend/models.py new file mode 100644 index 00000000..588463eb --- /dev/null +++ b/backend/models.py @@ -0,0 +1,110 @@ +"""SQLAlchemy models for async database operations.""" +from datetime import datetime, timezone +from typing import Optional +from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey, Index, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column, relationship +from backend.database import Base + + +class Vehicle(Base): + __tablename__ = "vehicles" + + id: Mapped[str] = mapped_column(String, primary_key=True) + name: Mapped[str] = mapped_column(String, nullable=False) + asset_type: Mapped[str] = mapped_column(String, default="vehicle") + license_plate: Mapped[Optional[str]] = mapped_column(String, nullable=True) + vin: Mapped[Optional[str]] = mapped_column(String, nullable=True) + maintenance_id: Mapped[Optional[str]] = mapped_column(String, nullable=True) + gateway_model: Mapped[Optional[str]] = mapped_column(String, nullable=True) + gateway_serial: Mapped[Optional[str]] = mapped_column(String, nullable=True) + + # Relationships + geofence_events: Mapped[list["GeofenceEvent"]] = relationship(back_populates="vehicle", lazy="selectin") + locations: Mapped[list["VehicleLocation"]] = relationship(back_populates="vehicle", lazy="selectin") + driver_assignments: Mapped[list["DriverVehicleAssignment"]] = relationship(back_populates="vehicle", lazy="selectin") + + def __repr__(self): + return f"<Vehicle {self.id} name={self.name}>" + + +class GeofenceEvent(Base): + __tablename__ = "geofence_events" + __table_args__ = ( + Index("ix_geofence_events_vehicle_time", "vehicle_id", "event_time"), + ) + + id: Mapped[str] = mapped_column(String, primary_key=True) # eventId from webhook + vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False) + event_type: Mapped[str] =
mapped_column(String, nullable=False) + event_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + address_name: Mapped[Optional[str]] = mapped_column(String, nullable=True) + address_formatted: Mapped[Optional[str]] = mapped_column(String, nullable=True) + latitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + longitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) + + # Relationships + vehicle: Mapped["Vehicle"] = relationship(back_populates="geofence_events") + + def __repr__(self): + return f"<GeofenceEvent {self.id} {self.event_type} vehicle={self.vehicle_id}>" + + +class VehicleLocation(Base): + __tablename__ = "vehicle_locations" + __table_args__ = ( + Index("ix_vehicle_locations_vehicle_timestamp", "vehicle_id", "timestamp"), + UniqueConstraint("vehicle_id", "timestamp", name="uq_vehicle_locations_vehicle_timestamp"), + ) + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False) + name: Mapped[Optional[str]] = mapped_column(String, nullable=True) + timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + latitude: Mapped[float] = mapped_column(Float, nullable=False) + longitude: Mapped[float] = mapped_column(Float, nullable=False) + heading_degrees: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + speed_mph: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + is_ecu_speed: Mapped[bool] = mapped_column(Boolean, default=False) + formatted_location: Mapped[Optional[str]] = mapped_column(String, nullable=True) + address_id: Mapped[Optional[str]] = mapped_column(String, nullable=True) + address_name: Mapped[Optional[str]] = mapped_column(String, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) + + # Relationships + vehicle: Mapped["Vehicle"] = relationship(back_populates="locations") + + def __repr__(self): + return f"<VehicleLocation vehicle={self.vehicle_id} at={self.timestamp}>" + + +class Driver(Base): + __tablename__ = "drivers" + + id: Mapped[str] = mapped_column(String, primary_key=True) # Samsara driver ID + name: Mapped[str] = mapped_column(String, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) + + # Relationships + assignments: Mapped[list["DriverVehicleAssignment"]] = relationship(back_populates="driver", lazy="selectin") + + def __repr__(self): + return f"<Driver {self.id} name={self.name}>" + + +class DriverVehicleAssignment(Base): + __tablename__ = "driver_vehicle_assignments" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + driver_id: Mapped[str] = mapped_column(String, ForeignKey("drivers.id"), nullable=False, index=True) + vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False, index=True) + assignment_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + assignment_end: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True) # null = currently assigned + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) + + # Relationships + driver: Mapped["Driver"] = relationship(back_populates="assignments") + vehicle: Mapped["Vehicle"] = relationship(back_populates="driver_assignments") + + def __repr__(self): + return f"<DriverVehicleAssignment {self.driver_id} -> {self.vehicle_id}>" diff
--git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 00000000..840a089b --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,15 @@ +fastapi>=0.115.0 +uvicorn[standard]>=0.34.0 +httpx>=0.28.1 +asyncpg>=0.30.0 +SQLAlchemy>=2.0.41 +alembic>=1.14.0 +pydantic>=2.10.0 +pydantic-settings>=2.7.0 +python-dotenv>=1.1.1 +redis +fastapi-cache2[redis]>=0.2.2 +numpy +pandas>=2.0.0 +scipy +requests diff --git a/backend/time_utils.py b/backend/time_utils.py new file mode 100644 index 00000000..275bb140 --- /dev/null +++ b/backend/time_utils.py @@ -0,0 +1,18 @@ +"""Time utility functions for timezone handling.""" +from datetime import datetime, timezone + +from backend.config import settings + + +def get_campus_start_of_day(): + """ + Get the start of the current day in campus timezone (America/New_York), + converted to UTC. + + Returns: + datetime: Midnight in campus timezone, converted to UTC + """ + now = datetime.now(settings.CAMPUS_TZ) + midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) + + return midnight.astimezone(timezone.utc) diff --git a/backend/utils.py b/backend/utils.py new file mode 100644 index 00000000..8aa8d7bb --- /dev/null +++ b/backend/utils.py @@ -0,0 +1,64 @@ +"""Utility functions for database queries.""" +from sqlalchemy import func, and_, select +from fastapi_cache.decorator import cache + +from backend.models import GeofenceEvent +from backend.time_utils import get_campus_start_of_day + + +def get_vehicles_in_geofence_query(): + """ + Returns a query for vehicle_ids where the latest geofence event from today + is a geofenceEntry. + + Returns: + SQLAlchemy select query that returns vehicle IDs currently in the geofence + """ + start_of_today = get_campus_start_of_day() + + # Subquery to get latest event per vehicle from today's events + subquery = ( + select( + GeofenceEvent.vehicle_id, + func.max(GeofenceEvent.event_time).label("latest_time"), + ) + .where(GeofenceEvent.event_time >= start_of_today) + .group_by(GeofenceEvent.vehicle_id) + .subquery() + ) + + # Join back to get the latest event row where type is entry + query = ( + select(GeofenceEvent.vehicle_id) + .join( + subquery, + and_( + GeofenceEvent.vehicle_id == subquery.c.vehicle_id, + GeofenceEvent.event_time == subquery.c.latest_time, + ), + ) + .where(GeofenceEvent.event_type == "geofenceEntry") + ) + + return query + + +@cache(expire=900, namespace="vehicles_in_geofence") +async def get_vehicles_in_geofence(session_factory): + """ + Returns a cached set of vehicle_ids where the latest geofence event from today + is a geofenceEntry. + + This function executes the query and caches the result for 900 seconds (15 minutes); the cache namespace is cleared when new geofence events arrive.
+ + Args: + session_factory: Async session factory for creating database sessions + + Returns: + Set of vehicle IDs currently in the geofence + """ + async with session_factory() as session: + query = get_vehicles_in_geofence_query() + result = await session.execute(query) + rows = result.all() + return {row.vehicle_id for row in rows} diff --git a/backend/worker/__init__.py b/backend/worker/__init__.py new file mode 100644 index 00000000..3c450b69 --- /dev/null +++ b/backend/worker/__init__.py @@ -0,0 +1,4 @@ +"""Background worker package for Shubble.""" +from .worker import run_worker + +__all__ = ["run_worker"] diff --git a/backend/worker/__main__.py b/backend/worker/__main__.py new file mode 100644 index 00000000..94308fef --- /dev/null +++ b/backend/worker/__main__.py @@ -0,0 +1,6 @@ +"""Entry point for running the worker as a module.""" +import asyncio +from .worker import run_worker + +if __name__ == "__main__": + asyncio.run(run_worker()) diff --git a/backend/worker/worker.py b/backend/worker/worker.py new file mode 100644 index 00000000..38284ccf --- /dev/null +++ b/backend/worker/worker.py @@ -0,0 +1,331 @@ +"""Async background worker for fetching vehicle data from Samsara API.""" +import asyncio +import logging +import os +from datetime import datetime, timezone + +import httpx +from sqlalchemy import select +from sqlalchemy.dialects import postgresql +from fastapi_cache import FastAPICache +from fastapi_cache.backends.redis import RedisBackend +from redis import asyncio as aioredis + +from backend.config import settings +from backend.database import create_async_db_engine, create_session_factory +from backend.models import VehicleLocation, Driver, DriverVehicleAssignment +from backend.utils import get_vehicles_in_geofence + +# Logging config +numeric_level = logging._nameToLevel.get(settings.LOG_LEVEL.upper(), logging.INFO) +logging.basicConfig( + level=numeric_level, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", +) +logger = logging.getLogger(__name__) + + +async def update_locations(session_factory): + """ + Fetches and updates vehicle locations for vehicles currently in the geofence. + Uses pagination token to fetch subsequent pages. 
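+    Inserts are idempotent: rows that collide on the (vehicle_id, timestamp)
+    unique constraint are skipped via PostgreSQL ON CONFLICT DO NOTHING.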
+ """ + # Get the current list of vehicles in the geofence (cached) + current_vehicle_ids = await get_vehicles_in_geofence(session_factory) + + # No vehicles to update + if not current_vehicle_ids: + logger.info("No vehicles in geofence to update") + return + + headers = {"Accept": "application/json"} + # Determine API URL based on environment + if settings.ENV == "development": + url = "http://localhost:4000/fleet/vehicles/stats" + else: + api_key = settings.API_KEY + if not api_key: + logger.error("API_KEY not set") + return + headers["Authorization"] = f"Bearer {api_key}" + url = "https://api.samsara.com/fleet/vehicles/stats" + + url_params = { + "vehicleIds": ",".join(current_vehicle_ids), + "types": "gps", + } + + try: + async with httpx.AsyncClient(timeout=30.0) as client: + has_next_page = True + after_token = None + new_records_added = 0 + + while has_next_page: + # Add pagination token if present + if after_token: + url_params["after"] = after_token + has_next_page = False + + # Make the API request + response = await client.get(url, headers=headers, params=url_params) + + # Handle non-200 responses + if response.status_code != 200: + logger.error(f"API error: {response.status_code} {response.text}") + return + + data = response.json() + + # Handle pagination + pagination = data.get("pagination", {}) + if pagination.get("hasNextPage"): + has_next_page = True + after_token = pagination.get("endCursor", after_token) + + async with session_factory() as session: + for vehicle in data.get("data", []): + # Process each vehicle's GPS data + vehicle_id = vehicle.get("id") + vehicle_name = vehicle.get("name") + gps = vehicle.get("gps") + + if not vehicle_id or not gps: + continue + + timestamp_str = gps.get("time") + if not timestamp_str: + continue + + # Convert ISO 8601 string to datetime + timestamp = datetime.fromisoformat( + timestamp_str.replace("Z", "+00:00") + ) + + # Use PostgreSQL upsert with ON CONFLICT DO NOTHING and RETURNING + insert_stmt = postgresql.insert(VehicleLocation).values( + vehicle_id=vehicle_id, + timestamp=timestamp, + name=vehicle_name, + latitude=gps.get("latitude"), + longitude=gps.get("longitude"), + heading_degrees=gps.get("headingDegrees"), + speed_mph=gps.get("speedMilesPerHour"), + is_ecu_speed=gps.get("isEcuSpeed", False), + formatted_location=gps.get("reverseGeo", {}).get( + "formattedLocation" + ), + address_id=gps.get("address", {}).get("id"), + address_name=gps.get("address", {}).get("name"), + ) + # ON CONFLICT on the composite index (vehicle_id, timestamp) DO NOTHING + insert_stmt = insert_stmt.on_conflict_do_nothing( + index_elements=["vehicle_id", "timestamp"] + ) + # RETURNING id to check if insert occurred + insert_stmt = insert_stmt.returning(VehicleLocation.id) + + result = await session.execute(insert_stmt) + inserted_id = result.scalar_one_or_none() + + # If a row was returned, an insert occurred + if inserted_id: + new_records_added += 1 + + # Only commit if we actually added new records + if new_records_added > 0: + await session.commit() + logger.info( + f"Updated locations for {len(current_vehicle_ids)} vehicles - {new_records_added} new records" + ) + # Invalidate cache for locations + await FastAPICache.clear(namespace="vehicles_in_geofence") + else: + logger.info( + f"No new location data for {len(current_vehicle_ids)} vehicles" + ) + + except httpx.HTTPError as e: + logger.error(f"Failed to fetch locations: {e}") + except Exception as e: + logger.exception(f"Unexpected error in update_locations: {e}") + + +async def 
update_driver_assignments(session_factory, vehicle_ids): + """ + Fetches and updates driver-vehicle assignments for vehicles currently in the geofence. + Creates/updates driver records and tracks assignment changes. + """ + if not vehicle_ids: + logger.info("No vehicles to fetch driver assignments for") + return + + headers = {"Accept": "application/json"} + # Determine API URL based on environment + if settings.ENV == "development": + url = "http://localhost:4000/fleet/driver-vehicle-assignments" + else: + api_key = settings.API_KEY + if not api_key: + logger.error("API_KEY not set for driver assignments") + return + headers["Authorization"] = f"Bearer {api_key}" + url = "https://api.samsara.com/fleet/driver-vehicle-assignments" + + url_params = { + "filterBy": "vehicles", + "vehicleIds": ",".join(vehicle_ids), + } + + try: + async with httpx.AsyncClient(timeout=30.0) as client: + has_next_page = True + after_token = None + assignments_updated = 0 + + while has_next_page: + if after_token: + url_params["after"] = after_token + has_next_page = False + + response = await client.get(url, headers=headers, params=url_params) + if response.status_code != 200: + logger.error( + f"Driver assignments API error: {response.status_code} {response.text}" + ) + return + + data = response.json() + logger.info( + f'Driver assignments API response: {len(data.get("data", []))} assignments returned' + ) + + pagination = data.get("pagination", {}) + if pagination.get("hasNextPage"): + has_next_page = True + after_token = pagination.get("endCursor", after_token) + + now = datetime.now(timezone.utc) + + async with session_factory() as session: + for assignment in data.get("data", []): + driver_data = assignment.get("driver") + vehicle_data = assignment.get("vehicle") + + if not driver_data or not vehicle_data: + continue + + driver_id = driver_data.get("id") + driver_name = driver_data.get("name") + vehicle_id = vehicle_data.get("id") + assigned_at_str = assignment.get("assignedAtTime") + + if not driver_id or not vehicle_id: + continue + + # Parse assignment time + if assigned_at_str: + assigned_at = datetime.fromisoformat( + assigned_at_str.replace("Z", "+00:00") + ) + else: + assigned_at = now + + # Create or update driver + driver_query = select(Driver).where(Driver.id == driver_id) + result = await session.execute(driver_query) + driver = result.scalar_one_or_none() + + if not driver: + driver = Driver(id=driver_id, name=driver_name) + session.add(driver) + logger.info(f"Created new driver: {driver_name} ({driver_id})") + elif driver.name != driver_name: + driver.name = driver_name + + # Check if there's an existing open assignment for this vehicle + existing_query = select(DriverVehicleAssignment).where( + DriverVehicleAssignment.vehicle_id == vehicle_id, + DriverVehicleAssignment.assignment_end.is_(None), + ) + result = await session.execute(existing_query) + existing = result.scalar_one_or_none() + + if existing: + # If same driver, no change needed + if existing.driver_id == driver_id: + continue + # Different driver - close the old assignment + existing.assignment_end = now + logger.info( + f"Closed assignment for driver {existing.driver_id} on vehicle {vehicle_id}" + ) + + # Create new assignment + new_assignment = DriverVehicleAssignment( + driver_id=driver_id, + vehicle_id=vehicle_id, + assignment_start=assigned_at, + ) + session.add(new_assignment) + assignments_updated += 1 + + if assignments_updated > 0: + await session.commit() + logger.info(f"Updated {assignments_updated} driver assignments") 
+ else: + logger.info("No driver assignment changes detected") + + except httpx.HTTPError as e: + logger.error(f"Failed to fetch driver assignments: {e}") + except Exception as e: + logger.exception(f"Unexpected error in update_driver_assignments: {e}") + + +async def run_worker(): + """Main worker loop that runs continuously.""" + logger.info("Async worker started...") + + # Initialize database engine and session factory + db_engine = create_async_db_engine(settings.DATABASE_URL, echo=settings.DEBUG) + session_factory = create_session_factory(db_engine) + logger.info("Database engine and session factory initialized") + + # Initialize Redis cache for FastAPI cache + try: + redis = await aioredis.from_url( + settings.REDIS_URL, + encoding="utf-8", + decode_responses=False, + ) + FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache") + logger.info("Redis cache initialized") + except Exception as e: + logger.error(f"Failed to initialize Redis cache: {e}") + # Continue without cache + + try: + while True: + try: + # Get current vehicles in geofence before updating (cached) + current_vehicle_ids = await get_vehicles_in_geofence(session_factory) + + # Update locations and driver assignments in parallel + await asyncio.gather( + update_locations(session_factory), + update_driver_assignments(session_factory, current_vehicle_ids), + ) + + except Exception as e: + logger.exception(f"Error in worker loop: {e}") + + await asyncio.sleep(5) + finally: + # Cleanup on shutdown + logger.info("Shutting down worker...") + await db_engine.dispose() + logger.info("Database connections closed") + + +if __name__ == "__main__": + asyncio.run(run_worker()) diff --git a/client/package-lock.json b/client/package-lock.json deleted file mode 100644 index 7131c12b..00000000 --- a/client/package-lock.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "name": "client", - "version": "0.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "client" - } - } -} diff --git a/client/package.json b/client/package.json deleted file mode 100644 index 3948574c..00000000 --- a/client/package.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "client", - "type": "module", - "private": true, - "scripts": { - "dev": "cd .. && npm run dev", - "build": "cd .. && npm run build", - "preview": "cd .. 
&& npm run preview" - } -} diff --git a/client/src/.gitignore b/client/src/.gitignore deleted file mode 100644 index 8fce6030..00000000 --- a/client/src/.gitignore +++ /dev/null @@ -1 +0,0 @@ -data/ diff --git a/client/src/ts/config.ts b/client/src/ts/config.ts deleted file mode 100644 index 3e8c2dcd..00000000 --- a/client/src/ts/config.ts +++ /dev/null @@ -1,8 +0,0 @@ -const isStaging = import.meta.env.VITE_DEPLOY_MODE !== 'production'; - -const config = { - isStaging, - isDev: isStaging || import.meta.env.DEV -}; - -export default config; \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..897f43b3 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,133 @@ +services: + postgres: + image: postgres:17-alpine + environment: + POSTGRES_DB: ${POSTGRES_DB:-shubble} + POSTGRES_USER: ${POSTGRES_USER:-shubble} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-shubble} + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "${POSTGRES_PORT:-5432}:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U shubble"] + interval: 10s + timeout: 5s + retries: 5 + profiles: + - backend + - test + + redis: + image: redis:7-alpine + command: redis-server --appendonly yes + volumes: + - redis_data:/data + ports: + - "${REDIS_PORT:-6379}:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + profiles: + - backend + - test + + backend: + build: + context: . + dockerfile: docker/Dockerfile.backend + ports: + - "${BACKEND_PORT:-8000}:8000" + extra_hosts: + - "localhost:host-gateway" + environment: + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + DEBUG: ${DEBUG:-true} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + API_KEY: ${API_KEY:-} + SAMSARA_SECRET: ${SAMSARA_SECRET:-} + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + profiles: + - backend + + worker: + build: + context: . + dockerfile: docker/Dockerfile.worker + extra_hosts: + - "localhost:host-gateway" + environment: + DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + API_KEY: ${API_KEY:-} + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + backend: + condition: service_started + restart: unless-stopped + profiles: + - backend + + frontend: + build: + context: . + dockerfile: docker/Dockerfile.frontend + ports: + - "${FRONTEND_PORT:-3000}:80" + environment: + VITE_BACKEND_URL: ${VITE_BACKEND_URL:-http://localhost:8000} + restart: unless-stopped + profiles: + - frontend + + test-server: + build: + context: . + dockerfile: docker/Dockerfile.test-server + ports: + - "${TEST_BACKEND_PORT:-4000}:4000" + extra_hosts: + - "localhost:host-gateway" + environment: + TEST_FRONTEND_URL: ${TEST_FRONTEND_URL:-http://localhost:5174} + DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + DEBUG: ${DEBUG:-true} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + profiles: + - test + + test-client: + build: + context: . 
+ dockerfile: docker/Dockerfile.test-client + ports: + - "${TEST_FRONTEND_PORT:-5174}:80" + environment: + VITE_TEST_BACKEND_URL: ${VITE_TEST_BACKEND_URL:-http://localhost:4000} + restart: unless-stopped + profiles: + - test + +volumes: + postgres_data: + redis_data: diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend new file mode 100644 index 00000000..0ee0d08b --- /dev/null +++ b/docker/Dockerfile.backend @@ -0,0 +1,40 @@ +# Backend Dockerfile for Shubble FastAPI +FROM python:3.13-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir --root-user-action=ignore -r requirements.txt + +# Copy application code +COPY backend/ ./backend/ +COPY shared/ ./shared/ +COPY alembic/ ./alembic/ +COPY alembic.ini . +COPY shubble.py . + +# Create non-root user +RUN useradd -m -u 1000 shubble && chown -R shubble:shubble /app +USER shubble + +# Expose port +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ + CMD python -c "import httpx; httpx.get('http://localhost:8000/api/locations', timeout=5.0)" + +# Run database migrations and start uvicorn +CMD alembic upgrade head && \ + uvicorn shubble:app \ + --host 0.0.0.0 \ + --port 8000 \ + --workers 2 \ + --log-level ${LOG_LEVEL:-info} diff --git a/docker/Dockerfile.frontend b/docker/Dockerfile.frontend new file mode 100644 index 00000000..1c4d29fd --- /dev/null +++ b/docker/Dockerfile.frontend @@ -0,0 +1,51 @@ +# Frontend Dockerfile for Shubble +FROM node:24-alpine AS builder + +WORKDIR /app + +# Copy shared resources needed for build +COPY shared/ ./shared/ + +# Copy frontend directory with all config files +COPY frontend/ ./frontend/ + +# Set working directory to frontend +WORKDIR /app/frontend + +# Install dependencies +RUN npm ci + +# Build the application with a placeholder that will be replaced at runtime +# This allows the backend URL to be configured via environment variable +ENV VITE_BACKEND_URL=__VITE_BACKEND_URL__ +RUN npm run build + +# Production stage with nginx +FROM nginx:alpine + +# Install gettext for envsubst +RUN apk add --no-cache gettext + +# Copy built files to nginx +COPY --from=builder /app/frontend/dist /usr/share/nginx/html + +# Copy nginx configuration +COPY docker/nginx-frontend.conf /etc/nginx/conf.d/default.conf + +# Create entrypoint script to substitute environment variables at runtime +COPY <<'EOF' /docker-entrypoint.sh +#!/bin/sh +set -e + +# Replace __VITE_BACKEND_URL__ placeholder with actual environment variable value +find /usr/share/nginx/html -type f -name "*.js" -exec sed -i "s|__VITE_BACKEND_URL__|${VITE_BACKEND_URL:-http://localhost:8000}|g" {} \; + +# Start nginx +exec nginx -g "daemon off;" +EOF + +RUN chmod +x /docker-entrypoint.sh + +EXPOSE 80 + +CMD ["/docker-entrypoint.sh"] diff --git a/docker/Dockerfile.test-client b/docker/Dockerfile.test-client new file mode 100644 index 00000000..45348e7b --- /dev/null +++ b/docker/Dockerfile.test-client @@ -0,0 +1,48 @@ +# Test Client Dockerfile for Mock Samsara UI +FROM node:24-alpine AS builder + +WORKDIR /app + +# Copy package files +COPY test-client/package*.json ./test-client/ + +# Install dependencies +WORKDIR /app/test-client +RUN npm ci + +# Copy source files +COPY test-client/ ./ + +# Build the application with a placeholder that will be replaced at runtime +ENV 
VITE_TEST_BACKEND_URL=__VITE_TEST_BACKEND_URL__ +RUN npm run build + +# Production stage with nginx +FROM nginx:alpine + +# Install gettext for envsubst +RUN apk add --no-cache gettext + +# Copy built files to nginx +COPY --from=builder /app/test-client/dist /usr/share/nginx/html + +# Copy nginx configuration +COPY docker/nginx-test-client.conf /etc/nginx/conf.d/default.conf + +# Create entrypoint script to substitute environment variables at runtime +COPY <<'EOF' /docker-entrypoint.sh +#!/bin/sh +set -e + +# Replace __VITE_TEST_BACKEND_URL__ placeholder with actual environment variable value +find /usr/share/nginx/html -type f -name "*.js" -exec sed -i "s|__VITE_TEST_BACKEND_URL__|${VITE_TEST_BACKEND_URL:-http://localhost:4000}|g" {} \; + +# Start nginx +exec nginx -g "daemon off;" +EOF + +RUN chmod +x /docker-entrypoint.sh + +EXPOSE 80 + +CMD ["/docker-entrypoint.sh"] diff --git a/docker/Dockerfile.test-server b/docker/Dockerfile.test-server new file mode 100644 index 00000000..9c5d511d --- /dev/null +++ b/docker/Dockerfile.test-server @@ -0,0 +1,37 @@ +# Test Server Dockerfile for Mock Samsara API +FROM python:3.13-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir --root-user-action=ignore -r requirements.txt + +# Copy application code +COPY backend/ ./backend/ +COPY shared/ ./shared/ +COPY test-server/ ./test-server/ + +# Create non-root user +RUN useradd -m -u 1000 shubble && chown -R shubble:shubble /app +USER shubble + +# Expose port +EXPOSE 4000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=10s --retries=3 \ + CMD python -c "import httpx; httpx.get('http://localhost:4000/api/shuttles', timeout=5.0)" + +# Run test server with uvicorn +CMD ["uvicorn", "test-server.server:app", \ + "--host", "0.0.0.0", \ + "--port", "4000", \ + "--workers", "1", \ + "--log-level", "info"] diff --git a/docker/Dockerfile.worker b/docker/Dockerfile.worker new file mode 100644 index 00000000..c2e7be9e --- /dev/null +++ b/docker/Dockerfile.worker @@ -0,0 +1,29 @@ +# Worker Dockerfile for Shubble Background Location Poller +FROM python:3.13-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . 
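+# requirements.txt is copied (and installed) before the application code so
+# Docker can cache the dependency layer; code-only changes skip the reinstall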
+RUN pip install --no-cache-dir --root-user-action=ignore -r requirements.txt + +# Copy application code +COPY backend/ ./backend/ +COPY shared/ ./shared/ + +# Create non-root user +RUN useradd -m -u 1000 shubble && chown -R shubble:shubble /app +USER shubble + +# Health check (worker should be running) +HEALTHCHECK --interval=60s --timeout=10s --start-period=10s --retries=3 \ + CMD pgrep -f "python -m backend.worker" > /dev/null || exit 1 + +# Run worker +CMD ["python", "-m", "backend.worker"] diff --git a/docker/nginx-frontend.conf b/docker/nginx-frontend.conf new file mode 100644 index 00000000..c6e9182b --- /dev/null +++ b/docker/nginx-frontend.conf @@ -0,0 +1,26 @@ +server { + listen 80; + server_name _; + root /usr/share/nginx/html; + index index.html; + + # Enable gzip compression + gzip on; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript; + + # Handle SPA routing + location / { + try_files $uri $uri/ /index.html; + } + + # Cache static assets + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ { + expires 1y; + add_header Cache-Control "public, immutable"; + } + + # Security headers + add_header X-Frame-Options "SAMEORIGIN" always; + add_header X-Content-Type-Options "nosniff" always; + add_header X-XSS-Protection "1; mode=block" always; +} diff --git a/docker/nginx-test-client.conf b/docker/nginx-test-client.conf new file mode 100644 index 00000000..c6e9182b --- /dev/null +++ b/docker/nginx-test-client.conf @@ -0,0 +1,26 @@ +server { + listen 80; + server_name _; + root /usr/share/nginx/html; + index index.html; + + # Enable gzip compression + gzip on; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript; + + # Handle SPA routing + location / { + try_files $uri $uri/ /index.html; + } + + # Cache static assets + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ { + expires 1y; + add_header Cache-Control "public, immutable"; + } + + # Security headers + add_header X-Frame-Options "SAMEORIGIN" always; + add_header X-Content-Type-Options "nosniff" always; + add_header X-XSS-Protection "1; mode=block" always; +} diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md new file mode 100644 index 00000000..bc37b35b --- /dev/null +++ b/docs/INSTALLATION.md @@ -0,0 +1,298 @@ +# Installation Guide + +This guide explains how to set up and run the Shubble development environment. 
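+
+As a quick sanity check before you begin, you can confirm the core tools are available (a minimal sketch; command names assume standard installations):
+
+```bash
+docker --version      # Docker Engine with the Compose plugin
+node --version        # Node.js 24+ (host setup only)
+python3 --version     # Python 3.13+ (host setup only)
+psql --version        # PostgreSQL 17+ client (host setup only)
+redis-cli --version   # Redis 7+ client (host setup only)
+```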
+ +## Architecture Overview + +The codebase is organized into three main areas: + +- **Frontend** - Main React application for end users +- **Backend** - FastAPI server (async), PostgreSQL database, Redis cache, and background worker +- **Test** - Mock Samsara API server and test client for development/testing + +## Running Services: Docker vs Host + +For each area, you have two options for running the services: + +### Option 1: Dockerized (Recommended for Quick Start) + +**Advantages:** +- Zero local setup required (no Node.js, Python, PostgreSQL, Redis installation) +- Consistent environment across all developers +- Easy to run multiple profiles without conflicts +- Isolated from your local system + +**Disadvantages:** +- Slower hot reload and rebuild times +- More difficult to debug (can't easily attach debuggers) +- Changes require container rebuilds +- Less visibility into the running processes + +**When to use:** +- First time setup +- Running services you're not actively developing +- Testing the full stack together +- CI/CD environments + +### Option 2: Host (Recommended for Active Development) + +**Advantages:** +- Instant hot reload during development +- Easy debugging with IDE integration +- Direct access to logs and processes +- Faster iteration cycle +- Can use local development tools + +**Disadvantages:** +- Requires installing dependencies (Node.js, Python, PostgreSQL, Redis) +- Potential version conflicts with other projects +- Manual setup required +- Environment differences between developers + +**When to use:** +- Actively developing/debugging a specific area +- Writing new features or fixing bugs +- Need to use debugging tools +- Frequent code changes + +## Recommendation + +**Run services on host for areas you're actively working on, and use Docker for the rest.** + +For example: +- Working on the frontend? Run frontend on host, backend in Docker +- Working on the backend? Run backend on host, frontend in Docker +- Testing integration? Run everything in Docker + +## Quick Start with Docker + +### Prerequisites + +- Docker and Docker Compose installed +- Copy `.env.example` to `.env` and configure as needed + +### Running Services + +```bash +# Run only backend services (API, database, Redis, worker) +docker compose --profile backend up + +# Run frontend (includes backend automatically) +docker compose --profile frontend up + +# Run test services (mock Samsara API and test client) +docker compose --profile test up + +# Run multiple profiles +docker compose --profile backend --profile test up + +# Run everything +docker compose --profile "*" up +``` + +### Stopping Services + +```bash +# Stop all running services +docker compose down + +# Stop and remove volumes (clean slate) +docker compose down -v +``` + +## Running on Host + +### Prerequisites + +**All environments:** +- Node.js 24+ +- Python 3.13+ +- PostgreSQL 17+ +- Redis 7+ + +### Backend Setup + +1. **Install Python dependencies:** + ```bash + pip install -r requirements.txt + ``` + +2. **Set up environment variables:** + ```bash + cp .env.example .env + # Edit .env with your local database/Redis URLs + ``` + +3. **Start PostgreSQL and Redis:** + ```bash + # Option 1: Run just database services in Docker + docker compose up postgres redis + + # Option 2: Use local installations + # (configure DATABASE_URL and REDIS_URL in .env accordingly) + ``` + +4. **Run database migrations:** + ```bash + alembic upgrade head + ``` + +5. 
**Start the backend server:** + ```bash + uvicorn shubble:app --host 0.0.0.0 --port 8000 --reload + ``` + +6. **Start the worker (in a separate terminal):** + ```bash + python -m backend.worker + ``` + +### Frontend Setup + +1. **Install dependencies:** + ```bash + cd frontend + npm install + ``` + +2. **Set up environment variables:** + ```bash + # In .env, set: + VITE_BACKEND_URL=http://localhost:8000 + ``` + +3. **Start the development server:** + ```bash + cd frontend + npm run dev + ``` + +4. **Access the application:** + - Frontend: http://localhost:3000 + - Backend API: http://localhost:8000 + +### Test Services Setup + +1. **Install test-client dependencies:** + ```bash + cd test-client + npm install + ``` + +2. **Set up environment variables:** + ```bash + # In .env, set: + VITE_TEST_BACKEND_URL=http://localhost:4000 + ``` + +3. **Start the test server (in one terminal):** + ```bash + cd test-server + python server.py + ``` + +4. **Start the test client (in another terminal):** + ```bash + cd test-client + npm run dev + ``` + +5. **Access the test services:** + - Test Client: http://localhost:5174 + - Test Server API: http://localhost:4000 + +## Mixed Setup (Recommended) + +The most common development setup is to run some services on host and others in Docker: + +### Example: Frontend Development + +```bash +# Terminal 1: Run backend in Docker +docker compose --profile backend up + +# Terminal 2: Run frontend on host +cd frontend +npm run dev +``` + +### Example: Backend Development + +```bash +# Terminal 1: Run database services in Docker +docker compose up postgres redis + +# Terminal 2: Run backend on host +uvicorn shubble:app --host 0.0.0.0 --port 8000 --reload + +# Terminal 3: Run worker on host +python -m backend.worker + +# Terminal 4 (optional): Run frontend in Docker +docker compose --profile frontend up +``` + +## Environment Variables + +Key environment variables (see `.env.example` for full list): + +### Service URLs +- `FRONTEND_URL` - Main frontend URL +- `VITE_BACKEND_URL` - Backend API URL for frontend +- `TEST_FRONTEND_URL` - Test client URL +- `VITE_TEST_BACKEND_URL` - Test server API URL for test client + +### Database & Cache +- `DATABASE_URL` - PostgreSQL connection string +- `REDIS_URL` - Redis connection string + +### Service Ports +- `FRONTEND_PORT` - Port for frontend (default: 3000) +- `BACKEND_PORT` - Port for backend API (default: 8000) +- `TEST_FRONTEND_PORT` - Port for test client (default: 5174) +- `TEST_BACKEND_PORT` - Port for test server (default: 4000) + +## Troubleshooting + +### Port Conflicts + +If you see "port already in use" errors: + +```bash +# Check what's using a port +lsof -i :8000 + +# Stop Docker services +docker compose down + +# Change ports in .env if needed +``` + +### Database Issues + +```bash +# Reset the database +docker compose down -v +docker compose up postgres + +# Run migrations +alembic upgrade head +``` + +### Dependency Issues + +```bash +# Clean install for Node.js (frontend) +cd frontend +rm -rf node_modules package-lock.json +npm install + +# Clean install for Python +pip install --force-reinstall -r requirements.txt +``` + +## Next Steps + +- See the main README for project overview +- Check individual service directories for specific documentation +- Review `.env.example` for all configuration options diff --git a/INTRO_TO_UI.md b/docs/more/INTRO_TO_UI.md similarity index 88% rename from INTRO_TO_UI.md rename to docs/more/INTRO_TO_UI.md index 492acb73..6003a089 100644 --- a/INTRO_TO_UI.md +++ 
b/docs/more/INTRO_TO_UI.md @@ -31,20 +31,21 @@ For developers, this makes the UI a quick and intuitive way to test, debug, and ## How to Access the UI Verify that all [setup instructions](CONTRIBUTING.md) have been completed first. -To view the UI while you're developing, you’ll start the frontend's development server. Use the following steps below: +To view the UI while you're developing, you'll start the frontend's development server. Use the following steps below: #### 1. Run the command in your terminal: - In the project’s root folder (the main `shubble` folder) run: + From the project's root folder, navigate to the `frontend` directory and run: ```bash + cd frontend npm run dev ``` This command launches a development server so you can run and test your frontend application locally. #### 2. Wait for the local address to appear: - Once the dev server starts, your terminal will show a link where the UI is running. It usually looks like: + Once the dev server starts, your terminal will show a link where the UI is running. It usually looks like: ```bash - http://localhost:5173/ - ``` - The port your development server runs on may differ based on the project’s configuration and whichever ports are free on your system. + http://localhost:3000/ + ``` + The port your development server runs on may differ based on the project's configuration and whichever ports are free on your system. Open the link: - **Mac:** `Command + Click` the link the terminal prints. @@ -102,16 +103,18 @@ To learn how to use Figma click [HERE](https://www.youtube.com/watch?v=jQ1sfKIl5 ## Overview of the UI Layout -The UI for this project is built using a React + TypeScript front-end located in the `client/` directory. Inside `client/src/`, the layout is divided into logical folders such as `components/` for reusable UI elements, `pages/` for full page views, `styles/` for global and modular CSS, and `data/` or `types/` for shared utilities and type definitions. The main application entry point (`main.tsx`) mounts the UI, while `App.tsx` defines the top-level structure and routing. +The UI for this project is built using a React + TypeScript front-end located in the `frontend/` directory. Inside `frontend/src/`, the layout is divided into logical folders such as `components/` for reusable UI elements, `locations/`, `schedule/`, `dashboard/`, and `about/` for page views, `styles/` for global and modular CSS, and `shared/` or `types/` for shared data and type definitions. The main application entry point (`main.tsx`) mounts the UI, while `App.tsx` defines the top-level structure and routing. -From a developer perspective, this layout makes it easy to locate the code responsible for visual components, page-level logic, and shared UI resources. The separation into components and pages also encourages modularity — most UI changes happen inside `client/src/components` or `client/src/pages`, keeping the rest of the system clean and maintainable. +From a developer perspective, this layout makes it easy to locate the code responsible for visual components, page-level logic, and shared UI resources. The separation into components and pages also encourages modularity — most UI changes happen inside `frontend/src/components` or the page-specific directories, keeping the rest of the system clean and maintainable. #### Where to Start Editing the UI If you are new to the UI codebase, a good place to start is: -- `client/src/pages/` — Contains page-level components that represent full views. 
-- `client/src/components/` — Contains reusable UI components shared across pages. +- `frontend/src/locations/` — Live location tracking page components +- `frontend/src/schedule/` — Schedule display page components +- `frontend/src/dashboard/` — Data dashboard page components +- `frontend/src/components/` — Contains reusable UI components shared across pages. For small changes or experiments, modifying an existing component in `components/` is usually the safest place to begin. -For larger features or layout changes, start with the relevant page in `pages/`. +For larger features or layout changes, start with the relevant page directory (for example, `locations/` or `schedule/`). @@ -124,7 +127,7 @@ For small changes or experiments, modifying an existing component in `components/` All UI components are written as React functional components rather than class-based components. This approach aligns with modern React best practices and allows the use of hooks for state management and side effects. - **Centralized Type Definitions** - Major reusable types—especially those related to domain concepts such as Stops or Schedules—are defined in dedicated type files under `client/src/ts/types`. Centralizing these definitions ensures consistency across the UI and reduces duplication. + Major reusable types—especially those related to domain concepts such as Stops or Schedules—are defined in dedicated type files under `frontend/src/types`. Centralizing these definitions ensures consistency across the UI and reduces duplication. - **Component Reusability and Modularity** UI logic is broken down into small, reusable components whenever possible. Components are designed to do one thing well and be composed together, rather than embedding large amounts of logic in a single file. @@ -162,8 +165,8 @@ Following these practices helps ensure that the UI remains consistent, approacha Below are some common issues developers may encounter when accessing or running the UI, along with suggested solutions. -- **UI does not load in the browser** - Ensure the development server is running and that no errors are shown in the terminal. If the server failed to start, try running `npm install` to ensure all dependencies are installed. +- **UI does not load in the browser** + Ensure the development server is running and that no errors are shown in the terminal. If the server failed to start, try running `npm install` from the `frontend` directory to ensure all dependencies are installed. - **Command `npm run dev` fails** Check the error message printed in the terminal. Common causes include missing dependencies, an incompatible Node.js version, or syntax errors in the code. Restarting the terminal and re-running the command can sometimes resolve environment issues.
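+
+  For example, to rule out the two most common causes (run from the `frontend` directory; assumes a standard Node.js install):
+
+  ```bash
+  # Confirm the Node.js version meets the project requirement (24+)
+  node --version
+
+  # Clean-reinstall dependencies if anything is missing or corrupted
+  rm -rf node_modules package-lock.json
+  npm install
+  ```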
diff --git a/web-development-intro.md b/docs/more/web-development-intro.md similarity index 100% rename from web-development-intro.md rename to docs/more/web-development-intro.md diff --git a/client/.gitignore b/frontend/.gitignore similarity index 100% rename from client/.gitignore rename to frontend/.gitignore diff --git a/client/README.md b/frontend/README.md similarity index 100% rename from client/README.md rename to frontend/README.md diff --git a/frontend/data/aggregated_schedule.json b/frontend/data/aggregated_schedule.json new file mode 100644 index 00000000..b6881296 --- /dev/null +++ b/frontend/data/aggregated_schedule.json @@ -0,0 +1,987 @@ +[ + { + "NORTH": [ + "9:00 AM", + "9:20 AM", + "9:40 AM", + "10:00 AM", + "10:20 AM", + "10:40 AM", + "11:00 AM", + "11:20 AM", + "11:40 AM", + "12:00 PM", + "12:10 PM", + "12:30 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", + "3:20 PM", + "3:40 PM", + "4:00 PM", + "4:10 PM", + "4:30 PM", + "4:50 PM", + "5:10 PM", + "5:30 PM", + "5:50 PM", + "6:10 PM", + "6:30 PM", + "6:50 PM", + "7:10 PM", + "7:30 PM", + "8:00 PM" + ], + "WEST": [ + "9:00 AM", + "9:20 AM", + "9:40 AM", + "10:00 AM", + "10:20 AM", + "10:40 AM", + "11:00 AM", + "11:20 AM", + "11:40 AM", + "12:00 PM", + "12:10 PM", + "12:30 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", + "3:20 PM", + "3:40 PM", + "4:00 PM", + "4:10 PM", + "4:30 PM", + "4:50 PM", + "5:10 PM", + "5:30 PM", + "5:50 PM", + "6:10 PM", + "6:30 PM", + "6:50 PM", + "7:10 PM", + "7:30 PM", + "8:00 PM" + ] + }, + { + "WEST": [ + "7:00 AM", + "7:10 AM", + "7:25 AM", + "7:35 AM", + "7:50 AM", + "8:00 AM", + "8:15 AM", + "8:25 AM", + "8:40 AM", + "8:50 AM", + "9:05 AM", + "9:15 AM", + "9:30 AM", + "9:35 AM", + "9:40 AM", + "9:55 AM", + "10:00 AM", + "10:05 AM", + "10:20 AM", + "10:25 AM", + "10:30 AM", + "10:45 AM", + "10:50 AM", + "10:55 AM", + "11:10 AM", + "11:20 AM", + "11:35 AM", + "11:45 AM", + "12:00 PM", + "12:10 PM", + "12:25 PM", + "12:35 PM", + "12:50 PM", + "1:00 PM", + "1:15 PM", + "1:25 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:05 PM", + "2:15 PM", + "2:25 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "2:55 PM", + "3:05 PM", + "3:15 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:45 PM", + "3:55 PM", + "4:00 PM", + "4:15 PM", + "4:25 PM", + "4:40 PM", + "4:50 PM", + "5:05 PM", + "5:15 PM", + "5:30 PM", + "5:40 PM", + "5:55 PM", + "6:05 PM", + "6:20 PM", + "6:30 PM", + "7:20 PM", + "7:45 PM", + "8:10 PM", + "8:35 PM", + "9:00 PM", + "9:25 PM", + "9:50 PM", + "10:15 PM", + "10:40 PM", + "11:05 PM", + "11:30 PM", + "12:00 AM" + ], + "NORTH": [ + "7:00 AM", + "7:10 AM", + "7:20 AM", + "7:30 AM", + "7:40 AM", + "7:50 AM", + "8:00 AM", + "8:10 AM", + "8:20 AM", + "8:30 AM", + "8:40 AM", + "8:50 AM", + "9:00 AM", + "9:10 AM", + "9:20 AM", + "9:30 AM", + "9:40 AM", + "9:50 AM", + "10:00 AM", + "10:10 AM", + "10:20 AM", + "10:30 AM", + "10:40 AM", + "10:50 AM", + "11:00 AM", + "11:10 AM", + "11:20 AM", + "11:30 AM", + "11:40 AM", + "11:50 AM", + "12:00 PM", + "12:10 PM", + "12:20 PM", + "12:30 PM", + "12:40 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", 
+ "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:50 PM", + "4:00 PM", + "4:20 PM", + "4:40 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:30 PM", + "7:50 PM", + "8:10 PM", + "8:30 PM", + "8:50 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "12:00 AM" + ] + }, + { + "WEST": [ + "7:00 AM", + "7:10 AM", + "7:25 AM", + "7:35 AM", + "7:50 AM", + "8:00 AM", + "8:15 AM", + "8:25 AM", + "8:40 AM", + "8:50 AM", + "9:05 AM", + "9:15 AM", + "9:30 AM", + "9:35 AM", + "9:40 AM", + "9:55 AM", + "10:00 AM", + "10:05 AM", + "10:20 AM", + "10:25 AM", + "10:30 AM", + "10:45 AM", + "10:50 AM", + "10:55 AM", + "11:10 AM", + "11:20 AM", + "11:35 AM", + "11:45 AM", + "12:00 PM", + "12:10 PM", + "12:25 PM", + "12:35 PM", + "12:50 PM", + "1:00 PM", + "1:15 PM", + "1:25 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:05 PM", + "2:15 PM", + "2:25 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "2:55 PM", + "3:05 PM", + "3:15 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:45 PM", + "3:55 PM", + "4:00 PM", + "4:15 PM", + "4:25 PM", + "4:40 PM", + "4:50 PM", + "5:05 PM", + "5:15 PM", + "5:30 PM", + "5:40 PM", + "5:55 PM", + "6:05 PM", + "6:20 PM", + "6:30 PM", + "7:20 PM", + "7:45 PM", + "8:10 PM", + "8:35 PM", + "9:00 PM", + "9:25 PM", + "9:50 PM", + "10:15 PM", + "10:40 PM", + "11:05 PM", + "11:30 PM", + "12:00 AM" + ], + "NORTH": [ + "7:00 AM", + "7:10 AM", + "7:20 AM", + "7:30 AM", + "7:40 AM", + "7:50 AM", + "8:00 AM", + "8:10 AM", + "8:20 AM", + "8:30 AM", + "8:40 AM", + "8:50 AM", + "9:00 AM", + "9:10 AM", + "9:20 AM", + "9:30 AM", + "9:40 AM", + "9:50 AM", + "10:00 AM", + "10:10 AM", + "10:20 AM", + "10:30 AM", + "10:40 AM", + "10:50 AM", + "11:00 AM", + "11:10 AM", + "11:20 AM", + "11:30 AM", + "11:40 AM", + "11:50 AM", + "12:00 PM", + "12:10 PM", + "12:20 PM", + "12:30 PM", + "12:40 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:50 PM", + "4:00 PM", + "4:20 PM", + "4:40 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:30 PM", + "7:50 PM", + "8:10 PM", + "8:30 PM", + "8:50 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "12:00 AM" + ] + }, + { + "WEST": [ + "7:00 AM", + "7:10 AM", + "7:25 AM", + "7:35 AM", + "7:50 AM", + "8:00 AM", + "8:15 AM", + "8:25 AM", + "8:40 AM", + "8:50 AM", + "9:05 AM", + "9:15 AM", + "9:30 AM", + "9:35 AM", + "9:40 AM", + "9:55 AM", + "10:00 AM", + "10:05 AM", + "10:20 AM", + "10:25 AM", + "10:30 AM", + "10:45 AM", + "10:50 AM", + "10:55 AM", + "11:10 AM", + "11:20 AM", + "11:35 AM", + "11:45 AM", + "12:00 PM", + "12:10 PM", + "12:25 PM", + "12:35 PM", + "12:50 PM", + "1:00 PM", + "1:15 PM", + "1:25 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:05 PM", + "2:15 PM", + "2:25 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "2:55 PM", + "3:05 PM", + "3:15 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:45 PM", + "3:55 PM", + "4:00 PM", + "4:15 PM", + "4:25 PM", + "4:40 PM", + "4:50 PM", + "5:05 PM", + "5:15 PM", + "5:30 PM", + "5:40 PM", + "5:55 PM", + "6:05 PM", + "6:20 PM", + "6:30 PM", + "7:20 PM", + "7:45 PM", + "8:10 PM", + "8:35 PM", + "9:00 PM", + "9:25 PM", + "9:50 PM", + "10:15 PM", + "10:40 PM", + "11:05 PM", + "11:30 PM", + "12:00 AM" + ], + "NORTH": [ + "7:00 AM", + 
"7:10 AM", + "7:20 AM", + "7:30 AM", + "7:40 AM", + "7:50 AM", + "8:00 AM", + "8:10 AM", + "8:20 AM", + "8:30 AM", + "8:40 AM", + "8:50 AM", + "9:00 AM", + "9:10 AM", + "9:20 AM", + "9:30 AM", + "9:40 AM", + "9:50 AM", + "10:00 AM", + "10:10 AM", + "10:20 AM", + "10:30 AM", + "10:40 AM", + "10:50 AM", + "11:00 AM", + "11:10 AM", + "11:20 AM", + "11:30 AM", + "11:40 AM", + "11:50 AM", + "12:00 PM", + "12:10 PM", + "12:20 PM", + "12:30 PM", + "12:40 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:50 PM", + "4:00 PM", + "4:20 PM", + "4:40 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:30 PM", + "7:50 PM", + "8:10 PM", + "8:30 PM", + "8:50 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "12:00 AM" + ] + }, + { + "WEST": [ + "7:00 AM", + "7:10 AM", + "7:25 AM", + "7:35 AM", + "7:50 AM", + "8:00 AM", + "8:15 AM", + "8:25 AM", + "8:40 AM", + "8:50 AM", + "9:05 AM", + "9:15 AM", + "9:30 AM", + "9:35 AM", + "9:40 AM", + "9:55 AM", + "10:00 AM", + "10:05 AM", + "10:20 AM", + "10:25 AM", + "10:30 AM", + "10:45 AM", + "10:50 AM", + "10:55 AM", + "11:10 AM", + "11:20 AM", + "11:35 AM", + "11:45 AM", + "12:00 PM", + "12:10 PM", + "12:25 PM", + "12:35 PM", + "12:50 PM", + "1:00 PM", + "1:15 PM", + "1:25 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:05 PM", + "2:15 PM", + "2:25 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "2:55 PM", + "3:05 PM", + "3:15 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:45 PM", + "3:55 PM", + "4:00 PM", + "4:15 PM", + "4:25 PM", + "4:40 PM", + "4:50 PM", + "5:05 PM", + "5:15 PM", + "5:30 PM", + "5:40 PM", + "5:55 PM", + "6:05 PM", + "6:20 PM", + "6:30 PM", + "7:20 PM", + "7:45 PM", + "8:10 PM", + "8:35 PM", + "9:00 PM", + "9:25 PM", + "9:50 PM", + "10:15 PM", + "10:40 PM", + "11:05 PM", + "11:30 PM", + "12:00 AM" + ], + "NORTH": [ + "7:00 AM", + "7:10 AM", + "7:20 AM", + "7:30 AM", + "7:40 AM", + "7:50 AM", + "8:00 AM", + "8:10 AM", + "8:20 AM", + "8:30 AM", + "8:40 AM", + "8:50 AM", + "9:00 AM", + "9:10 AM", + "9:20 AM", + "9:30 AM", + "9:40 AM", + "9:50 AM", + "10:00 AM", + "10:10 AM", + "10:20 AM", + "10:30 AM", + "10:40 AM", + "10:50 AM", + "11:00 AM", + "11:10 AM", + "11:20 AM", + "11:30 AM", + "11:40 AM", + "11:50 AM", + "12:00 PM", + "12:10 PM", + "12:20 PM", + "12:30 PM", + "12:40 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:50 PM", + "4:00 PM", + "4:20 PM", + "4:40 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:30 PM", + "7:50 PM", + "8:10 PM", + "8:30 PM", + "8:50 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "12:00 AM" + ] + }, + { + "WEST": [ + "7:00 AM", + "7:10 AM", + "7:25 AM", + "7:35 AM", + "7:50 AM", + "8:00 AM", + "8:15 AM", + "8:25 AM", + "8:40 AM", + "8:50 AM", + "9:05 AM", + "9:15 AM", + "9:30 AM", + "9:35 AM", + "9:40 AM", + "9:55 AM", + "10:00 AM", + "10:05 AM", + "10:20 AM", + "10:25 AM", + "10:30 AM", + "10:45 AM", + "10:50 AM", + "10:55 AM", + "11:10 AM", + "11:20 AM", + "11:35 AM", + "11:45 AM", + "12:00 PM", + "12:10 PM", + "12:25 PM", 
+ "12:35 PM", + "12:50 PM", + "1:00 PM", + "1:15 PM", + "1:25 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:05 PM", + "2:15 PM", + "2:25 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "2:55 PM", + "3:05 PM", + "3:15 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:45 PM", + "3:55 PM", + "4:00 PM", + "4:15 PM", + "4:25 PM", + "4:40 PM", + "4:50 PM", + "5:05 PM", + "5:15 PM", + "5:30 PM", + "5:40 PM", + "5:55 PM", + "6:05 PM", + "6:20 PM", + "6:30 PM", + "7:20 PM", + "7:45 PM", + "8:10 PM", + "8:35 PM", + "9:00 PM", + "9:25 PM", + "9:50 PM", + "10:15 PM", + "10:40 PM", + "11:05 PM", + "11:30 PM", + "12:00 AM" + ], + "NORTH": [ + "7:00 AM", + "7:10 AM", + "7:20 AM", + "7:30 AM", + "7:40 AM", + "7:50 AM", + "8:00 AM", + "8:10 AM", + "8:20 AM", + "8:30 AM", + "8:40 AM", + "8:50 AM", + "9:00 AM", + "9:10 AM", + "9:20 AM", + "9:30 AM", + "9:40 AM", + "9:50 AM", + "10:00 AM", + "10:10 AM", + "10:20 AM", + "10:30 AM", + "10:40 AM", + "10:50 AM", + "11:00 AM", + "11:10 AM", + "11:20 AM", + "11:30 AM", + "11:40 AM", + "11:50 AM", + "12:00 PM", + "12:10 PM", + "12:20 PM", + "12:30 PM", + "12:40 PM", + "12:50 PM", + "1:00 PM", + "1:10 PM", + "1:20 PM", + "1:30 PM", + "1:40 PM", + "1:50 PM", + "2:00 PM", + "2:10 PM", + "2:20 PM", + "2:30 PM", + "2:40 PM", + "2:50 PM", + "3:00 PM", + "3:10 PM", + "3:20 PM", + "3:30 PM", + "3:40 PM", + "3:50 PM", + "4:00 PM", + "4:20 PM", + "4:40 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:30 PM", + "7:50 PM", + "8:10 PM", + "8:30 PM", + "8:50 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "12:00 AM" + ] + }, + { + "NORTH": [ + "9:00 AM", + "9:20 AM", + "9:40 AM", + "10:00 AM", + "10:20 AM", + "10:40 AM", + "11:00 AM", + "11:20 AM", + "11:40 AM", + "12:00 PM", + "1:30 PM", + "1:50 PM", + "2:10 PM", + "2:30 PM", + "2:50 PM", + "3:10 PM", + "3:30 PM", + "3:50 PM", + "4:10 PM", + "4:30 PM", + "4:50 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:00 PM", + "7:20 PM", + "7:40 PM", + "8:40 PM", + "9:00 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "11:45 PM", + "12:00 AM" + ], + "WEST": [ + "9:00 AM", + "9:20 AM", + "9:40 AM", + "10:00 AM", + "10:20 AM", + "10:40 AM", + "11:00 AM", + "11:20 AM", + "11:40 AM", + "12:00 PM", + "1:30 PM", + "1:50 PM", + "2:10 PM", + "2:30 PM", + "2:50 PM", + "3:10 PM", + "3:30 PM", + "3:50 PM", + "4:10 PM", + "4:30 PM", + "4:50 PM", + "5:00 PM", + "5:20 PM", + "5:40 PM", + "6:00 PM", + "6:20 PM", + "6:40 PM", + "7:00 PM", + "7:20 PM", + "7:40 PM", + "8:40 PM", + "9:00 PM", + "9:10 PM", + "9:30 PM", + "9:50 PM", + "10:10 PM", + "10:30 PM", + "10:50 PM", + "11:10 PM", + "11:30 PM", + "11:45 PM", + "12:00 AM" + ] + } +] \ No newline at end of file diff --git a/eslint.config.js b/frontend/eslint.config.js similarity index 94% rename from eslint.config.js rename to frontend/eslint.config.js index 09250237..804588d3 100644 --- a/eslint.config.js +++ b/frontend/eslint.config.js @@ -8,7 +8,7 @@ import { defineConfig, globalIgnores } from 'eslint/config' export default defineConfig([ globalIgnores(['dist']), { - files: ['client/**/*.{ts,tsx}'], + files: ['src/**/*.{ts,tsx}'], extends: [ js.configs.recommended, tseslint.configs.recommended, diff --git a/client/index.html b/frontend/index.html similarity index 100% rename from client/index.html rename to frontend/index.html diff --git a/package-lock.json b/frontend/package-lock.json 
similarity index 100% rename from package-lock.json rename to frontend/package-lock.json diff --git a/package.json b/frontend/package.json similarity index 82% rename from package.json rename to frontend/package.json index 9131bf02..16f17473 100644 --- a/package.json +++ b/frontend/package.json @@ -4,8 +4,8 @@ "version": "0.0.0", "type": "module", "scripts": { - "dev": "node data/parseSchedule.js && shx cp -r data/ client/src/ && vite", - "build": "node data/parseSchedule.js && shx cp -r data/ client/src/ && vite build", + "dev": "node ../shared/parseSchedule.js && shx cp -r ../shared/ src/ && vite", + "build": "node ../shared/parseSchedule.js && shx cp -r ../shared/ src/ && vite build", "lint": "eslint .", "preview": "vite preview" }, diff --git a/client/public/RPIStudentUnionv2.png b/frontend/public/RPIStudentUnionv2.png similarity index 100% rename from client/public/RPIStudentUnionv2.png rename to frontend/public/RPIStudentUnionv2.png diff --git a/client/public/favicon.ico b/frontend/public/favicon.ico similarity index 100% rename from client/public/favicon.ico rename to frontend/public/favicon.ico diff --git a/client/public/map-marker.png b/frontend/public/map-marker.png similarity index 100% rename from client/public/map-marker.png rename to frontend/public/map-marker.png diff --git a/client/public/privacy-policy.txt b/frontend/public/privacy-policy.txt similarity index 100% rename from client/public/privacy-policy.txt rename to frontend/public/privacy-policy.txt diff --git a/client/public/robots.txt b/frontend/public/robots.txt similarity index 100% rename from client/public/robots.txt rename to frontend/public/robots.txt diff --git a/client/public/shubble192.png b/frontend/public/shubble192.png similarity index 100% rename from client/public/shubble192.png rename to frontend/public/shubble192.png diff --git a/client/public/shubble20.png b/frontend/public/shubble20.png similarity index 100% rename from client/public/shubble20.png rename to frontend/public/shubble20.png diff --git a/client/public/shubble2048.png b/frontend/public/shubble2048.png similarity index 100% rename from client/public/shubble2048.png rename to frontend/public/shubble2048.png diff --git a/client/public/shubble40.png b/frontend/public/shubble40.png similarity index 100% rename from client/public/shubble40.png rename to frontend/public/shubble40.png diff --git a/client/public/shubble512.png b/frontend/public/shubble512.png similarity index 100% rename from client/public/shubble512.png rename to frontend/public/shubble512.png diff --git a/client/public/shubble_basic.svg b/frontend/public/shubble_basic.svg similarity index 100% rename from client/public/shubble_basic.svg rename to frontend/public/shubble_basic.svg diff --git a/frontend/src/.gitignore b/frontend/src/.gitignore new file mode 100644 index 00000000..d1d584b3 --- /dev/null +++ b/frontend/src/.gitignore @@ -0,0 +1 @@ +shared/ diff --git a/client/src/App.css b/frontend/src/App.css similarity index 100% rename from client/src/App.css rename to frontend/src/App.css diff --git a/client/src/App.tsx b/frontend/src/App.tsx similarity index 84% rename from client/src/App.tsx rename to frontend/src/App.tsx index 93b1e708..76dace0e 100644 --- a/client/src/App.tsx +++ b/frontend/src/App.tsx @@ -4,17 +4,17 @@ import { Route, } from 'react-router'; import './App.css'; -import LiveLocation from './pages/LiveLocation'; -import Schedule from './components/Schedule'; -import About from './pages/About'; -import Data from './pages/Data'; -import MapKitMap from 
'./components/MapKitMap'; -import rawRouteData from './data/routes.json'; +import LiveLocation from './locations/LiveLocation'; +import Schedule from './schedule/Schedule'; +import About from './about/About'; +import Data from './dashboard/Dashboard'; +import MapKitMap from './locations/components/MapKitMap'; +import rawRouteData from './shared/routes.json'; import { useState, useEffect } from "react"; -import type { ShuttleRouteData } from './ts/types/route'; +import type { ShuttleRouteData } from './types/route'; import Navigation from './components/Navigation'; import ErrorBoundary from './components/ErrorBoundary'; -import config from "./ts/config"; +import config from "./utils/config"; function App() { const [selectedRoute, setSelectedRoute] = useState(null); diff --git a/client/src/pages/About.tsx b/frontend/src/about/About.tsx similarity index 97% rename from client/src/pages/About.tsx rename to frontend/src/about/About.tsx index 48f38b3c..6f9e6a0c 100644 --- a/client/src/pages/About.tsx +++ b/frontend/src/about/About.tsx @@ -1,4 +1,4 @@ -import '../styles/About.css'; +import './styles/About.css'; import { useState, useEffect, @@ -30,7 +30,7 @@ export default function About() {

Track RPI shuttles in real time and view schedules seamlessly with Shubble

Shubble is an open source project under the Rensselaer Center for Open Source (RCOS).
- Have an idea to improve it? Contributions are welcome!
+ Have an idea to improve it? Contributions are welcome!
Visit our Github Repository to learn more.
Interested in Shubble's data? Take a look at our diff --git a/client/src/styles/About.css b/frontend/src/about/styles/About.css similarity index 100% rename from client/src/styles/About.css rename to frontend/src/about/styles/About.css diff --git a/client/src/components/AnnouncementBanner.tsx b/frontend/src/components/AnnouncementBanner.tsx similarity index 95% rename from client/src/components/AnnouncementBanner.tsx rename to frontend/src/components/AnnouncementBanner.tsx index 9bbb1186..98046d62 100644 --- a/client/src/components/AnnouncementBanner.tsx +++ b/frontend/src/components/AnnouncementBanner.tsx @@ -1,6 +1,6 @@ -import '../styles/AnnouncementBanner.css'; -import announcementsData from '../data/announcements.json'; -import type { Announcement, AnnouncementsData } from '../ts/types/announcement'; +import './styles/AnnouncementBanner.css'; +import announcementsData from '../shared/announcements.json'; +import type { Announcement, AnnouncementsData } from '../types/announcement'; import type { ReactNode } from 'react'; type BannerType = 'info' | 'warning' | 'error'; diff --git a/client/src/components/ErrorBoundary.tsx b/frontend/src/components/ErrorBoundary.tsx similarity index 100% rename from client/src/components/ErrorBoundary.tsx rename to frontend/src/components/ErrorBoundary.tsx diff --git a/client/src/components/Feedback.tsx b/frontend/src/components/Feedback.tsx similarity index 95% rename from client/src/components/Feedback.tsx rename to frontend/src/components/Feedback.tsx index 5a956804..81014311 100644 --- a/client/src/components/Feedback.tsx +++ b/frontend/src/components/Feedback.tsx @@ -1,4 +1,4 @@ -import '../styles/Feedback.css'; +import './styles/Feedback.css'; export default function Feedback() { return ( diff --git a/client/src/components/Navigation.tsx b/frontend/src/components/Navigation.tsx similarity index 99% rename from client/src/components/Navigation.tsx rename to frontend/src/components/Navigation.tsx index b52d8b36..f2ae8688 100644 --- a/client/src/components/Navigation.tsx +++ b/frontend/src/components/Navigation.tsx @@ -1,7 +1,7 @@ import { Link, Outlet } from "react-router"; import Feedback from "./Feedback"; import AnnouncementBanner, { Banner } from "./AnnouncementBanner"; -import config from "../ts/config"; +import config from "../utils/config"; export default function Navigation({ GIT_REV }: { GIT_REV: string }) { // Build staging warning message with markdown link diff --git a/client/src/styles/AnnouncementBanner.css b/frontend/src/components/styles/AnnouncementBanner.css similarity index 100% rename from client/src/styles/AnnouncementBanner.css rename to frontend/src/components/styles/AnnouncementBanner.css diff --git a/client/src/styles/Feedback.css b/frontend/src/components/styles/Feedback.css similarity index 100% rename from client/src/styles/Feedback.css rename to frontend/src/components/styles/Feedback.css diff --git a/client/src/pages/Data.tsx b/frontend/src/dashboard/Dashboard.tsx similarity index 90% rename from client/src/pages/Data.tsx rename to frontend/src/dashboard/Dashboard.tsx index 9a0b1cad..4f55cd6e 100644 --- a/client/src/pages/Data.tsx +++ b/frontend/src/dashboard/Dashboard.tsx @@ -2,10 +2,11 @@ import { useState, useEffect, } from 'react'; -import "../styles/Data.css" -import DataBoard from '../components/DataBoard'; -import ShuttleRow from '../components/ShuttleRow'; -import type { VehicleInformationMap } from '../ts/types/vehicleLocation'; +import "./styles/Dashboard.css" +import DataBoard from './components/DataBoard'; 
+import ShuttleRow from './components/ShuttleRow'; +import type { VehicleInformationMap } from '../types/vehicleLocation'; +import config from '../utils/config'; export default function Data() { @@ -14,7 +15,7 @@ export default function Data() { const fetchShuttleData = async () => { try { - const response = await fetch('/api/today'); + const response = await fetch(`${config.apiBaseUrl}/api/today`); if (!response.ok) { throw new Error('Network response was not ok'); } diff --git a/client/src/components/DataBoard.tsx b/frontend/src/dashboard/components/DataBoard.tsx similarity index 100% rename from client/src/components/DataBoard.tsx rename to frontend/src/dashboard/components/DataBoard.tsx diff --git a/client/src/components/ShuttleRow.tsx b/frontend/src/dashboard/components/ShuttleRow.tsx similarity index 100% rename from client/src/components/ShuttleRow.tsx rename to frontend/src/dashboard/components/ShuttleRow.tsx diff --git a/client/src/components/StatusTag.tsx b/frontend/src/dashboard/components/StatusTag.tsx similarity index 100% rename from client/src/components/StatusTag.tsx rename to frontend/src/dashboard/components/StatusTag.tsx diff --git a/client/src/components/TimeTag.tsx b/frontend/src/dashboard/components/TimeTag.tsx similarity index 100% rename from client/src/components/TimeTag.tsx rename to frontend/src/dashboard/components/TimeTag.tsx diff --git a/client/src/styles/Data.css b/frontend/src/dashboard/styles/Dashboard.css similarity index 100% rename from client/src/styles/Data.css rename to frontend/src/dashboard/styles/Dashboard.css diff --git a/client/src/styles/DataBoard.css b/frontend/src/dashboard/styles/DataBoard.css similarity index 100% rename from client/src/styles/DataBoard.css rename to frontend/src/dashboard/styles/DataBoard.css diff --git a/client/src/styles/ShuttleRow.css b/frontend/src/dashboard/styles/ShuttleRow.css similarity index 100% rename from client/src/styles/ShuttleRow.css rename to frontend/src/dashboard/styles/ShuttleRow.css diff --git a/client/src/styles/StatusTag.css b/frontend/src/dashboard/styles/StatusTag.css similarity index 100% rename from client/src/styles/StatusTag.css rename to frontend/src/dashboard/styles/StatusTag.css diff --git a/client/src/styles/TimeTag.css b/frontend/src/dashboard/styles/TimeTag.css similarity index 100% rename from client/src/styles/TimeTag.css rename to frontend/src/dashboard/styles/TimeTag.css diff --git a/client/src/globals.d.ts b/frontend/src/globals.d.ts similarity index 100% rename from client/src/globals.d.ts rename to frontend/src/globals.d.ts diff --git a/client/src/index.css b/frontend/src/index.css similarity index 100% rename from client/src/index.css rename to frontend/src/index.css diff --git a/client/src/pages/LiveLocation.tsx b/frontend/src/locations/LiveLocation.tsx similarity index 77% rename from client/src/pages/LiveLocation.tsx rename to frontend/src/locations/LiveLocation.tsx index 1a08550a..4110d937 100644 --- a/client/src/pages/LiveLocation.tsx +++ b/frontend/src/locations/LiveLocation.tsx @@ -3,12 +3,12 @@ import { useEffect, } from 'react'; -import MapKitMap from '../components/MapKitMap'; -import Schedule from '../components/Schedule'; -import "../styles/LiveLocation.css"; -import routeData from '../data/routes.json'; -import type { ShuttleRouteData } from '../ts/types/route'; -import aggregatedSchedule from '../data/aggregated_schedule.json'; +import MapKitMap from './components/MapKitMap'; +import Schedule from '../schedule/Schedule'; +import "./styles/LiveLocation.css"; +import 
routeData from '../shared/routes.json'; +import type { ShuttleRouteData } from '../types/route'; +import aggregatedSchedule from '../shared/aggregated_schedule.json'; export default function LiveLocation() { const [filteredRouteData, setFilteredRouteData] = useState(null); diff --git a/frontend/src/locations/MapKitMap.tsx b/frontend/src/locations/MapKitMap.tsx new file mode 100644 index 00000000..090fadd0 --- /dev/null +++ b/frontend/src/locations/MapKitMap.tsx @@ -0,0 +1,736 @@ +import { useEffect, useRef, useState, useMemo } from "react"; +import { renderToStaticMarkup } from "react-dom/server"; +import './Locations.css'; +import ShuttleIcon from "./ShuttleIcon"; +import config from "../components/config"; + +import type { ShuttleRouteData, ShuttleStopData } from "../types/route"; +import type { VehicleInformationMap } from "../types/vehicleLocation"; + +import { + type Coordinate, + findNearestPointOnPolyline, + moveAlongPolyline, + calculateDistanceAlongPolyline, + calculateBearing, + getAngleDifference +} from "../components/mapUtils"; + +async function generateRoutePolylines(updatedRouteData: ShuttleRouteData) { + // Use MapKit Directions API to generate polylines for each route segment + const directions = new mapkit.Directions(); + + for (const [routeName, routeInfo] of Object.entries(updatedRouteData)) { + const polyStops = routeInfo.POLYLINE_STOPS || []; + const realStops = routeInfo.STOPS || []; + + // Initialize ROUTES with empty arrays for each real stop segment + routeInfo.ROUTES = Array(realStops.length - 1).fill(null).map(() => []); + + // Index of the current real stop segment we are populating + // polyStops may include intermediate points between real stops + let currentRealIndex = 0; + + for (let i = 0; i < polyStops.length - 1; i++) { + // Get origin and destination stops + const originStop = polyStops[i]; + const destStop = polyStops[i + 1]; + const originCoords = (routeInfo[originStop] as ShuttleStopData)?.COORDINATES; + const destCoords = (routeInfo[destStop] as ShuttleStopData)?.COORDINATES; + if (!originCoords || !destCoords) continue; + + // Fetch segment polyline + const segment = await new Promise((resolve) => { + directions.route( + { + origin: new mapkit.Coordinate(originCoords[0], originCoords[1]), + destination: new mapkit.Coordinate(destCoords[0], destCoords[1]), + }, + (error, data) => { + if (error) { + console.error(`Directions error for ${routeName} segment ${originStop}→${destStop}:`, error); + resolve([]); + return; + } + try { + const coords = data.routes[0].polyline.points.map(pt => [pt.latitude, pt.longitude]); + resolve(coords as [number, number][]); + } catch (e) { + console.error(`Unexpected response parsing for ${routeName} segment ${originStop}→${destStop}:`, e); + resolve([]); + } + } + ); + }) as [number, number][]; + + // Add to the current real stop segment + if (segment.length > 0) { + if (routeInfo.ROUTES[currentRealIndex].length === 0) { + // first segment for this route piece + routeInfo.ROUTES[currentRealIndex].push(...segment); + } else { + // append, avoiding duplicate join point + routeInfo.ROUTES[currentRealIndex].push(...segment.slice(1)); + } + } + + // If the destStop is the next real stop, move to the next ROUTES index + if (destStop === realStops[currentRealIndex + 1]) { + currentRealIndex++; + } + } + } + + // Trigger download + function downloadJSON(data: ShuttleRouteData, filename = 'routeData.json') { + const jsonStr = JSON.stringify(data, null, 2); + const blob = new Blob([jsonStr], { type: 'application/json' }); + 
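// The Blob is downloaded through a temporary object URL attached to a hidden anchor element; the timeout just below removes the anchor and revokes the URL to free memory. +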
const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = filename; + document.body.appendChild(a); + a.click(); + setTimeout(() => { + document.body.removeChild(a); + URL.revokeObjectURL(url); + }, 100); + } + + downloadJSON(updatedRouteData); + return updatedRouteData; +} + +type MapKitMapProps = { + routeData: ShuttleRouteData | null; + displayVehicles?: boolean; + generateRoutes?: boolean; + selectedRoute?: string | null; + setSelectedRoute?: (route: string | null) => void; + isFullscreen?: boolean; +}; + +// @ts-expect-error selectedRoutes is never used +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export default function MapKitMap({ routeData, displayVehicles = true, generateRoutes = false, selectedRoute, setSelectedRoute, isFullscreen = false }: MapKitMapProps) { + const mapRef = useRef(null); + const [mapLoaded, setMapLoaded] = useState(false); + const token = import.meta.env.VITE_MAPKIT_KEY; + const [map, setMap] = useState<(mapkit.Map | null)>(null); + const [vehicles, setVehicles] = useState(null); + + const vehicleOverlays = useRef>({}); + const vehicleAnimationStates = useRef>({}); + const animationFrameId = useRef(null); + + + const circleWidth = 15; + const selectedMarkerRef = useRef(null); + const overlays: mapkit.Overlay[] = []; + + // source: https://developer.apple.com/documentation/mapkitjs/loading-the-latest-version-of-mapkit-js + const setupMapKitJs = async () => { + if (!window.mapkit || window.mapkit.loadedLibraries.length === 0) { + await new Promise(resolve => { window.initMapKit = () => resolve(null); }); + delete window.initMapKit; + } + }; + + useEffect(() => { + // initialize mapkit + const mapkitScript = async () => { + // load the MapKit JS library + await setupMapKitJs(); + mapkit.init({ + authorizationCallback: (done) => { + done(token); + }, + }); + setMapLoaded(true); + }; + mapkitScript(); + }, []); + + // Fetch location data on component mount and set up polling + useEffect(() => { + if (!displayVehicles) return; + + const pollLocation = async () => { + try { + const response = await fetch(`${config.apiBaseUrl}/api/locations`); + if (!response.ok) { + throw new Error('Network response was not ok'); + } + const data = await response.json(); + setVehicles(data); + } catch (error) { + console.error('Error fetching location:', error); + } + } + + pollLocation(); + + // refresh location every 5 seconds + const refreshLocation = setInterval(pollLocation, 5000); + + return () => { + clearInterval(refreshLocation); + } + + }, []); + + // create the map + useEffect(() => { + if (mapLoaded) { + + // center on RPI + const center = new mapkit.Coordinate(42.730216, -73.675690); + const span = new mapkit.CoordinateSpan(0.02, 0.005); + const region = new mapkit.CoordinateRegion(center, span); + + const mapOptions = { + center: center, + region: region, + isScrollEnabled: true, + isZoomEnabled: true, + showsZoomControl: true, + isRotationEnabled: false, + showsPointsOfInterest: false, + showsUserLocation: true, + }; + + // create the map + const thisMap = new mapkit.Map(mapRef.current!, mapOptions); + // set zoom and boundary limits + thisMap.setCameraZoomRangeAnimated( + new mapkit.CameraZoomRange(200, 3000), + false, + ); + thisMap.setCameraBoundaryAnimated( + new mapkit.CoordinateRegion( + center, + new mapkit.CoordinateSpan(0.02, 0.025) + ), + false, + ); + thisMap.setCameraDistanceAnimated(2500); + // Helper function to create and add stop marker + const createStopMarker = (overlay: 
mapkit.CircleOverlay) => { + if (selectedMarkerRef.current) { + thisMap.removeAnnotation(selectedMarkerRef.current); + selectedMarkerRef.current = null; + } + const marker = new mapkit.MarkerAnnotation(overlay.coordinate, { + title: overlay.stopName, + glyphImage: { 1: "map-marker.png" }, + }); + thisMap.addAnnotation(marker); + selectedMarkerRef.current = marker; + return marker; + }; + + thisMap.addEventListener("select", (e) => { + if (!e.overlay) return; + if (!(e.overlay instanceof mapkit.CircleOverlay)) return; + + // Only change schedule selection on desktop-sized screens + const isDesktop = window.matchMedia('(min-width: 800px)').matches; + + if (e.overlay.stopKey) { + // Create marker for both mobile and desktop + createStopMarker(e.overlay); + + if (isDesktop) { + // Desktop: handle schedule change + const routeKey = e.overlay.routeKey; + if (setSelectedRoute && routeKey) setSelectedRoute(routeKey); + } + } + }); + thisMap.addEventListener("deselect", () => { + // remove any selected stop/marker annotation on when deselected + if (selectedMarkerRef.current) { + thisMap.removeAnnotation(selectedMarkerRef.current); + selectedMarkerRef.current = null; + } + }); + + thisMap.addEventListener("region-change-start", () => { + (thisMap.element as HTMLElement).style.cursor = "grab"; + }); + + thisMap.addEventListener("region-change-end", () => { + (thisMap.element as HTMLElement).style.cursor = "default"; + }); + + // Working hover detection + let currentHover: mapkit.CircleOverlay | null = null; + thisMap.element.addEventListener('mousemove', (e) => { + const rect = thisMap.element.getBoundingClientRect(); + const x = (e as MouseEvent).clientX - rect.left; + const y = (e as MouseEvent).clientY - rect.top; + + let foundOverlay: mapkit.CircleOverlay | null = null; + + // Check overlays for mouse position + for (const overlay of thisMap.overlays) { + if (!(overlay instanceof mapkit.CircleOverlay)) continue; + if (overlay.stopKey) { + // Calculate overlay screen position + const mapRect = thisMap.element.getBoundingClientRect(); + const centerLat = overlay.coordinate.latitude; + const centerLng = overlay.coordinate.longitude; + + // Check if mouse is within overlay radius + const region = thisMap.region; + if (region) { + const centerX = mapRect.width * (centerLng - region.center.longitude + region.span.longitudeDelta / 2) / region.span.longitudeDelta; + const centerY = mapRect.height * (region.center.latitude - centerLat + region.span.latitudeDelta / 2) / region.span.latitudeDelta; + + const distance = Math.sqrt((x - centerX) ** 2 + (y - centerY) ** 2); + if (distance < circleWidth) { // Within hover radius + foundOverlay = overlay; + } + } + } + } + + if (foundOverlay !== currentHover) { + // Clear previous hover style + if (currentHover) { + currentHover.style = new mapkit.Style({ + strokeColor: '#000000', + fillColor: '#FFFFFF', + fillOpacity: 0.1, + lineWidth: 2, + }); + } + + // Apply hover style + if (foundOverlay) { + foundOverlay.style = new mapkit.Style({ + strokeColor: '#6699ff', + fillColor: '#a1c3ff', + fillOpacity: 0.3, + lineWidth: 2.5, + }); + (thisMap.element as HTMLElement).style.cursor = "pointer"; + } else { + (thisMap.element as HTMLElement).style.cursor = "default"; + } + + currentHover = foundOverlay; + } + }); + + // Store reference to cleanup function + thisMap._hoverCleanup = () => { + // thisMap.element.removeEventListener('mousemove', _); + }; + + setMap(thisMap); + } + + // Cleanup on component unmount + return () => { + if (map && map._hoverCleanup) { + 
map._hoverCleanup(); + } + }; + }, [mapLoaded]); + + // add fixed details to the map + // includes routes and stops + useEffect(() => { + if (!map || !routeData) return; + + + // display stop overlays + for (const [route, thisRouteData] of Object.entries(routeData)) { + for (const stopKey of thisRouteData.STOPS) { + const stopData = thisRouteData[stopKey] as ShuttleStopData; + const stopCoordinate = new mapkit.Coordinate(...(stopData.COORDINATES)); + // add stop overlay (circle) + const stopOverlay = new mapkit.CircleOverlay( + stopCoordinate, + circleWidth, + { + style: new mapkit.Style( + { + strokeColor: '#000000', + fillColor: '#FFFFFF', // White fill by default + fillOpacity: 0.1, + lineWidth: 2, + } + ) + } + ); + // attach exact identifiers so the select handler can update selection precisely + stopOverlay.routeKey = route; + stopOverlay.stopKey = stopKey; + stopOverlay.stopName = stopData.NAME; + // cast circle overlay to generic overlay for adding to map + overlays.push(stopOverlay as mapkit.Overlay); + } + } + + function displayRouteOverlays(routeData: ShuttleRouteData) { + // display route overlays + for (const [_route, thisRouteData] of Object.entries(routeData)) { + // for route (WEST, NORTH) + const routePolylines = thisRouteData.ROUTES?.map( + // for segment (STOP1 -> STOP2, STOP2 -> STOP3, ...) + (route) => { + const coords = route.map(([lat, lon]) => new mapkit.Coordinate(lat, lon)); + if (coords.length === 0) return null; + const polyline = new mapkit.PolylineOverlay(coords, { + // for coordinate ([lat, lon], ...) + style: new mapkit.Style({ + strokeColor: thisRouteData.COLOR, + lineWidth: 2 + }) + }); + return polyline; + } + ).filter(p => p !== null); + overlays.push(...routePolylines as mapkit.Overlay[]); + } + } + + if (generateRoutes) { + // generate polylines for routes + const routeDataCopy = JSON.parse(JSON.stringify(routeData)); // deep copy to avoid mutating original + generateRoutePolylines(routeDataCopy).then((updatedRouteData) => { + displayRouteOverlays(updatedRouteData); + map.addOverlays(overlays); + }); + } else { + // use pre-generated polylines + displayRouteOverlays(routeData); + map.addOverlays(overlays); + } + + }, [map, routeData]); + + // Memoize flattened routes to avoid recalculating on every render + const flattenedRoutes = useMemo(() => { + if (!routeData) return {}; + const flattened: Record = {}; + + for (const [routeKey, data] of Object.entries(routeData)) { + if (data.ROUTES) { + // Flatten all route segments into one continuous polyline + const points: Coordinate[] = []; + data.ROUTES.forEach(segment => { + segment.forEach(pt => { + points.push({ latitude: pt[0], longitude: pt[1] }); + }); + }); + flattened[routeKey] = points; + } + } + return flattened; + }, [routeData]); + + // display vehicles on map + useEffect(() => { + if (!map || !vehicles) return; + + Object.keys(vehicles).forEach((key) => { + const vehicle = vehicles[key]; + const coordinate = new window.mapkit.Coordinate(vehicle.latitude, vehicle.longitude); + + const existingAnnotation = vehicleOverlays.current[key]; + + // Build SVG dynamically using ShuttleIcon component + const routeColor = (() => { + if (!routeData || !vehicle.route_name || vehicle.route_name === "UNCLEAR") { + return "#444444"; + } + const routeKey = vehicle.route_name as keyof typeof routeData; + const info = routeData[routeKey] as { COLOR?: string }; + return info.COLOR ?? 
"#444444"; + + })(); + + // Render ShuttleIcon JSX to a static SVG string + const svgString = renderToStaticMarkup(); + const svgShuttle = `data:image/svg+xml;base64,${btoa(svgString)}`; + + // --- Update or create annotation --- + if (existingAnnotation) { + // existing vehicle — update position and subtitle + // Only update coordinate directly if we don't have an animation state (to avoid flicker) + if (!vehicleAnimationStates.current[key]) { + existingAnnotation.coordinate = coordinate; + } + existingAnnotation.subtitle = `${vehicle.speed_mph.toFixed(1)} mph`; + + // Handle route status updates + // If shuttle does not have a route null + if (vehicle.route_name === null) { + // shuttle off-route (exiting) + if (existingAnnotation.lockedRoute) { + existingAnnotation.lockedRoute = null; + existingAnnotation.url = { 1: svgShuttle }; + } + } else if (vehicle.route_name !== "UNCLEAR" && vehicle.route_name !== existingAnnotation.lockedRoute) { + existingAnnotation.lockedRoute = vehicle.route_name; + existingAnnotation.url = { 1: svgShuttle }; + } + } else { + const annotationOptions = { + title: vehicle.name, + subtitle: `${vehicle.speed_mph.toFixed(1)} mph`, + url: { 1: svgShuttle }, + size: { width: 25, height: 25 }, + anchorOffset: new DOMPoint(0, -13), + }; + + // create shuttle object + const annotation = new window.mapkit.ImageAnnotation(coordinate, annotationOptions) as mapkit.ShuttleAnnotation; + + + // lock route if known + if (vehicle.route_name !== "UNCLEAR" && vehicle.route_name !== null) { + annotation.lockedRoute = vehicle.route_name; + } + + // add shuttle to map + map.addAnnotation(annotation); + vehicleOverlays.current[key] = annotation; + } + }); + + // --- Update Animation State for new/updated vehicles --- + const now = Date.now(); + Object.keys(vehicles).forEach((key) => { + const vehicle = vehicles[key]; + // If we don't have a route for this vehicle, we can't animate along a path nicely. + // We'll just rely on the API updates or maybe simple linear extrapolation later? + // For now, let's only set up animation if we have a valid route. + if (!vehicle.route_name || !flattenedRoutes[vehicle.route_name]) return; + + const routePolyline = flattenedRoutes[vehicle.route_name]; + const vehicleCoord = { latitude: vehicle.latitude, longitude: vehicle.longitude }; + + const serverTime = new Date(vehicle.timestamp).getTime(); + + // Check if we already have state + let animState = vehicleAnimationStates.current[key]; + + // If the server data hasn't changed (cached response), ignore this update + // and let the client-side prediction continue running. + if (animState && animState.lastServerTime === serverTime) { + return; + } + + const snapToPolyline = () => { + const { index, point } = findNearestPointOnPolyline(vehicleCoord, routePolyline); + vehicleAnimationStates.current[key] = { + lastUpdateTime: now, + polylineIndex: index, + currentPoint: point, + targetDistance: 0, + distanceTraveled: 0, + lastServerTime: serverTime + }; + }; + + if (!animState) { + snapToPolyline(); + } else { + // ======================================================================= + // PREDICTION SMOOTHING ALGORITHM + // ======================================================================= + // Problem: Server updates arrive every ~5 seconds, causing the shuttle + // to "jump" to its new position (rubberbanding). 
+ // + // Solution: Instead of jumping, we calculate the speed needed for the + // shuttle to smoothly travel from its current visual position to where + // it *should* be when the next update arrives. + // + // Formula: speed = distance / time + // Where: + // - distance = gap between current visual position and predicted target + // - time = 5 seconds (the update interval) + // ======================================================================= + + const PREDICTION_WINDOW_SECONDS = 5; + + // Step 1: Find where the server says the shuttle is right now + const { index: serverIndex, point: serverPoint } = findNearestPointOnPolyline(vehicleCoord, routePolyline); + + // Step 2: Calculate where the shuttle will be in 5 seconds + // Convert speed from mph to meters/second (1 mph = 0.44704 m/s) + const speedMetersPerSecond = vehicle.speed_mph * 0.44704; + const projectedDistanceMeters = speedMetersPerSecond * PREDICTION_WINDOW_SECONDS; + + // Move along the route polyline by that distance to find the target point + const { index: targetIndex, point: targetPoint } = moveAlongPolyline( + routePolyline, + serverIndex, + serverPoint, + projectedDistanceMeters + ); + + // Step 3: Verify the shuttle is moving in the correct direction + // Compare the vehicle's GPS heading to the route segment bearing. + // If they differ by more than 90°, the shuttle may be going the wrong way. + let isMovingCorrectDirection = true; + if (routePolyline.length > serverIndex + 1 && vehicle.speed_mph > 1) { + const segmentStart = routePolyline[serverIndex]; + const segmentEnd = routePolyline[serverIndex + 1]; + const segmentBearing = calculateBearing(segmentStart, segmentEnd); + const headingDifference = getAngleDifference(segmentBearing, vehicle.heading_degrees); + + if (headingDifference > 90) { + isMovingCorrectDirection = false; + } + } + + // Step 4: Calculate distance from current visual position to target + const distanceToTarget = calculateDistanceAlongPolyline( + routePolyline, + animState.polylineIndex, + animState.currentPoint, + targetIndex, + targetPoint + ); + + // Step 5: Calculate the total distance to travel with easing + let targetDistanceMeters = distanceToTarget; + + // If moving wrong direction, stop the animation + if (!isMovingCorrectDirection) { + targetDistanceMeters = 0; + } + + // Step 6: Update animation state. + // If the gap is extremely large (>250m in either direction), snap to server position. + // For smaller backward gaps, animate smoothly backward to correct the overprediction. + const MAX_REASONABLE_GAP_METERS = 250; + if (Math.abs(distanceToTarget) > MAX_REASONABLE_GAP_METERS) { + snapToPolyline(); + } else { + // Allow negative targetDistance for smooth backward animation + // Reset the animation progress - we're starting a new prediction window + vehicleAnimationStates.current[key] = { + lastUpdateTime: now, + polylineIndex: animState.polylineIndex, + currentPoint: animState.currentPoint, + targetDistance: targetDistanceMeters, + distanceTraveled: 0, + lastServerTime: serverTime + }; + } + } + }); + + // --- Remove stale vehicles --- + const currentVehicleKeys = new Set(Object.keys(vehicles)); + Object.keys(vehicleOverlays.current).forEach((key) => { + if (!currentVehicleKeys.has(key)) { + map.removeAnnotation(vehicleOverlays.current[key]); + delete vehicleOverlays.current[key]; + } + }); + }, [map, vehicles, routeData]); + + + // --- Animation Loop --- + useEffect(() => { + // We use setTimeout/setInterval or requestAnimationFrame. The user "Considered" setTimeout. 
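+        // (Worked illustration with assumed numbers, for clarity: a shuttle
+        //  reported at 20 mph covers 20 * 0.44704 ≈ 8.94 m/s, so one 5 s
+        //  prediction window projects it ≈ 44.7 m further along the route
+        //  polyline; the loop below spreads that distance across the window,
+        //  so the marker arrives roughly as the next poll lands instead of
+        //  jumping on every server update.)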
+ // We will use requestAnimationFrame for smoothness, but structure it to calculate delta + // similar to how one might with setTimeout. + + let lastFrameTime = Date.now(); + + const animate = () => { + const now = Date.now(); + const dt = now - lastFrameTime; // ms + lastFrameTime = now; + + // Avoid huge jumps if tab was backgrounded + if (dt > 1000) { + animationFrameId.current = requestAnimationFrame(animate); + return; + } + + Object.keys(vehicleAnimationStates.current).forEach(key => { + const animState = vehicleAnimationStates.current[key]; + const vehicle = vehicles?.[key]; + const annotation = vehicleOverlays.current[key]; + + if (!vehicle || !annotation || !animState) return; + if (!vehicle.route_name || !flattenedRoutes[vehicle.route_name]) return; + + const routePolyline = flattenedRoutes[vehicle.route_name]; + + // ======================================================================= + // EASED ANIMATION + // ======================================================================= + // Instead of constant speed, we use an ease-in-out curve. + // This makes the shuttle accelerate at the start and decelerate at the end + // of each prediction window, creating smoother, more natural motion. + // ======================================================================= + + const PREDICTION_WINDOW_MS = 5000; + const timeElapsed = now - animState.lastUpdateTime; + + // Calculate progress through the prediction window (0.0 to 1.0) + const progress = Math.min(timeElapsed / PREDICTION_WINDOW_MS, 1.0); + + // Calculate how far along the target distance we should be (linear interpolation) + const targetPosition = animState.targetDistance * progress; + + // Calculate how much to move this frame (can be negative for backward movement) + const distanceToMove = targetPosition - animState.distanceTraveled; + + // Skip if no movement needed + if (distanceToMove === 0) return; + + // Move along polyline + const { index, point } = moveAlongPolyline( + routePolyline, + animState.polylineIndex, + animState.currentPoint, + distanceToMove + ); + + // Update state + animState.polylineIndex = index; + animState.currentPoint = point; + animState.distanceTraveled = targetPosition; + + // Update MapKit annotation + annotation.coordinate = new mapkit.Coordinate(point.latitude, point.longitude); + }); + + animationFrameId.current = requestAnimationFrame(animate); + }; + + animationFrameId.current = requestAnimationFrame(animate); + + return () => { + if (animationFrameId.current) cancelAnimationFrame(animationFrameId.current); + }; + }, [vehicles]); // Restart loop if vehicles change? Not strictly necessary if refs are used, but ensures we have latest `vehicles` closure if needed. Actually with refs we don't need to dependency on vehicles often if we read from ref, but here we read `vehicles` prop. + + + + return ( +

+    // NOTE: the element's original markup was lost here; reconstructed minimally from the mapRef usage above
+    <div ref={mapRef} />
+ ); +}; diff --git a/client/src/components/DataAgeIndicator.tsx b/frontend/src/locations/components/DataAgeIndicator.tsx similarity index 100% rename from client/src/components/DataAgeIndicator.tsx rename to frontend/src/locations/components/DataAgeIndicator.tsx diff --git a/client/src/components/MapKitMap.tsx b/frontend/src/locations/components/MapKitMap.tsx similarity index 98% rename from client/src/components/MapKitMap.tsx rename to frontend/src/locations/components/MapKitMap.tsx index 866e0943..59d7877b 100644 --- a/client/src/components/MapKitMap.tsx +++ b/frontend/src/locations/components/MapKitMap.tsx @@ -2,9 +2,10 @@ import { useEffect, useRef, useState, useMemo } from "react"; import { renderToStaticMarkup } from "react-dom/server"; import '../styles/MapKitMap.css'; import ShuttleIcon from "./ShuttleIcon"; +import config from "../../utils/config"; -import type { ShuttleRouteData, ShuttleStopData } from "../ts/types/route"; -import type { VehicleInformationMap } from "../ts/types/vehicleLocation"; +import type { ShuttleRouteData, ShuttleStopData } from "../../types/route"; +import type { VehicleInformationMap } from "../../types/vehicleLocation"; import DataAgeIndicator from "./DataAgeIndicator"; import { @@ -14,7 +15,7 @@ import { calculateDistanceAlongPolyline, calculateBearing, getAngleDifference -} from "../ts/mapUtils"; +} from "../../utils/mapUtils"; // Helper function to remove consecutive duplicate points from a route function removeDuplicateConsecutivePoints(route: [number, number][]): [number, number][] { @@ -186,7 +187,7 @@ export default function MapKitMap({ routeData, displayVehicles = true, generateR const pollLocation = async () => { try { - const response = await fetch('/api/locations'); + const response = await fetch(`${config.apiBaseUrl}/api/locations`); if (!response.ok) { throw new Error('Network response was not ok'); } @@ -514,7 +515,7 @@ export default function MapKitMap({ routeData, displayVehicles = true, generateR existingAnnotation.subtitle = `${vehicle.speed_mph.toFixed(1)} mph`; // Handle route status updates - // If shuttle does not have a route null + // If shuttle does not have a route null if (vehicle.route_name === null) { // shuttle off-route (exiting) if (existingAnnotation.lockedRoute) { @@ -553,7 +554,7 @@ export default function MapKitMap({ routeData, displayVehicles = true, generateR const now = Date.now(); Object.keys(vehicles).forEach((key) => { const vehicle = vehicles[key]; - // If we don't have a route for this vehicle, we can't animate along a path nicely. + // If we don't have a route for this vehicle, we can't animate along a path nicely. // We'll just rely on the API updates or maybe simple linear extrapolation later? // For now, let's only set up animation if we have a valid route. 
if (!vehicle.route_name || !flattenedRoutes[vehicle.route_name]) return; diff --git a/client/src/components/ShuttleIcon.tsx b/frontend/src/locations/components/ShuttleIcon.tsx similarity index 100% rename from client/src/components/ShuttleIcon.tsx rename to frontend/src/locations/components/ShuttleIcon.tsx diff --git a/client/src/styles/DataAgeIndicator.css b/frontend/src/locations/styles/DataAgeIndicator.css similarity index 100% rename from client/src/styles/DataAgeIndicator.css rename to frontend/src/locations/styles/DataAgeIndicator.css diff --git a/client/src/styles/LiveLocation.css b/frontend/src/locations/styles/LiveLocation.css similarity index 100% rename from client/src/styles/LiveLocation.css rename to frontend/src/locations/styles/LiveLocation.css diff --git a/client/src/styles/MapKitMap.css b/frontend/src/locations/styles/MapKitMap.css similarity index 100% rename from client/src/styles/MapKitMap.css rename to frontend/src/locations/styles/MapKitMap.css diff --git a/client/src/main.tsx b/frontend/src/main.tsx similarity index 100% rename from client/src/main.tsx rename to frontend/src/main.tsx diff --git a/client/src/components/Schedule.tsx b/frontend/src/schedule/Schedule.tsx similarity index 94% rename from client/src/components/Schedule.tsx rename to frontend/src/schedule/Schedule.tsx index 328c6b36..562b09f1 100644 --- a/client/src/components/Schedule.tsx +++ b/frontend/src/schedule/Schedule.tsx @@ -1,10 +1,10 @@ import { useState, useEffect } from 'react'; -import '../styles/Schedule.css'; -import rawRouteData from '../data/routes.json'; -import rawAggregatedSchedule from '../data/aggregated_schedule.json'; -import type { AggregatedDaySchedule, AggregatedScheduleType} from '../ts/types/schedule'; -import type { ShuttleRouteData, ShuttleStopData } from '../ts/types/route'; -import {buildAllStops, findClosestStop, type Stop, type ClosestStop, } from '../ts/types/ClosestStop'; +import './styles/Schedule.css'; +import rawRouteData from '../shared/routes.json'; +import rawAggregatedSchedule from '../shared/aggregated_schedule.json'; +import type { AggregatedDaySchedule, AggregatedScheduleType} from '../types/schedule'; +import type { ShuttleRouteData, ShuttleStopData } from '../types/route'; +import {buildAllStops, findClosestStop, type Stop, type ClosestStop, } from '../types/ClosestStop'; @@ -37,7 +37,7 @@ export default function Schedule({ selectedRoute, setSelectedRoute }: SchedulePr const stopsToday = buildAllStops(routeData, aggregatedSchedule, selectedDay); setAllStops(stopsToday); }, [selectedDay]); - + // Define safe values to avoid repeated null checks const safeSelectedRoute = selectedRoute || routeNames[0]; @@ -88,7 +88,7 @@ export default function Schedule({ selectedRoute, setSelectedRoute }: SchedulePr return date; } -// Use user location and get closest stop to them +// Use user location and get closest stop to them useEffect(() => { if (!('geolocation' in navigator)) return; @@ -154,7 +154,7 @@ export default function Schedule({ selectedRoute, setSelectedRoute }: SchedulePr
- +
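Editor's sketch: the config.apiBaseUrl prefix that replaces the bare /api/... fetches in Dashboard.tsx and both MapKitMap.tsx variants above implies a small module at frontend/src/utils/config.ts. That file is not shown in this diff, so the following is only a plausible shape, assuming it simply re-exports Vite's build-time VITE_BACKEND_URL variable (the same mechanism the map code already uses for VITE_MAPKIT_KEY):

    // frontend/src/utils/config.ts -- hypothetical sketch; the real module is not in this diff
    // Vite inlines import.meta.env.VITE_* values at build time.
    const config = {
      // Base URL of the FastAPI backend; an empty string keeps fetches same-origin.
      apiBaseUrl: import.meta.env.VITE_BACKEND_URL ?? '',
    };

    export default config;

Under that assumption, `${config.apiBaseUrl}/api/today` resolves to http://localhost:8000/api/today in development and falls back to a relative /api/today wherever the variable is unset.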
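A second sketch: Schedule.tsx now pulls buildAllStops and findClosestStop from the relocated types/ClosestStop module and feeds them the browser geolocation. Only the import is visible in this diff, so the shape below is a guess; the field names and the haversine distance choice are assumptions of this sketch, not the project's confirmed implementation:

    // Hypothetical shape for frontend/src/types/ClosestStop.ts
    export interface Stop { name: string; latitude: number; longitude: number; }
    export interface ClosestStop { stop: Stop; distanceMeters: number; }

    // Great-circle (haversine) distance between two coordinates, in meters.
    function haversineMeters(lat1: number, lon1: number, lat2: number, lon2: number): number {
      const R = 6371000; // mean Earth radius in meters
      const toRad = (d: number) => (d * Math.PI) / 180;
      const a =
        Math.sin(toRad(lat2 - lat1) / 2) ** 2 +
        Math.cos(toRad(lat1)) * Math.cos(toRad(lat2)) * Math.sin(toRad(lon2 - lon1) / 2) ** 2;
      return 2 * R * Math.asin(Math.sqrt(a));
    }

    export function findClosestStop(stops: Stop[], latitude: number, longitude: number): ClosestStop | null {
      let best: ClosestStop | null = null;
      for (const stop of stops) {
        const d = haversineMeters(latitude, longitude, stop.latitude, stop.longitude);
        if (best === null || d < best.distanceMeters) best = { stop, distanceMeters: d };
      }
      return best;
    }

A linear scan over a campus-sized stop list is cheap enough that no spatial index is warranted.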