From d39cd76a4be9c77026079ea740c743049232cab3 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Sat, 20 Dec 2025 15:45:38 -0500 Subject: [PATCH 01/29] add dockerfiles --- .dockerignore | 71 +++++++++++++++++++++++++++++++++++++ Dockerfile.backend | 42 ++++++++++++++++++++++ Dockerfile.frontend | 60 ++++++++++++++++++++++++++++++++ Dockerfile.worker | 29 ++++++++++++++++ docker-compose.yml | 85 +++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 287 insertions(+) create mode 100644 .dockerignore create mode 100644 Dockerfile.backend create mode 100644 Dockerfile.frontend create mode 100644 Dockerfile.worker create mode 100644 docker-compose.yml diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..5aa08335 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,71 @@ +# Git +.git +.gitignore +.github + +# Python +__pycache__ +*.py[cod] +*$py.class +*.so +.Python +env/ +venv/ +ENV/ +.venv +*.egg-info/ +.eggs/ +dist/ +build/ +.pytest_cache/ +.python-version + +# Node +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.npm +.yarn +client/node_modules/ +client/dist/ +client/.vite/ +test-client/ + +# Environment +.env +.env.local +.env.*.local +.flaskenv + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Logs +*.log +logs/ + +# Database +*.db +*.sqlite +*.sqlite3 +instance/ + +# Testing +.coverage +htmlcov/ +.tox/ + +# Documentation +*.md +!README.md + +# Misc +*.bak +*.tmp +.yamllint diff --git a/Dockerfile.backend b/Dockerfile.backend new file mode 100644 index 00000000..0a0584a6 --- /dev/null +++ b/Dockerfile.backend @@ -0,0 +1,42 @@ +# Backend Dockerfile for Shubble Flask API +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY server/ ./server/ +COPY data/ ./data/ +COPY migrations/ ./migrations/ +COPY shubble.py . 
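+ +# NOTE: requirements.txt is copied and installed before the application code above, so the pip install layer stays cached when only source files change.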
+ +# Create non-root user +RUN useradd -m -u 1000 shubble && chown -R shubble:shubble /app +USER shubble + +# Expose port +EXPOSE 8000 + +# Health check (raise_for_status makes the check fail on HTTP error responses, not just on connection errors) +HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ + CMD python -c "import requests; requests.get('http://localhost:8000/api/locations', timeout=5).raise_for_status()" + +# Run database migrations and start gunicorn +CMD flask --app server:create_app db upgrade && \ + gunicorn shubble:app \ + --bind 0.0.0.0:8000 \ + --workers 2 \ + --threads 4 \ + --timeout 120 \ + --log-level ${LOG_LEVEL:-info} \ + --access-logfile - \ + --error-logfile -
diff --git a/Dockerfile.frontend b/Dockerfile.frontend new file mode 100644 index 00000000..69446360 --- /dev/null +++ b/Dockerfile.frontend @@ -0,0 +1,60 @@ +# Frontend Dockerfile for Shubble +FROM node:20-alpine AS builder + +WORKDIR /app + +# Copy package files +COPY package*.json ./ +COPY client/package*.json ./client/ + +# Install dependencies +RUN npm ci + +# Copy source files and data +COPY client/ ./client/ +COPY data/ ./data/ +COPY vite.config.ts ./ +COPY tsconfig*.json ./ + +# Build the application (includes parseSchedule.js and data copy) +RUN npm run build + +# Production stage with nginx +FROM nginx:alpine + +# Copy built files to nginx +COPY --from=builder /app/client/dist /usr/share/nginx/html + +# Copy nginx configuration +COPY <<EOF [...] +server { +[... nginx server block truncated ...] +} +EOF + +EXPOSE 80 + +CMD ["nginx", "-g", "daemon off;"]
diff --git a/Dockerfile.worker b/Dockerfile.worker new file mode 100644 index 00000000..19001621 --- /dev/null +++ b/Dockerfile.worker @@ -0,0 +1,29 @@ +# Worker Dockerfile for Shubble +FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY server/ ./server/ +[... remaining worker steps truncated ...] > /dev/null || exit 1 + +# Run worker +CMD ["python", "-m", "server.worker"]
diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..14883b7b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,85 @@ +version: '3.8' + +services: + postgres: + image: postgres:16-alpine + environment: + POSTGRES_DB: shubble + POSTGRES_USER: shubble + POSTGRES_PASSWORD: shubble + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U shubble"] + interval: 10s + timeout: 5s + retries: 5 + + redis: + image: redis:7-alpine + command: redis-server --appendonly yes + volumes: + - redis_data:/data + ports: + - "6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + + backend: + build: + context: . + dockerfile: Dockerfile.backend + ports: + - "8000:8000" + environment: + DATABASE_URL: postgresql://shubble:shubble@postgres:5432/shubble + REDIS_URL: redis://redis:6379/0 + FLASK_ENV: development + FLASK_DEBUG: "true" + LOG_LEVEL: INFO + API_KEY: ${API_KEY:-} + SAMSARA_SECRET: ${SAMSARA_SECRET:-} + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + + worker: + build: + context: . + dockerfile: Dockerfile.worker + environment: + DATABASE_URL: postgresql://shubble:shubble@postgres:5432/shubble + REDIS_URL: redis://redis:6379/0 + FLASK_ENV: development + LOG_LEVEL: INFO + API_KEY: ${API_KEY:-} + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + backend: + condition: service_started + restart: unless-stopped + + frontend: + build: + context: .
+ dockerfile: Dockerfile.frontend + ports: + - "3000:80" + depends_on: + - backend + restart: unless-stopped + +volumes: + postgres_data: + redis_data: From d5f05626f94182d2f4f0b517f347efbbc70e3faa Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Sat, 20 Dec 2025 17:24:13 -0500 Subject: [PATCH 02/29] ignore root on pip install --- Dockerfile.backend | 2 +- Dockerfile.worker | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile.backend b/Dockerfile.backend index 0a0584a6..3d807ce4 100644 --- a/Dockerfile.backend +++ b/Dockerfile.backend @@ -11,7 +11,7 @@ RUN apt-get update && apt-get install -y \ # Copy requirements and install Python dependencies COPY requirements.txt . -RUN pip install --no-cache-dir -r requirements.txt +RUN pip install --no-cache-dir --root-user-action=ignore -r requirements.txt # Copy application code COPY server/ ./server/ diff --git a/Dockerfile.worker b/Dockerfile.worker index 19001621..5a35a509 100644 --- a/Dockerfile.worker +++ b/Dockerfile.worker @@ -11,7 +11,7 @@ RUN apt-get update && apt-get install -y \ # Copy requirements and install Python dependencies COPY requirements.txt . -RUN pip install --no-cache-dir -r requirements.txt +RUN pip install --no-cache-dir --root-user-action=ignore -r requirements.txt # Copy application code COPY server/ ./server/ From dbfd76000d19680a4c390790d86bc5f98485aa75 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Tue, 23 Dec 2025 17:52:53 -0500 Subject: [PATCH 03/29] use customizable base url --- client/src/components/MapKitMap.tsx | 3 ++- client/src/pages/Data.tsx | 3 ++- client/src/ts/config.ts | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/client/src/components/MapKitMap.tsx b/client/src/components/MapKitMap.tsx index a2bdfef4..7bd3037d 100644 --- a/client/src/components/MapKitMap.tsx +++ b/client/src/components/MapKitMap.tsx @@ -2,6 +2,7 @@ import { useEffect, useRef, useState, useMemo } from "react"; import { renderToStaticMarkup } from "react-dom/server"; import '../styles/MapKitMap.css'; import ShuttleIcon from "./ShuttleIcon"; +import config from "../ts/config"; import type { ShuttleRouteData, ShuttleStopData } from "../ts/types/route"; import type { VehicleInformationMap } from "../ts/types/vehicleLocation"; @@ -163,7 +164,7 @@ export default function MapKitMap({ routeData, displayVehicles = true, generateR const pollLocation = async () => { try { - const response = await fetch('/api/locations'); + const response = await fetch(`${config.apiBaseUrl}/api/locations`); if (!response.ok) { throw new Error('Network response was not ok'); } diff --git a/client/src/pages/Data.tsx b/client/src/pages/Data.tsx index 9a0b1cad..508fbd92 100644 --- a/client/src/pages/Data.tsx +++ b/client/src/pages/Data.tsx @@ -6,6 +6,7 @@ import "../styles/Data.css" import DataBoard from '../components/DataBoard'; import ShuttleRow from '../components/ShuttleRow'; import type { VehicleInformationMap } from '../ts/types/vehicleLocation'; +import config from '../ts/config'; export default function Data() { @@ -14,7 +15,7 @@ export default function Data() { const fetchShuttleData = async () => { try { - const response = await fetch('/api/today'); + const response = await fetch(`${config.apiBaseUrl}/api/today`); if (!response.ok) { throw new Error('Network response was not ok'); } diff --git a/client/src/ts/config.ts b/client/src/ts/config.ts index 3e8c2dcd..79e20d4c 100644 --- a/client/src/ts/config.ts +++ b/client/src/ts/config.ts @@ -2,7 +2,8 @@ const isStaging = import.meta.env.VITE_DEPLOY_MODE 
!== 'production'; const config = { isStaging, - isDev: isStaging || import.meta.env.DEV + isDev: isStaging || import.meta.env.DEV, + apiBaseUrl: import.meta.env.VITE_BACKEND_URL || 'http://localhost:5001' }; export default config; \ No newline at end of file From e0195196435076baffa7d6555ce569faaab215e9 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Tue, 23 Dec 2025 19:49:15 -0500 Subject: [PATCH 04/29] support CORS --- .gitignore | 2 +- client/src/ts/config.ts | 4 ++-- data/schedules.py | 12 ++++++------ docker-compose.yml | 1 + ...5d550a4a5_add_vehiclelocation.cpython-312.pyc | Bin 3231 -> 0 bytes ...166ecfdae66_initial_migration.cpython-312.pyc | Bin 3221 -> 0 bytes requirements.txt | 1 + server/__init__.py | 8 ++++++-- server/config.py | 4 ++++ test-server/server.py | 4 ++++ 10 files changed, 25 insertions(+), 11 deletions(-) delete mode 100644 migrations/versions/__pycache__/23c5d550a4a5_add_vehiclelocation.cpython-312.pyc delete mode 100644 migrations/versions/__pycache__/d166ecfdae66_initial_migration.cpython-312.pyc diff --git a/.gitignore b/.gitignore index 9330de37..6a07fd89 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ -__pycache__/ +**/__pycache__/ node_modules/ .env client/dist diff --git a/client/src/ts/config.ts b/client/src/ts/config.ts index 79e20d4c..b030ce57 100644 --- a/client/src/ts/config.ts +++ b/client/src/ts/config.ts @@ -3,7 +3,7 @@ const isStaging = import.meta.env.VITE_DEPLOY_MODE !== 'production'; const config = { isStaging, isDev: isStaging || import.meta.env.DEV, - apiBaseUrl: import.meta.env.VITE_BACKEND_URL || 'http://localhost:5001' + apiBaseUrl: import.meta.env.VITE_BACKEND_URL || 'http://localhost:8000' }; -export default config; \ No newline at end of file +export default config; diff --git a/data/schedules.py b/data/schedules.py index 860f295a..19a58671 100644 --- a/data/schedules.py +++ b/data/schedules.py @@ -110,7 +110,7 @@ def match_shuttles_to_schedules(cls): # Determine day from first timestamp required_cols = {"vehicle_id", "timestamp", "route_name"} - if at_stops.empty or not required_cols.issubset(at_stops.columns): + if at_stops is None or at_stops.empty or not required_cols.issubset(at_stops.columns): logger.warning("at_stops is missing required data returning empty match.") return {} @@ -139,15 +139,15 @@ def match_shuttles_to_schedules(cls): # Precompute minute-aligned timestamps at_stops['minute'] = at_stops['timestamp'].dt.floor('min') - + # Group logs shuttle_groups = {k: v for k, v in at_stops.groupby('vehicle_id')} - + # Build cost matrix for i, shuttle in enumerate(shuttles): - + logs = shuttle_groups.get(shuttle) if logs is None or logs.empty: W[i] = 1 #No data for shuttle @@ -176,6 +176,6 @@ def match_shuttles_to_schedules(cls): cache.set("schedule_entries", result, timeout=3600) return result - + if __name__ == "__main__": - result = Schedule.match_shuttles_to_schedules() \ No newline at end of file + result = Schedule.match_shuttles_to_schedules() diff --git a/docker-compose.yml b/docker-compose.yml index 14883b7b..d00261d0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -37,6 +37,7 @@ services: ports: - "8000:8000" environment: + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:5173} DATABASE_URL: postgresql://shubble:shubble@postgres:5432/shubble REDIS_URL: redis://redis:6379/0 FLASK_ENV: development diff --git a/migrations/versions/__pycache__/23c5d550a4a5_add_vehiclelocation.cpython-312.pyc b/migrations/versions/__pycache__/23c5d550a4a5_add_vehiclelocation.cpython-312.pyc deleted file mode 100644 index
2ad0a27f525b13fdce6629040137667ab9b375a0..0000000000000000000000000000000000000000 Binary files a/migrations/versions/__pycache__/23c5d550a4a5_add_vehiclelocation.cpython-312.pyc and /dev/null differ
diff --git a/migrations/versions/__pycache__/d166ecfdae66_initial_migration.cpython-312.pyc b/migrations/versions/__pycache__/d166ecfdae66_initial_migration.cpython-312.pyc deleted file mode 100644 Binary files a/migrations/versions/__pycache__/d166ecfdae66_initial_migration.cpython-312.pyc and /dev/null differ
diff --git a/requirements.txt b/requirements.txt index 23614dd3..7d031c84 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ Flask>=3.1.1 +Flask-CORS>=5.0.0 requests>=2.32.4 psycopg2-binary>=2.9 Flask-SQLAlchemy>=3.1.1 diff --git a/server/__init__.py b/server/__init__.py index 93d369cb..cc9c5376 100644 --- a/server/__init__.py +++ b/server/__init__.py @@ -2,6 +2,7 @@ from flask_sqlalchemy import SQLAlchemy from flask_migrate import Migrate from flask_caching import Cache +from flask_cors import CORS import logging from .config import Config @@ -19,16 +20,19 @@ def create_app(): app = Flask(__name__, static_folder='../client/dist', static_url_path='/') app.config.from_object(Config) + # configure CORS + CORS(app, origins=[app.config['FRONTEND_URL']], supports_credentials=True) + # initialize database db.init_app(app) # make any necessary migrations migrate.init_app(app, db) - + # initialize cache cache.init_app(app, config={'CACHE_TYPE':
'RedisCache', 'CACHE_REDIS_URL': app.config["REDIS_URL"]}) # register routes from . import routes app.register_blueprint(routes.bp) - + return app \ No newline at end of file diff --git a/server/config.py b/server/config.py index dd704cdc..334caffd 100644 --- a/server/config.py +++ b/server/config.py @@ -12,6 +12,10 @@ class Config: ENV = os.environ.get('FLASK_ENV', 'development').lower() LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO').upper() + # CORS settings + FRONTEND_URL = os.environ.get('FRONTEND_URL', 'http://localhost:5173') + TEST_FRONTEND_URL = os.environ.get('TEST_FRONTEND_URL', 'http://localhost:5174') + # database settings SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL") if SQLALCHEMY_DATABASE_URI.startswith('postgres://'): diff --git a/test-server/server.py b/test-server/server.py index 1cd35bff..c2ff5d20 100644 --- a/test-server/server.py +++ b/test-server/server.py @@ -1,4 +1,5 @@ from flask import Flask, jsonify, request, send_from_directory +from flask_cors import CORS from threading import Thread, Lock import time import os @@ -24,6 +25,9 @@ app = Flask(__name__, static_folder="../test-client/dist", static_url_path="") app.config.from_object(Config) +# Configure CORS for test-client +CORS(app, origins=[app.config.get('TEST_FRONTEND_URL', 'http://localhost:5174')], supports_credentials=True) + db = SQLAlchemy() db.init_app(app) From 7658d963cdaf85f412160aa88b0ae28284e1eb39 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Tue, 23 Dec 2025 23:13:25 -0500 Subject: [PATCH 05/29] get docker working with env vars --- docker-compose.yml | 48 ++++++++++--------- .../Dockerfile.backend | 0 .../Dockerfile.frontend | 23 ++++++++- Dockerfile.worker => docker/Dockerfile.worker | 0 4 files changed, 47 insertions(+), 24 deletions(-) rename Dockerfile.backend => docker/Dockerfile.backend (100%) rename Dockerfile.frontend => docker/Dockerfile.frontend (64%) rename Dockerfile.worker => docker/Dockerfile.worker (100%) diff --git a/docker-compose.yml b/docker-compose.yml index d00261d0..0a52076b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,16 +1,14 @@ -version: '3.8' - services: postgres: image: postgres:16-alpine environment: - POSTGRES_DB: shubble - POSTGRES_USER: shubble - POSTGRES_PASSWORD: shubble + POSTGRES_DB: ${POSTGRES_DB:-shubble} + POSTGRES_USER: ${POSTGRES_USER:-shubble} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-shubble} volumes: - postgres_data:/var/lib/postgresql/data ports: - - "5432:5432" + - "${POSTGRES_PORT:-5432}:5432" healthcheck: test: ["CMD-SHELL", "pg_isready -U shubble"] interval: 10s @@ -23,7 +21,7 @@ services: volumes: - redis_data:/data ports: - - "6379:6379" + - "${REDIS_PORT:-6379}:6379" healthcheck: test: ["CMD", "redis-cli", "ping"] interval: 10s @@ -33,16 +31,18 @@ services: backend: build: context: . 
- dockerfile: Dockerfile.backend + dockerfile: docker/Dockerfile.backend ports: - - "8000:8000" + - "${BACKEND_PORT:-8000}:8000" + extra_hosts: + - "localhost:host-gateway" environment: - FRONTEND_URL: ${FRONTEND_URL:-http://localhost:5173} - DATABASE_URL: postgresql://shubble:shubble@postgres:5432/shubble - REDIS_URL: redis://redis:6379/0 - FLASK_ENV: development - FLASK_DEBUG: "true" - LOG_LEVEL: INFO + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + FLASK_ENV: ${FLASK_ENV:-development} + FLASK_DEBUG: ${FLASK_DEBUG:-true} + LOG_LEVEL: ${LOG_LEVEL:-INFO} API_KEY: ${API_KEY:-} SAMSARA_SECRET: ${SAMSARA_SECRET:-} depends_on: @@ -55,12 +55,14 @@ services: worker: build: context: . - dockerfile: Dockerfile.worker + dockerfile: docker/Dockerfile.worker + extra_hosts: + - "localhost:host-gateway" environment: - DATABASE_URL: postgresql://shubble:shubble@postgres:5432/shubble - REDIS_URL: redis://redis:6379/0 - FLASK_ENV: development - LOG_LEVEL: INFO + DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + FLASK_ENV: ${FLASK_ENV:-development} + LOG_LEVEL: ${LOG_LEVEL:-INFO} API_KEY: ${API_KEY:-} depends_on: postgres: @@ -74,9 +76,11 @@ services: frontend: build: context: . - dockerfile: Dockerfile.frontend + dockerfile: docker/Dockerfile.frontend ports: - - "3000:80" + - "${FRONTEND_PORT:-3000}:80" + environment: + VITE_BACKEND_URL: ${VITE_BACKEND_URL:-http://localhost:8000} depends_on: - backend restart: unless-stopped diff --git a/Dockerfile.backend b/docker/Dockerfile.backend similarity index 100% rename from Dockerfile.backend rename to docker/Dockerfile.backend diff --git a/Dockerfile.frontend b/docker/Dockerfile.frontend similarity index 64% rename from Dockerfile.frontend rename to docker/Dockerfile.frontend index 69446360..8caf9343 100644 --- a/Dockerfile.frontend +++ b/docker/Dockerfile.frontend @@ -16,12 +16,17 @@ COPY data/ ./data/ COPY vite.config.ts ./ COPY tsconfig*.json ./ -# Build the application (includes parseSchedule.js and data copy) +# Build the application with a placeholder that will be replaced at runtime +# This allows the backend URL to be configured via environment variable +ENV VITE_BACKEND_URL=__VITE_BACKEND_URL__ RUN npm run build # Production stage with nginx FROM nginx:alpine +# Install gettext for envsubst +RUN apk add --no-cache gettext + # Copy built files to nginx COPY --from=builder /app/client/dist /usr/share/nginx/html @@ -55,6 +60,20 @@ server { } EOF +# Create entrypoint script to substitute environment variables at runtime +COPY <<'EOF' /docker-entrypoint.sh +#!/bin/sh +set -e + +# Replace __VITE_BACKEND_URL__ placeholder with actual environment variable value +find /usr/share/nginx/html -type f -name "*.js" -exec sed -i "s|__VITE_BACKEND_URL__|${VITE_BACKEND_URL:-http://localhost:8000}|g" {} \; + +# Start nginx +exec nginx -g "daemon off;" +EOF + +RUN chmod +x /docker-entrypoint.sh + EXPOSE 80 -CMD ["nginx", "-g", "daemon off;"] +CMD ["/docker-entrypoint.sh"] diff --git a/Dockerfile.worker b/docker/Dockerfile.worker similarity index 100% rename from Dockerfile.worker rename to docker/Dockerfile.worker From d97dbdb1746167fea432584744e86a39c308d908 Mon Sep 17 00:00:00 2001 From: williamschen23 Date: Wed, 24 Dec 2025 12:49:28 -0500 Subject: [PATCH 06/29] add production env example --- .env.prod.example | 28 ++++++++++++++++++++++++++++ 1 file 
changed, 28 insertions(+) create mode 100644 .env.prod.example diff --git a/.env.prod.example b/.env.prod.example new file mode 100644 index 00000000..a9d90428 --- /dev/null +++ b/.env.prod.example @@ -0,0 +1,28 @@ +# postgres +POSTGRES_DB=shubble +POSTGRES_USER=shubble +POSTGRES_PASSWORD=shubble +POSTGRES_PORT=5432 + +# python env variables +DATABASE_URL=postgresql://shubble:shubble@postgres:5432/shubble +FLASK_ENV=production +FLASK_DEBUG=false +LOG_LEVEL=INFO + +# Backend Docker +FRONTEND_URL=http://localhost:3000 +BACKEND_PORT=8000 + +# Secrets +API_KEY= +SAMSARA_SECRET= + +# redis +REDIS_PORT=6379 +REDIS_URL=redis://redis:6379/0 + +# Frontend Docker +FRONTEND_PORT=3000 +# Vite +VITE_BACKEND_URL=http://localhost:8000 From 2814497298144a77ee2eff997e37da2448877a83 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Wed, 24 Dec 2025 12:39:36 -0500 Subject: [PATCH 07/29] support running tests in docker as well --- .dockerignore | 3 +- .env.example | 62 + docker-compose.yml | 38 + docker/Dockerfile.test-client | 75 + docker/Dockerfile.test-server | 34 + test-client/package-lock.json | 2833 +++++++++++++++++++++++++++++++++ test-client/src/api.js | 14 +- test-client/vite.config.js | 8 + 8 files changed, 3060 insertions(+), 7 deletions(-) create mode 100644 .env.example create mode 100644 docker/Dockerfile.test-client create mode 100644 docker/Dockerfile.test-server create mode 100644 test-client/package-lock.json diff --git a/.dockerignore b/.dockerignore index 5aa08335..445d44c2 100644 --- a/.dockerignore +++ b/.dockerignore @@ -30,7 +30,8 @@ yarn-error.log* client/node_modules/ client/dist/ client/.vite/ -test-client/ +test-client/node_modules/ +test-client/dist/ # Environment .env diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..247feade --- /dev/null +++ b/.env.example @@ -0,0 +1,62 @@ +# ============================================================================= +# SHUBBLE ENVIRONMENT CONFIGURATION +# ============================================================================= +# Copy this file to .env and update with your values + +# ============================================================================= +# SERVICE PORTS (Docker) +# ============================================================================= +# Configure which ports services are exposed on the host machine +FRONTEND_PORT=3000 +BACKEND_PORT=8000 +POSTGRES_PORT=5432 +REDIS_PORT=6379 +TEST_FRONTEND_PORT=5174 +TEST_BACKEND_PORT=4000 + +# ============================================================================= +# SERVICE URLS +# ============================================================================= +# Configure URLs for all services +# Format: http://host:port (do not include trailing slash) + +# Main application URLs +FRONTEND_URL=http://localhost:3000 +VITE_FRONTEND_URL=http://localhost:5173 +VITE_BACKEND_URL=http://localhost:8000 + +# Test/Mock service URLs (for development/testing) +TEST_FRONTEND_URL=http://localhost:5174 +VITE_TEST_FRONTEND_URL=http://localhost:5174 +VITE_TEST_BACKEND_URL=http://localhost:4000 + +# ============================================================================= +# DATABASE +# ============================================================================= +# PostgreSQL credentials +POSTGRES_DB=shubble +POSTGRES_USER=shubble +POSTGRES_PASSWORD=shubble + +# PostgreSQL connection string +DATABASE_URL=postgresql://shubble:shubble@localhost:5432/shubble + +# ============================================================================= +# REDIS
CACHE +# ============================================================================= +# Redis connection string +REDIS_URL=redis://localhost:6379/0 + +# ============================================================================= +# FLASK CONFIGURATION +# ============================================================================= +FLASK_ENV=development +FLASK_DEBUG=true +LOG_LEVEL=INFO + +# ============================================================================= +# SAMSARA API (Optional - for production) +# ============================================================================= +# Leave empty to use Mock Samsara API (test-server) in development +API_KEY= +SAMSARA_SECRET= diff --git a/docker-compose.yml b/docker-compose.yml index 0a52076b..af6aacba 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -85,6 +85,44 @@ services: - backend restart: unless-stopped + test-server: + build: + context: . + dockerfile: docker/Dockerfile.test-server + ports: + - "${TEST_BACKEND_PORT:-4000}:4000" + extra_hosts: + - "localhost:host-gateway" + environment: + TEST_FRONTEND_URL: ${TEST_FRONTEND_URL:-http://localhost:5174} + DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + FLASK_ENV: ${FLASK_ENV:-development} + FLASK_DEBUG: ${FLASK_DEBUG:-true} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + profiles: + - test + + test-client: + build: + context: . + dockerfile: docker/Dockerfile.test-client + ports: + - "${TEST_FRONTEND_PORT:-5174}:80" + environment: + VITE_TEST_BACKEND_URL: ${VITE_TEST_BACKEND_URL:-http://localhost:4000} + depends_on: + - test-server + restart: unless-stopped + profiles: + - test + volumes: postgres_data: redis_data: diff --git a/docker/Dockerfile.test-client b/docker/Dockerfile.test-client new file mode 100644 index 00000000..29c8c164 --- /dev/null +++ b/docker/Dockerfile.test-client @@ -0,0 +1,75 @@ +# Test Client Dockerfile for Mock Samsara UI +FROM node:20-alpine AS builder + +WORKDIR /app + +# Copy package files +COPY test-client/package*.json ./test-client/ + +# Install dependencies +WORKDIR /app/test-client +RUN npm ci + +# Copy source files +COPY test-client/ ./ + +# Build the application with a placeholder that will be replaced at runtime +ENV VITE_TEST_BACKEND_URL=__VITE_TEST_BACKEND_URL__ +RUN npm run build + +# Production stage with nginx +FROM nginx:alpine + +# Install gettext for envsubst +RUN apk add --no-cache gettext + +# Copy built files to nginx +COPY --from=builder /app/test-client/dist /usr/share/nginx/html + +# Copy nginx configuration +COPY <=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", + "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": 
"^7.28.3", + "@babel/helpers": "^7.28.4", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": 
"sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.5" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": 
"sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.5", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": 
"sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": 
"sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.17.0", + "resolved": 
"https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz", + "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.1", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.54.0.tgz", + "integrity": "sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.54.0", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.54.0.tgz", + "integrity": "sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.54.0.tgz", + "integrity": "sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.54.0.tgz", + "integrity": "sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.54.0.tgz", + "integrity": "sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.54.0.tgz", + "integrity": "sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.54.0.tgz", + "integrity": "sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.54.0.tgz", + "integrity": "sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.54.0.tgz", + "integrity": "sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.54.0.tgz", + "integrity": "sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": 
true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.54.0.tgz", + "integrity": "sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.54.0.tgz", + "integrity": "sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.54.0.tgz", + "integrity": "sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.54.0.tgz", + "integrity": "sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.54.0.tgz", + "integrity": "sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.54.0.tgz", + "integrity": "sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.54.0.tgz", + "integrity": "sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.54.0.tgz", + "integrity": "sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.54.0.tgz", + "integrity": 
"sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.54.0.tgz", + "integrity": "sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.54.0.tgz", + "integrity": "sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.54.0.tgz", + "integrity": "sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": 
"sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "19.2.7", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", + "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.2.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/balanced-match": { + 
"version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.11", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz", + "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001761", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001761.tgz", + "integrity": "sha512-JF9ptu1vP2coz98+5051jZ4PwQgd2ni8A+gYSN7EA7dPKIMf0pDlSUxhdmVOaV3/fYK5uWBkgSXJaRLr4+3A6g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, + "license": "ISC" + }, + "node_modules/esbuild": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": 
"0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.2.0.tgz", + "integrity": "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" + } + }, + "node_modules/eslint-plugin-react-refresh": { + "version": "0.4.26", + 
"resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.26.tgz", + "integrity": "sha512-1RETEylht2O6FM/MvgnyvT+8K21wLqDNg4qD51Zj3guhjt433XbnnkVttHMyaVyAFD03QSV4LPS5iE3VQmO7XQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "eslint": ">=8.40" + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", 
+ "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + 
"version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/react": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", + "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.3" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/rollup": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.54.0.tgz", + "integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.54.0", + "@rollup/rollup-android-arm64": "4.54.0", + "@rollup/rollup-darwin-arm64": "4.54.0", + "@rollup/rollup-darwin-x64": "4.54.0", + "@rollup/rollup-freebsd-arm64": "4.54.0", + "@rollup/rollup-freebsd-x64": "4.54.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.54.0", + "@rollup/rollup-linux-arm-musleabihf": "4.54.0", + "@rollup/rollup-linux-arm64-gnu": "4.54.0", + "@rollup/rollup-linux-arm64-musl": "4.54.0", + "@rollup/rollup-linux-loong64-gnu": "4.54.0", + "@rollup/rollup-linux-ppc64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-musl": "4.54.0", + "@rollup/rollup-linux-s390x-gnu": "4.54.0", + "@rollup/rollup-linux-x64-gnu": "4.54.0", + "@rollup/rollup-linux-x64-musl": "4.54.0", + "@rollup/rollup-openharmony-arm64": "4.54.0", + "@rollup/rollup-win32-arm64-msvc": "4.54.0", + "@rollup/rollup-win32-ia32-msvc": "4.54.0", + "@rollup/rollup-win32-x64-gnu": "4.54.0", + "@rollup/rollup-win32-x64-msvc": "4.54.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/vite": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", + "integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + 
"sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/test-client/src/api.js b/test-client/src/api.js index 5d747b55..74ee7d82 100644 --- a/test-client/src/api.js +++ b/test-client/src/api.js @@ -1,24 +1,26 @@ // api wrappers +const API_BASE_URL = import.meta.env.VITE_TEST_BACKEND_URL || ''; + export function fetchShuttles() { - return fetch("/api/shuttles"); + return fetch(`${API_BASE_URL}/api/shuttles`); } export function fetchEvents() { - return fetch("/api/events/today"); + return fetch(`${API_BASE_URL}/api/events/today`); } export function deleteEvents(keepShuttles) { - return fetch(`/api/events/today?keepShuttles=${keepShuttles}`, {method: "DELETE"}); + return fetch(`${API_BASE_URL}/api/events/today?keepShuttles=${keepShuttles}`, {method: "DELETE"}); } export function fetchRoutes() { - return fetch("/api/routes"); + return fetch(`${API_BASE_URL}/api/routes`); } // signal is optional export function addShuttle(signal) { - return fetch("/api/shuttles", { + return fetch(`${API_BASE_URL}/api/shuttles`, { method: "POST", headers: { "Content-Type": "application/json" }, signal: signal @@ -27,7 +29,7 @@ export function addShuttle(signal) { // expects stateObj { state, route, more data if necessary... 
} export function setNextState(shuttleId, stateObj, signal) { - return fetch(`/api/shuttles/${shuttleId}/set-next-state`, { + return fetch(`${API_BASE_URL}/api/shuttles/${shuttleId}/set-next-state`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(stateObj), diff --git a/test-client/vite.config.js b/test-client/vite.config.js index 8b0f57b9..9955946b 100644 --- a/test-client/vite.config.js +++ b/test-client/vite.config.js @@ -4,4 +4,12 @@ import react from '@vitejs/plugin-react' // https://vite.dev/config/ export default defineConfig({ plugins: [react()], + server: { + host: '0.0.0.0', + port: 5174, + }, + define: { + 'import.meta.env.VITE_TEST_BACKEND_URL': JSON.stringify(process.env.VITE_TEST_BACKEND_URL || 'http://localhost:4000'), + 'import.meta.env.VITE_TEST_FRONTEND_URL': JSON.stringify(process.env.VITE_TEST_FRONTEND_URL || 'http://localhost:5174') + } }) From da655bee2247c0f56303ec5f7e1779bf82bf1462 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Wed, 24 Dec 2025 13:09:19 -0500 Subject: [PATCH 08/29] create profiles --- docker-compose.yml | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index af6aacba..798040a2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,6 +14,9 @@ services: interval: 10s timeout: 5s retries: 5 + profiles: + - backend + - test redis: image: redis:7-alpine @@ -27,6 +30,9 @@ services: interval: 10s timeout: 5s retries: 5 + profiles: + - backend + - test backend: build: @@ -51,6 +57,8 @@ services: redis: condition: service_healthy restart: unless-stopped + profiles: + - backend worker: build: @@ -72,6 +80,8 @@ services: backend: condition: service_started restart: unless-stopped + profiles: + - backend frontend: build: @@ -81,9 +91,9 @@ services: - "${FRONTEND_PORT:-3000}:80" environment: VITE_BACKEND_URL: ${VITE_BACKEND_URL:-http://localhost:8000} - depends_on: - - backend restart: unless-stopped + profiles: + - frontend test-server: build: @@ -117,8 +127,6 @@ services: - "${TEST_FRONTEND_PORT:-5174}:80" environment: VITE_TEST_BACKEND_URL: ${VITE_TEST_BACKEND_URL:-http://localhost:4000} - depends_on: - - test-server restart: unless-stopped profiles: - test From 48aa023ffcba156da1032fe5c5c16320201e39c3 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Wed, 24 Dec 2025 13:29:08 -0500 Subject: [PATCH 09/29] move nginx config to conf files --- docker/Dockerfile.frontend | 29 +---------------------------- docker/Dockerfile.test-client | 29 +---------------------------- docker/nginx-frontend.conf | 26 ++++++++++++++++++++++++++ docker/nginx-test-client.conf | 26 ++++++++++++++++++++++++++ 4 files changed, 54 insertions(+), 56 deletions(-) create mode 100644 docker/nginx-frontend.conf create mode 100644 docker/nginx-test-client.conf diff --git a/docker/Dockerfile.frontend b/docker/Dockerfile.frontend index 8caf9343..b148257c 100644 --- a/docker/Dockerfile.frontend +++ b/docker/Dockerfile.frontend @@ -31,34 +31,7 @@ RUN apk add --no-cache gettext COPY --from=builder /app/client/dist /usr/share/nginx/html # Copy nginx configuration -COPY < Date: Wed, 24 Dec 2025 13:33:34 -0500 Subject: [PATCH 10/29] improve docs --- docs/INSTALLATION.md | 294 ++++++++++++++++++ INTRO_TO_UI.md => docs/more/INTRO_TO_UI.md | 0 .../more/web-development-intro.md | 0 3 files changed, 294 insertions(+) create mode 100644 docs/INSTALLATION.md rename INTRO_TO_UI.md => docs/more/INTRO_TO_UI.md (100%) rename web-development-intro.md => 
docs/more/web-development-intro.md (100%) diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md new file mode 100644 index 00000000..8946036b --- /dev/null +++ b/docs/INSTALLATION.md @@ -0,0 +1,294 @@ +# Installation Guide + +This guide explains how to set up and run the Shubble development environment. + +## Architecture Overview + +The codebase is organized into three main areas: + +- **Frontend** - Main React application for end users +- **Backend** - Flask API server, PostgreSQL database, Redis cache, and background worker +- **Test** - Mock Samsara API server and test client for development/testing + +## Running Services: Docker vs Host + +For each area, you have two options for running the services: + +### Option 1: Dockerized (Recommended for Quick Start) + +**Advantages:** +- Zero local setup required (no Node.js, Python, PostgreSQL, Redis installation) +- Consistent environment across all developers +- Easy to run multiple profiles without conflicts +- Isolated from your local system + +**Disadvantages:** +- Slower hot reload and rebuild times +- More difficult to debug (can't easily attach debuggers) +- Changes require container rebuilds +- Less visibility into the running processes + +**When to use:** +- First time setup +- Running services you're not actively developing +- Testing the full stack together +- CI/CD environments + +### Option 2: Host (Recommended for Active Development) + +**Advantages:** +- Instant hot reload during development +- Easy debugging with IDE integration +- Direct access to logs and processes +- Faster iteration cycle +- Can use local development tools + +**Disadvantages:** +- Requires installing dependencies (Node.js, Python, PostgreSQL, Redis) +- Potential version conflicts with other projects +- Manual setup required +- Environment differences between developers + +**When to use:** +- Actively developing/debugging a specific area +- Writing new features or fixing bugs +- Need to use debugging tools +- Frequent code changes + +## Recommendation + +**Run services on host for areas you're actively working on, and use Docker for the rest.** + +For example: +- Working on the frontend? Run frontend on host, backend in Docker +- Working on the backend? Run backend on host, frontend in Docker +- Testing integration? Run everything in Docker + +## Quick Start with Docker + +### Prerequisites + +- Docker and Docker Compose installed +- Copy `.env.example` to `.env` and configure as needed + +### Running Services + +```bash +# Run only backend services (API, database, Redis, worker) +docker compose --profile backend up + +# Run frontend (includes backend automatically) +docker compose --profile frontend up + +# Run test services (mock Samsara API and test client) +docker compose --profile test up + +# Run multiple profiles +docker compose --profile backend --profile test up + +# Run everything +docker compose --profile "*" up +``` + +### Stopping Services + +```bash +# Stop all running services +docker compose down + +# Stop and remove volumes (clean slate) +docker compose down -v +``` + +## Running on Host + +### Prerequisites + +**All environments:** +- Node.js 20+ +- Python 3.11+ +- PostgreSQL 16+ +- Redis 7+ + +### Backend Setup + +1. **Install Python dependencies:** + ```bash + pip install -r requirements.txt + ``` + +2. **Set up environment variables:** + ```bash + cp .env.example .env + # Edit .env with your local database/Redis URLs + ``` + +3. 
**Start PostgreSQL and Redis:** + ```bash + # Option 1: Run just database services in Docker + docker compose up postgres redis + + # Option 2: Use local installations + # (configure DATABASE_URL and REDIS_URL in .env accordingly) + ``` + +4. **Run database migrations:** + ```bash + flask db upgrade + ``` + +5. **Start the backend server:** + ```bash + python -m flask run --port 8000 + ``` + +6. **Start the worker (in a separate terminal):** + ```bash + python -m celery -A app.worker worker --loglevel=info + ``` + +### Frontend Setup + +1. **Install dependencies:** + ```bash + npm install + ``` + +2. **Set up environment variables:** + ```bash + # In .env, set: + VITE_BACKEND_URL=http://localhost:8000 + ``` + +3. **Start the development server:** + ```bash + npm run dev + ``` + +4. **Access the application:** + - Frontend: http://localhost:5173 + - Backend API: http://localhost:8000 + +### Test Services Setup + +1. **Install test-client dependencies:** + ```bash + cd test-client + npm install + ``` + +2. **Set up environment variables:** + ```bash + # In .env, set: + VITE_TEST_BACKEND_URL=http://localhost:4000 + ``` + +3. **Start the test server (in one terminal):** + ```bash + cd test-server + python server.py + ``` + +4. **Start the test client (in another terminal):** + ```bash + cd test-client + npm run dev + ``` + +5. **Access the test services:** + - Test Client: http://localhost:5174 + - Test Server API: http://localhost:4000 + +## Mixed Setup (Recommended) + +The most common development setup is to run some services on host and others in Docker: + +### Example: Frontend Development + +```bash +# Terminal 1: Run backend in Docker +docker compose --profile backend up + +# Terminal 2: Run frontend on host +npm run dev +``` + +### Example: Backend Development + +```bash +# Terminal 1: Run database services in Docker +docker compose up postgres redis + +# Terminal 2: Run backend on host +python -m flask run --port 8000 + +# Terminal 3: Run worker on host +python -m celery -A app.worker worker --loglevel=info + +# Terminal 4 (optional): Run frontend in Docker +docker compose --profile frontend up +``` + +## Environment Variables + +Key environment variables (see `.env.example` for full list): + +### Service URLs +- `FRONTEND_URL` - Main frontend URL +- `VITE_BACKEND_URL` - Backend API URL for frontend +- `TEST_FRONTEND_URL` - Test client URL +- `VITE_TEST_BACKEND_URL` - Test server API URL for test client + +### Database & Cache +- `DATABASE_URL` - PostgreSQL connection string +- `REDIS_URL` - Redis connection string + +### Service Ports +- `FRONTEND_PORT` - Port for frontend (default: 3000) +- `BACKEND_PORT` - Port for backend API (default: 8000) +- `TEST_FRONTEND_PORT` - Port for test client (default: 5174) +- `TEST_BACKEND_PORT` - Port for test server (default: 4000) + +## Troubleshooting + +### Port Conflicts + +If you see "port already in use" errors: + +```bash +# Check what's using a port +lsof -i :8000 + +# Stop Docker services +docker compose down + +# Change ports in .env if needed +``` + +### Database Issues + +```bash +# Reset the database +docker compose down -v +docker compose up postgres + +# Run migrations +flask db upgrade +``` + +### Dependency Issues + +```bash +# Clean install for Node.js +rm -rf node_modules package-lock.json +npm install + +# Clean install for Python +pip install --force-reinstall -r requirements.txt +``` + +## Next Steps + +- See the main README for project overview +- Check individual service directories for specific documentation +- Review 
`.env.example` for all configuration options diff --git a/INTRO_TO_UI.md b/docs/more/INTRO_TO_UI.md similarity index 100% rename from INTRO_TO_UI.md rename to docs/more/INTRO_TO_UI.md diff --git a/web-development-intro.md b/docs/more/web-development-intro.md similarity index 100% rename from web-development-intro.md rename to docs/more/web-development-intro.md From cb18b24b6226d35041d26c819766e7b9f0f8cc5f Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Wed, 24 Dec 2025 23:00:56 -0500 Subject: [PATCH 11/29] first shot at conversion --- .env.example | 8 +- alembic.ini | 142 ++++++++++ alembic/README | 1 + alembic/env.py | 106 ++++++++ alembic/script.py.mako | 28 ++ docker/Dockerfile.backend | 22 +- docs/INSTALLATION.md | 14 +- requirements.txt | 22 +- server/__init__.py | 103 ++++++-- server/config.py | 63 +++-- server/database.py | 70 +++++ server/models.py | 139 +++++----- server/routes.py | 501 ++++++++++++++++++++--------------- server/time_utils.py | 12 +- server/worker.py | 544 +++++++++++++++++++++----------------- shubble.py | 12 +- 16 files changed, 1189 insertions(+), 598 deletions(-) create mode 100644 alembic.ini create mode 100644 alembic/README create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 server/database.py diff --git a/.env.example b/.env.example index 247feade..cb47fed2 100644 --- a/.env.example +++ b/.env.example @@ -48,10 +48,10 @@ DATABASE_URL=postgresql://shubble:shubble@localhost:5432/shubble REDIS_URL=redis://localhost:6379/0 # ============================================================================= -# FLASK CONFIGURATION +# FASTAPI CONFIGURATION # ============================================================================= -FLASK_ENV=development -FLASK_DEBUG=true +ENV=development +DEBUG=true LOG_LEVEL=INFO # ============================================================================= @@ -59,4 +59,4 @@ LOG_LEVEL=INFO # ============================================================================= # Leave empty to use Mock Samsara API (test-server) in development API_KEY= -SAMSARA_SECRET= +SAMSARA_SECRET_BASE64= diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 00000000..72919315 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,142 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library. 
+# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to <script_location>/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "path_separator"
+# below.
+# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
+
+# path_separator; This indicates what character is used to split lists of file
+# paths, including version_locations and prepend_sys_path within configparser
+# files such as alembic.ini.
+# The default rendered in new alembic.ini files is "os", which uses os.pathsep
+# to provide os-dependent path splitting.
+#
+# Note that in order to support legacy alembic.ini files, this default does NOT
+# take place if path_separator is not present in alembic.ini. If this
+# option is omitted entirely, fallback logic is as follows:
+#
+# 1. Parsing of the version_locations option falls back to using the legacy
+#    "version_path_separator" key, which if absent then falls back to the legacy
+#    behavior of splitting on spaces and/or commas.
+# 2. Parsing of the prepend_sys_path option falls back to the legacy
+#    behavior of splitting on spaces, commas, or colons.
+#
+# Valid values for path_separator are:
+#
+# path_separator = :
+# path_separator = ;
+# path_separator = space
+# path_separator = newline
+#
+# Use os.pathsep. Default configuration used for new projects.
+path_separator = os
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+# database URL. This is consumed by the user-maintained env.py script only.
+# other means of configuring database URLs may be customized within the env.py
+# file.
+# sqlalchemy.url = driver://user:pass@localhost/dbname
+# Note: Database URL is loaded from .env file in env.py
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration. This is also consumed by the user-maintained
+# env.py script only.
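Because `sqlalchemy.url` is left unset above and resolved in env.py, the upgrade step the backend image runs as `alembic upgrade head` can also be driven from Python through Alembic's command API. A minimal sketch, not part of the patch, assuming `alembic.ini` sits in the working directory:

```python
from alembic import command
from alembic.config import Config

def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    # Config() reads script_location and the logging sections from the ini;
    # env.py then supplies DATABASE_URL from application settings at runtime.
    cfg = Config(ini_path)
    command.upgrade(cfg, "head")

def dump_offline_sql(ini_path: str = "alembic.ini") -> None:
    # sql=True selects Alembic's "offline mode": the migration SQL is
    # emitted to stdout instead of being executed against a database.
    command.upgrade(Config(ini_path), "head", sql=True)

if __name__ == "__main__":
    upgrade_to_head()
```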
+[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 00000000..98e4f9c4 --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 00000000..dc57a649 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,106 @@ +"""Alembic environment configuration for async SQLAlchemy.""" +import asyncio +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context + +# Load application config to get DATABASE_URL +from server.config import settings +from server.database import Base + +# Import all models to ensure they're registered with Base +from server.models import ( + Vehicle, + GeofenceEvent, + VehicleLocation, + Driver, + DriverVehicleAssignment, +) + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Set database URL from settings +database_url = settings.DATABASE_URL +if database_url.startswith("postgresql://"): + database_url = database_url.replace("postgresql://", "postgresql+asyncpg://") +elif database_url.startswith("postgres://"): + database_url = database_url.replace("postgres://", "postgresql+asyncpg://") + +config.set_main_option("sqlalchemy.url", database_url) + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + """Run migrations with the given connection.""" + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """Run migrations in 'online' mode with async support.""" + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 00000000..480b130d --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend index 3d807ce4..5bfaf9d0 100644 --- a/docker/Dockerfile.backend +++ b/docker/Dockerfile.backend @@ -1,4 +1,4 @@ -# Backend Dockerfile for Shubble Flask API +# Backend Dockerfile for Shubble FastAPI FROM python:3.12-slim WORKDIR /app @@ -16,7 +16,8 @@ RUN pip install --no-cache-dir --root-user-action=ignore -r requirements.txt # Copy application code COPY server/ ./server/ COPY data/ ./data/ -COPY migrations/ ./migrations/ +COPY alembic/ ./alembic/ +COPY alembic.ini . COPY shubble.py . 
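For orientation, a revision rendered from the script.py.mako template above could look like the following. This is a hypothetical sketch, not a file from the repository: the revision ID, date, and message are invented, and the column list is trimmed to the two required fields of the `vehicles` model:

```python
"""create vehicles table

Revision ID: 0a1b2c3d4e5f
Revises:
Create Date: 2025-12-25 00:00:00
"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = "0a1b2c3d4e5f"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    op.create_table(
        "vehicles",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("name", sa.String(), nullable=False),
    )


def downgrade() -> None:
    """Downgrade schema."""
    op.drop_table("vehicles")
```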
# Create non-root user @@ -28,15 +29,12 @@ EXPOSE 8000 # Health check HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ - CMD python -c "import requests; requests.get('http://localhost:8000/api/locations', timeout=5)" + CMD python -c "import httpx; httpx.get('http://localhost:8000/api/locations', timeout=5.0)" -# Run database migrations and start gunicorn -CMD flask --app server:create_app db upgrade && \ - gunicorn shubble:app \ - --bind 0.0.0.0:8000 \ +# Run database migrations and start uvicorn +CMD alembic upgrade head && \ + uvicorn shubble:app \ + --host 0.0.0.0 \ + --port 8000 \ --workers 2 \ - --threads 4 \ - --timeout 120 \ - --log-level ${LOG_LEVEL:-info} \ - --access-logfile - \ - --error-logfile - + --log-level ${LOG_LEVEL:-info} diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md index 8946036b..4abc5ced 100644 --- a/docs/INSTALLATION.md +++ b/docs/INSTALLATION.md @@ -7,7 +7,7 @@ This guide explains how to set up and run the Shubble development environment. The codebase is organized into three main areas: - **Frontend** - Main React application for end users -- **Backend** - Flask API server, PostgreSQL database, Redis cache, and background worker +- **Backend** - FastAPI server (async), PostgreSQL database, Redis cache, and background worker - **Test** - Mock Samsara API server and test client for development/testing ## Running Services: Docker vs Host @@ -134,17 +134,17 @@ docker compose down -v 4. **Run database migrations:** ```bash - flask db upgrade + alembic upgrade head ``` 5. **Start the backend server:** ```bash - python -m flask run --port 8000 + uvicorn shubble:app --host 0.0.0.0 --port 8000 --reload ``` 6. **Start the worker (in a separate terminal):** ```bash - python -m celery -A app.worker worker --loglevel=info + python -m server.worker ``` ### Frontend Setup @@ -220,10 +220,10 @@ npm run dev docker compose up postgres redis # Terminal 2: Run backend on host -python -m flask run --port 8000 +uvicorn shubble:app --host 0.0.0.0 --port 8000 --reload # Terminal 3: Run worker on host -python -m celery -A app.worker worker --loglevel=info +python -m server.worker # Terminal 4 (optional): Run frontend in Docker docker compose --profile frontend up @@ -273,7 +273,7 @@ docker compose down -v docker compose up postgres # Run migrations -flask db upgrade +alembic upgrade head ``` ### Dependency Issues diff --git a/requirements.txt b/requirements.txt index 7d031c84..6e6c54b1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,14 @@ -Flask>=3.1.1 -Flask-CORS>=5.0.0 -requests>=2.32.4 -psycopg2-binary>=2.9 -Flask-SQLAlchemy>=3.1.1 -Flask-Migrate>=4.1.0 +fastapi>=0.115.0 +uvicorn[standard]>=0.34.0 +httpx>=0.28.1 +asyncpg>=0.30.0 SQLAlchemy>=2.0.41 -gunicorn>=22.0.0 -numpy>=2.3.2 -flask_caching>=2.3.1 +alembic>=1.14.0 +pydantic>=2.10.0 +pydantic-settings>=2.7.0 python-dotenv>=1.1.1 -redis>=5.2.1 +redis +fastapi-cache2[redis]>=0.2.2 +numpy pandas>=2.0.0 -scipy>=1.10.0 \ No newline at end of file +scipy diff --git a/server/__init__.py b/server/__init__.py index cc9c5376..1dbd578f 100644 --- a/server/__init__.py +++ b/server/__init__.py @@ -1,38 +1,89 @@ -from flask import Flask -from flask_sqlalchemy import SQLAlchemy -from flask_migrate import Migrate -from flask_caching import Cache -from flask_cors import CORS +"""FastAPI application factory.""" import logging -from .config import Config +from contextlib import asynccontextmanager +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from 
fastapi.staticfiles import StaticFiles +from fastapi_cache import FastAPICache +from fastapi_cache.backends.redis import RedisBackend +from redis import asyncio as aioredis -numeric_level = logging._nameToLevel.get(Config.LOG_LEVEL.upper(), logging.INFO) +from .config import settings +from .database import create_async_db_engine, create_session_factory + + +# Configure logging +numeric_level = logging._nameToLevel.get(settings.LOG_LEVEL.upper(), logging.INFO) logging.basicConfig( level=numeric_level, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", ) -db = SQLAlchemy() -migrate = Migrate() -cache = Cache() +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Lifespan context manager for startup and shutdown events.""" + # Startup + logger.info("Starting up FastAPI application...") + + # Initialize database engine and session factory + app.state.db_engine = create_async_db_engine( + settings.DATABASE_URL, echo=settings.DEBUG + ) + app.state.session_factory = create_session_factory(app.state.db_engine) + logger.info("Database engine and session factory initialized") + + # Initialize Redis cache + app.state.redis = await aioredis.from_url( + settings.REDIS_URL, + encoding="utf-8", + decode_responses=True, + ) + FastAPICache.init(RedisBackend(app.state.redis), prefix="fastapi-cache") + logger.info("Redis cache initialized") + + yield + + # Shutdown + logger.info("Shutting down FastAPI application...") + await app.state.redis.close() + await app.state.db_engine.dispose() + logger.info("Database connections closed") + + +def create_app() -> FastAPI: + """Create and configure the FastAPI application.""" + app = FastAPI( + title="Shubble API", + description="Shuttle tracking API for Shubble", + version="2.0.0", + lifespan=lifespan, + ) -def create_app(): - # create and configure the app - app = Flask(__name__, static_folder='../client/dist', static_url_path='/') - app.config.from_object(Config) + # Configure CORS + app.add_middleware( + CORSMiddleware, + allow_origins=[settings.FRONTEND_URL], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) - # configure CORS - CORS(app, origins=[app.config['FRONTEND_URL']], supports_credentials=True) + # Register routes + from .routes import router + app.include_router(router) - # initialize database - db.init_app(app) - # make any necessary migrations - migrate.init_app(app, db) + # Mount static files for frontend (this should be last) + try: + app.mount("/", StaticFiles(directory="../client/dist", html=True), name="static") + except RuntimeError: + # Static directory doesn't exist yet (development mode) + logger.warning("Static files directory not found. Skipping static file mounting.") - # initialize cache - cache.init_app(app, config={'CACHE_TYPE': 'RedisCache', 'CACHE_REDIS_URL': app.config["REDIS_URL"]}) + return app - # register routes - from . 
import routes - app.register_blueprint(routes.bp) - return app \ No newline at end of file +# Create app instance +app = create_app() diff --git a/server/config.py b/server/config.py index 334caffd..89902405 100644 --- a/server/config.py +++ b/server/config.py @@ -1,33 +1,52 @@ +"""Configuration using Pydantic BaseSettings.""" import base64 -from dotenv import load_dotenv -import os +from typing import Optional from zoneinfo import ZoneInfo +from pydantic import field_validator +from pydantic_settings import BaseSettings, SettingsConfigDict -load_dotenv() +class Settings(BaseSettings): + """Application settings loaded from environment variables.""" -class Config: - # hosting settings - DEBUG = os.environ.get('FLASK_DEBUG', 'true').lower() == 'true' - ENV = os.environ.get('FLASK_ENV', 'development').lower() - LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO').upper() + model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore") + + # Hosting settings + DEBUG: bool = True + ENV: str = "development" + LOG_LEVEL: str = "INFO" # CORS settings - FRONTEND_URL = os.environ.get('FRONTEND_URL', 'http://localhost:5173') - TEST_FRONTEND_URL = os.environ.get('TEST_FRONTEND_URL', 'http://localhost:5174') + FRONTEND_URL: str = "http://localhost:5173" + TEST_FRONTEND_URL: str = "http://localhost:5174" + + # Database settings + DATABASE_URL: str + + # Redis settings + REDIS_URL: str = "redis://localhost:6379/0" + + # Samsara API settings + SAMSARA_SECRET_BASE64: Optional[str] = None + + # Shubble settings + CAMPUS_TZ: ZoneInfo = ZoneInfo("America/New_York") - # database settings - SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL") - if SQLALCHEMY_DATABASE_URI.startswith('postgres://'): - SQLALCHEMY_DATABASE_URI = SQLALCHEMY_DATABASE_URI.replace('postgres://', 'postgresql://', 1) - SQLALCHEMY_TRACK_MODIFICATIONS = False + @field_validator("DATABASE_URL") + @classmethod + def fix_database_url(cls, v: str) -> str: + """Convert postgres:// to postgresql:// for SQLAlchemy compatibility.""" + if v.startswith("postgres://"): + return v.replace("postgres://", "postgresql://", 1) + return v - REDIS_URL = os.environ.get('REDIS_URL') - if secret := os.environ.get('SAMSARA_SECRET', None): - SAMSARA_SECRET = base64.b64decode(secret.encode('utf-8')) - else: - SAMSARA_SECRET = None + @property + def SAMSARA_SECRET(self) -> Optional[bytes]: + """Decode base64 Samsara secret.""" + if self.SAMSARA_SECRET_BASE64: + return base64.b64decode(self.SAMSARA_SECRET_BASE64.encode("utf-8")) + return None - # shubble settings - CAMPUS_TZ = ZoneInfo('America/New_York') \ No newline at end of file +# Global settings instance +settings = Settings() diff --git a/server/database.py b/server/database.py new file mode 100644 index 00000000..e4e245b5 --- /dev/null +++ b/server/database.py @@ -0,0 +1,70 @@ +"""Async database configuration for FastAPI.""" +from typing import AsyncGenerator, TYPE_CHECKING +from sqlalchemy.ext.asyncio import ( + create_async_engine, + AsyncSession, + async_sessionmaker, + AsyncEngine, +) +from sqlalchemy.orm import declarative_base + +if TYPE_CHECKING: + from fastapi import Request + +Base = declarative_base() + + +def create_async_db_engine(database_url: str, echo: bool = False) -> AsyncEngine: + """ + Create an async database engine. 
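The engine helper above and the session factory defined just below can be exercised outside FastAPI as a quick smoke test. A minimal sketch, not part of the patch; the connection URL is the docker-compose default and is an assumption about your local setup:

```python
import asyncio

from sqlalchemy import text

from server.database import create_async_db_engine, create_session_factory

async def main() -> None:
    # Docker-compose default credentials; substitute your own DATABASE_URL.
    engine = create_async_db_engine(
        "postgresql://shubble:shubble@localhost:5432/shubble"
    )
    session_factory = create_session_factory(engine)
    async with session_factory() as session:
        # Trivial round trip to confirm the asyncpg driver and pool work.
        result = await session.execute(text("SELECT 1"))
        print(result.scalar_one())  # prints: 1
    await engine.dispose()

if __name__ == "__main__":
    asyncio.run(main())
```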
+ + Args: + database_url: Database connection URL + echo: Whether to log SQL statements + + Returns: + AsyncEngine instance + """ + # Convert postgresql:// to postgresql+asyncpg:// + if database_url.startswith("postgresql://"): + database_url = database_url.replace("postgresql://", "postgresql+asyncpg://") + elif database_url.startswith("postgres://"): + database_url = database_url.replace("postgres://", "postgresql+asyncpg://") + + return create_async_engine(database_url, echo=echo, pool_pre_ping=True) + + +def create_session_factory(engine: AsyncEngine): + """ + Create an async session factory. + + Args: + engine: AsyncEngine instance + + Returns: + async_sessionmaker instance + """ + return async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, + autocommit=False, + autoflush=False, + ) + + +async def get_db(request: "Request") -> AsyncGenerator[AsyncSession, None]: + """ + FastAPI dependency for getting async database sessions. + + Accesses the session_factory from app.state which is initialized + during application startup in the lifespan context manager. + + Args: + request: FastAPI Request object (injected automatically) + + Yields: + AsyncSession instance + """ + async with request.app.state.session_factory() as session: + yield session diff --git a/server/models.py b/server/models.py index 7a8ee844..b8df2028 100644 --- a/server/models.py +++ b/server/models.py @@ -1,94 +1,103 @@ -from datetime import datetime -from . import db - -class Vehicle(db.Model): - __tablename__ = 'vehicles' - - id = db.Column(db.String, primary_key=True) - name = db.Column(db.String, nullable=False) - asset_type = db.Column(db.String, default='vehicle') - license_plate = db.Column(db.String, nullable=True) - vin = db.Column(db.String, nullable=True) - - maintenance_id = db.Column(db.String, nullable=True) - gateway_model = db.Column(db.String, nullable=True) - gateway_serial = db.Column(db.String, nullable=True) +"""SQLAlchemy models for async database operations.""" +from datetime import datetime, timezone +from typing import Optional +from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from .database import Base + + +class Vehicle(Base): + __tablename__ = "vehicles" + + id: Mapped[str] = mapped_column(String, primary_key=True) + name: Mapped[str] = mapped_column(String, nullable=False) + asset_type: Mapped[str] = mapped_column(String, default="vehicle") + license_plate: Mapped[Optional[str]] = mapped_column(String, nullable=True) + vin: Mapped[Optional[str]] = mapped_column(String, nullable=True) + maintenance_id: Mapped[Optional[str]] = mapped_column(String, nullable=True) + gateway_model: Mapped[Optional[str]] = mapped_column(String, nullable=True) + gateway_serial: Mapped[Optional[str]] = mapped_column(String, nullable=True) + + # Relationships + geofence_events: Mapped[list["GeofenceEvent"]] = relationship(back_populates="vehicle", lazy="selectin") + locations: Mapped[list["VehicleLocation"]] = relationship(back_populates="vehicle", lazy="selectin") + driver_assignments: Mapped[list["DriverVehicleAssignment"]] = relationship(back_populates="vehicle", lazy="selectin") def __repr__(self): return f"" -class GeofenceEvent(db.Model): - __tablename__ = 'geofence_events' - id = db.Column(db.String, primary_key=True) # eventId from webhook - vehicle_id = db.Column(db.String, db.ForeignKey('vehicles.id'), nullable=False) - event_type = db.Column(db.String, nullable=False) - event_time = 
db.Column(db.DateTime, nullable=False) +class GeofenceEvent(Base): + __tablename__ = "geofence_events" - address_name = db.Column(db.String) - address_formatted = db.Column(db.String) - latitude = db.Column(db.Float) - longitude = db.Column(db.Float) - created_at = db.Column(db.DateTime, default=datetime.utcnow) + id: Mapped[str] = mapped_column(String, primary_key=True) # eventId from webhook + vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False) + event_type: Mapped[str] = mapped_column(String, nullable=False) + event_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + address_name: Mapped[Optional[str]] = mapped_column(String, nullable=True) + address_formatted: Mapped[Optional[str]] = mapped_column(String, nullable=True) + latitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + longitude: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) - vehicle = db.relationship("Vehicle", backref=db.backref("geofence_events", lazy=True)) + # Relationships + vehicle: Mapped["Vehicle"] = relationship(back_populates="geofence_events") def __repr__(self): return f"" -class VehicleLocation(db.Model): - __tablename__ = 'vehicle_locations' - - id = db.Column(db.Integer, primary_key=True) - - # Foreign key to vehicles.id - vehicle_id = db.Column(db.String, db.ForeignKey('vehicles.id'), nullable=False, index=True) - vehicle = db.relationship('Vehicle', backref='locations', lazy=True) - name = db.Column(db.String, nullable=True) +class VehicleLocation(Base): + __tablename__ = "vehicle_locations" - timestamp = db.Column(db.DateTime, nullable=False) - latitude = db.Column(db.Float, nullable=False) - longitude = db.Column(db.Float, nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False, index=True) + name: Mapped[Optional[str]] = mapped_column(String, nullable=True) + timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + latitude: Mapped[float] = mapped_column(Float, nullable=False) + longitude: Mapped[float] = mapped_column(Float, nullable=False) + heading_degrees: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + speed_mph: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + is_ecu_speed: Mapped[bool] = mapped_column(Boolean, default=False) + formatted_location: Mapped[Optional[str]] = mapped_column(String, nullable=True) + address_id: Mapped[Optional[str]] = mapped_column(String, nullable=True) + address_name: Mapped[Optional[str]] = mapped_column(String, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) - heading_degrees = db.Column(db.Float, nullable=True) - speed_mph = db.Column(db.Float, nullable=True) - is_ecu_speed = db.Column(db.Boolean, default=False) - - formatted_location = db.Column(db.String, nullable=True) - - address_id = db.Column(db.String, nullable=True) - address_name = db.Column(db.String, nullable=True) - - created_at = db.Column(db.DateTime, default=datetime.utcnow) + # Relationships + vehicle: Mapped["Vehicle"] = relationship(back_populates="locations") def __repr__(self): - return f"" + return f"" + +class Driver(Base): + __tablename__ = "drivers" -class Driver(db.Model): - __tablename__ = 'drivers' + id: 
Mapped[str] = mapped_column(String, primary_key=True) # Samsara driver ID + name: Mapped[str] = mapped_column(String, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) - id = db.Column(db.String, primary_key=True) # Samsara driver ID - name = db.Column(db.String, nullable=False) - created_at = db.Column(db.DateTime, default=datetime.utcnow) + # Relationships + assignments: Mapped[list["DriverVehicleAssignment"]] = relationship(back_populates="driver", lazy="selectin") def __repr__(self): return f"" -class DriverVehicleAssignment(db.Model): - __tablename__ = 'driver_vehicle_assignments' +class DriverVehicleAssignment(Base): + __tablename__ = "driver_vehicle_assignments" - id = db.Column(db.Integer, primary_key=True) - driver_id = db.Column(db.String, db.ForeignKey('drivers.id'), nullable=False, index=True) - vehicle_id = db.Column(db.String, db.ForeignKey('vehicles.id'), nullable=False, index=True) - assignment_start = db.Column(db.DateTime, nullable=False) - assignment_end = db.Column(db.DateTime, nullable=True) # null = currently assigned - created_at = db.Column(db.DateTime, default=datetime.utcnow) + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + driver_id: Mapped[str] = mapped_column(String, ForeignKey("drivers.id"), nullable=False, index=True) + vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False, index=True) + assignment_start: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + assignment_end: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True) # null = currently assigned + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc)) - driver = db.relationship('Driver', backref='assignments', lazy=True) - vehicle = db.relationship('Vehicle', backref='driver_assignments', lazy=True) + # Relationships + driver: Mapped["Driver"] = relationship(back_populates="assignments") + vehicle: Mapped["Vehicle"] = relationship(back_populates="driver_assignments") def __repr__(self): return f" {self.vehicle_id}>" diff --git a/server/routes.py b/server/routes.py index 66013321..5893e4fe 100644 --- a/server/routes.py +++ b/server/routes.py @@ -1,35 +1,48 @@ -from flask import Blueprint, request, jsonify, send_from_directory, current_app -from . 
import db, cache -from .models import Vehicle, GeofenceEvent, VehicleLocation, Driver, DriverVehicleAssignment +"""FastAPI routes for the Shubble API.""" +import logging +import hmac +from hashlib import sha256 +from datetime import datetime, timezone from pathlib import Path -from sqlalchemy import func, and_ + +from fastapi import APIRouter, Request, Depends, HTTPException +from fastapi.responses import FileResponse, JSONResponse +from fastapi_cache.decorator import cache +from sqlalchemy import func, and_, select +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.dialects import postgresql -from datetime import datetime, date, timezone -from data.stops import Stops -from data.schedules import Schedule -from hashlib import sha256 -import hmac -import logging + +from .database import get_db +from .models import Vehicle, GeofenceEvent, VehicleLocation, DriverVehicleAssignment +from .config import settings from .time_utils import get_campus_start_of_day +from data.stops import Stops +# from data.schedules import Schedule logger = logging.getLogger(__name__) -bp = Blueprint('routes', __name__) - -@bp.route('/') -@bp.route('/schedule') -@bp.route('/about') -@bp.route('/data') -@bp.route('/map') -@bp.route('/generate-static-routes') -def serve_react(): - # serve the React app's index.html for all main routes - root_dir = Path(__file__).parent.parent / 'client' / 'dist' - return send_from_directory(root_dir, 'index.html') - -@bp.route('/api/locations', methods=['GET']) -@cache.cached(timeout=300, key_prefix="vehicle_locations") -def get_locations(): +router = APIRouter() + + +# Frontend routes - serve React app +@router.get("/") +@router.get("/schedule") +@router.get("/about") +@router.get("/data") +@router.get("/map") +@router.get("/generate-static-routes") +async def serve_react(): + """Serve the React app's index.html for all main routes.""" + root_dir = Path(__file__).parent.parent / "client" / "dist" + index_path = root_dir / "index.html" + if index_path.exists(): + return FileResponse(index_path) + raise HTTPException(status_code=404, detail="Frontend not built") + + +@router.get("/api/locations") +@cache(expire=300) +async def get_locations(db: AsyncSession = Depends(get_db)): """ Returns the latest location for each vehicle currently inside the geofence. 
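The `@cache(expire=300)` decorator above only takes effect once `FastAPICache.init()` has run, which the lifespan handler does with a Redis backend. A minimal, self-contained sketch of that wiring, not part of the patch; it uses the in-memory backend so it runs without Redis, and shows one way a cached entry could be evicted:

```python
import asyncio
from contextlib import asynccontextmanager

from fastapi import FastAPI
from fastapi_cache import FastAPICache
from fastapi_cache.backends.inmemory import InMemoryBackend
from fastapi_cache.decorator import cache

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Must run before any @cache-decorated route is called.
    FastAPICache.init(InMemoryBackend(), prefix="demo-cache")
    yield

app = FastAPI(lifespan=lifespan)

@app.get("/slow")
@cache(expire=300)  # same TTL as /api/locations
async def slow() -> dict:
    await asyncio.sleep(1)  # stands in for the geofence query
    return {"ok": True}

@app.post("/invalidate")
async def invalidate() -> dict:
    # Evict everything cached under the init() prefix.
    await FastAPICache.clear()
    return {"cleared": True}
```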
The vehicle is considered inside the geofence if its latest geofence event @@ -39,49 +52,67 @@ def get_locations(): start_of_today = get_campus_start_of_day() # Subquery: latest geofence event today per vehicle - latest_geofence_events = db.session.query( - GeofenceEvent.vehicle_id, - func.max(GeofenceEvent.event_time).label('latest_time') - ).filter( - GeofenceEvent.event_time >= start_of_today - ).group_by(GeofenceEvent.vehicle_id).subquery() + latest_geofence_events = ( + select( + GeofenceEvent.vehicle_id, + func.max(GeofenceEvent.event_time).label("latest_time"), + ) + .where(GeofenceEvent.event_time >= start_of_today) + .group_by(GeofenceEvent.vehicle_id) + .subquery() + ) # Join to get full geofence event rows where event is geofenceEntry - geofence_entries = db.session.query(GeofenceEvent.vehicle_id).join( - latest_geofence_events, - and_( - GeofenceEvent.vehicle_id == latest_geofence_events.c.vehicle_id, - GeofenceEvent.event_time == latest_geofence_events.c.latest_time + geofence_entries = ( + select(GeofenceEvent.vehicle_id) + .join( + latest_geofence_events, + and_( + GeofenceEvent.vehicle_id == latest_geofence_events.c.vehicle_id, + GeofenceEvent.event_time == latest_geofence_events.c.latest_time, + ), ) - ).filter(GeofenceEvent.event_type == 'geofenceEntry').subquery() + .where(GeofenceEvent.event_type == "geofenceEntry") + .subquery() + ) # Subquery: latest vehicle location per vehicle - latest_locations = db.session.query( - VehicleLocation.vehicle_id, - func.max(VehicleLocation.timestamp).label('latest_time') - ).filter( - VehicleLocation.vehicle_id.in_(db.session.query(geofence_entries.c.vehicle_id)) - ).group_by(VehicleLocation.vehicle_id).subquery() + latest_locations = ( + select( + VehicleLocation.vehicle_id, + func.max(VehicleLocation.timestamp).label("latest_time"), + ) + .where(VehicleLocation.vehicle_id.in_(select(geofence_entries.c.vehicle_id))) + .group_by(VehicleLocation.vehicle_id) + .subquery() + ) # Join to get full location and vehicle info for vehicles in geofence - results = db.session.query(VehicleLocation, Vehicle).join( - latest_locations, - and_( - VehicleLocation.vehicle_id == latest_locations.c.vehicle_id, - VehicleLocation.timestamp == latest_locations.c.latest_time + query = ( + select(VehicleLocation, Vehicle) + .join( + latest_locations, + and_( + VehicleLocation.vehicle_id == latest_locations.c.vehicle_id, + VehicleLocation.timestamp == latest_locations.c.latest_time, + ), ) - ).join( - Vehicle, VehicleLocation.vehicle_id == Vehicle.id - ).all() + .join(Vehicle, VehicleLocation.vehicle_id == Vehicle.id) + ) + + result = await db.execute(query) + results = result.all() # Get current driver assignments for all vehicles in results vehicle_ids = [loc.vehicle_id for loc, _ in results] current_assignments = {} if vehicle_ids: - assignments = DriverVehicleAssignment.query.filter( + assignments_query = select(DriverVehicleAssignment).where( DriverVehicleAssignment.vehicle_id.in_(vehicle_ids), - DriverVehicleAssignment.assignment_end.is_(None) - ).all() + DriverVehicleAssignment.assignment_end.is_(None), + ) + assignments_result = await db.execute(assignments_query) + assignments = assignments_result.scalars().all() for assignment in assignments: current_assignments[assignment.vehicle_id] = assignment @@ -102,158 +133,202 @@ def get_locations(): assignment = current_assignments.get(loc.vehicle_id) if assignment and assignment.driver: driver_info = { - 'id': assignment.driver.id, - 'name': assignment.driver.name, + "id": assignment.driver.id, + "name": 
assignment.driver.name, } response[loc.vehicle_id] = { - 'name': loc.name, - 'latitude': loc.latitude, - 'longitude': loc.longitude, - 'timestamp': loc.timestamp.isoformat(), - 'heading_degrees': loc.heading_degrees, - 'speed_mph': loc.speed_mph, - 'route_name': route_name, - 'polyline_index': polyline_index, - 'is_ecu_speed': loc.is_ecu_speed, - 'formatted_location': loc.formatted_location, - 'address_id': loc.address_id, - 'address_name': loc.address_name, - 'license_plate': vehicle.license_plate, - 'vin': vehicle.vin, - 'asset_type': vehicle.asset_type, - 'gateway_model': vehicle.gateway_model, - 'gateway_serial': vehicle.gateway_serial, - 'driver': driver_info, + "name": loc.name, + "latitude": loc.latitude, + "longitude": loc.longitude, + "timestamp": loc.timestamp.isoformat(), + "heading_degrees": loc.heading_degrees, + "speed_mph": loc.speed_mph, + "route_name": route_name, + "polyline_index": polyline_index, + "is_ecu_speed": loc.is_ecu_speed, + "formatted_location": loc.formatted_location, + "address_id": loc.address_id, + "address_name": loc.address_name, + "license_plate": vehicle.license_plate, + "vin": vehicle.vin, + "asset_type": vehicle.asset_type, + "gateway_model": vehicle.gateway_model, + "gateway_serial": vehicle.gateway_serial, + "driver": driver_info, } - return jsonify(response) + return response -@bp.route('/api/webhook', methods=['POST']) -def webhook(): - if secret := current_app.config['SAMSARA_SECRET']: - # See https://developers.samsara.com/docs/webhooks#webhook-signatures + +@router.post("/api/webhook") +async def webhook(request: Request, db: AsyncSession = Depends(get_db)): + """ + Handles incoming webhook events for geofence entries/exits. + Expects JSON payload with event details. + """ + # Verify webhook signature if secret is configured + if secret := settings.SAMSARA_SECRET: try: - timestamp = request.headers['X-Samsara-Timestamp'] - signature = request.headers['X-Samsara-Signature'] + timestamp = request.headers["X-Samsara-Timestamp"] + signature = request.headers["X-Samsara-Signature"] - prefix = 'v1:{0}:'.format(timestamp) - message = bytes(prefix, 'utf-8') + request.data + # Read request body + body = await request.body() + + prefix = f"v1:{timestamp}:" + message = bytes(prefix, "utf-8") + body h = hmac.new(secret, message, sha256) - expected_signature = 'v1=' + h.hexdigest() + expected_signature = "v1=" + h.hexdigest() if expected_signature != signature: - return jsonify({'status': 'error', 'message': 'Failed to authenticate request.'}), 401 + return JSONResponse( + {"status": "error", "message": "Failed to authenticate request."}, + status_code=401, + ) except KeyError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return JSONResponse({"status": "error", "message": str(e)}, status_code=400) - """ - Handles incoming webhook events for geofence entries/exits. - Expects JSON payload with event details. 
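The HMAC check above can be exercised without a live Samsara webhook. A small sketch, not part of the patch, that signs a test request the way the handler expects (`v1:{timestamp}:` plus the raw body, hashed with HMAC-SHA256); the secret and payload here are invented:

```python
import hmac
from hashlib import sha256

def sign(secret: bytes, timestamp: str, body: bytes) -> str:
    # Same construction the handler verifies: "v1:{timestamp}:" + raw body.
    message = f"v1:{timestamp}:".encode("utf-8") + body
    return "v1=" + hmac.new(secret, message, sha256).hexdigest()

secret = b"test-secret"  # invented; the real one comes from SAMSARA_SECRET_BASE64
timestamp = "1735084800"
body = b'{"eventId": "evt-1"}'

# Send these headers alongside the body to satisfy the check:
headers = {
    "X-Samsara-Timestamp": timestamp,
    "X-Samsara-Signature": sign(secret, timestamp, body),
}
print(headers)
```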
- """ - data = request.get_json(force=True) + # Parse JSON payload + try: + data = await request.json() + except Exception: + logger.error(f"Invalid JSON received") + return JSONResponse( + {"status": "error", "message": "Invalid JSON"}, status_code=400 + ) if not data: - logger.error(f'Invalid JSON received: {request.data}') - return jsonify({'status': 'error', 'message': 'Invalid JSON'}), 400 + return JSONResponse( + {"status": "error", "message": "Empty payload"}, status_code=400 + ) try: - # parse top-level event details - event_id = data.get('eventId') - event_time = datetime.fromisoformat(data.get('eventTime').replace("Z", "+00:00")) - event_data = data.get('data', {}) + # Parse top-level event details + event_id = data.get("eventId") + event_time = datetime.fromisoformat( + data.get("eventTime").replace("Z", "+00:00") + ) + event_data = data.get("data", {}) - # parse condition details - conditions = event_data.get('conditions', []) + # Parse condition details + conditions = event_data.get("conditions", []) if not conditions: - logger.error(f'No conditions found in webhook data: {data}') - return jsonify({'status': 'error', 'message': 'Missing conditions'}), 400 + logger.error(f"No conditions found in webhook data: {data}") + return JSONResponse( + {"status": "error", "message": "Missing conditions"}, status_code=400 + ) for condition in conditions: - details = condition.get('details', {}) - # determine if entry or exit - if 'geofenceEntry' in details: - geofence_event = details.get('geofenceEntry', {}) + details = condition.get("details", {}) + # Determine if entry or exit + if "geofenceEntry" in details: + geofence_event = details.get("geofenceEntry", {}) else: - geofence_event = details.get('geofenceExit', {}) + geofence_event = details.get("geofenceExit", {}) - vehicle_data = geofence_event.get('vehicle') + vehicle_data = geofence_event.get("vehicle") if not vehicle_data: - continue # skip conditions with no vehicle + continue # Skip conditions with no vehicle - address = geofence_event.get('address', {}) - geofence = address.get('geofence', {}) - polygon = geofence.get('polygon', {}) - vertices = polygon.get('vertices', []) - latitude = vertices[0].get('latitude') if vertices else None - longitude = vertices[0].get('longitude') if vertices else None + address = geofence_event.get("address", {}) + geofence = address.get("geofence", {}) + polygon = geofence.get("polygon", {}) + vertices = polygon.get("vertices", []) + latitude = vertices[0].get("latitude") if vertices else None + longitude = vertices[0].get("longitude") if vertices else None - # extract vehicle info - vehicle_id = vehicle_data.get('id') - vehicle_name = vehicle_data.get('name') + # Extract vehicle info + vehicle_id = vehicle_data.get("id") + vehicle_name = vehicle_data.get("name") if not (vehicle_id and vehicle_name): - continue # skip invalid entries + continue # Skip invalid entries + + # Find or create vehicle + vehicle_query = select(Vehicle).where(Vehicle.id == vehicle_id) + result = await db.execute(vehicle_query) + vehicle = result.scalar_one_or_none() - # find or create vehicle - vehicle = Vehicle.query.get(vehicle_id) if not vehicle: vehicle = Vehicle( id=vehicle_id, name=vehicle_name, - asset_type=vehicle_data.get('assetType', 'vehicle'), - license_plate=vehicle_data.get('licensePlate'), - vin=vehicle_data.get('vin'), - maintenance_id=vehicle_data.get('externalIds', {}).get('maintenanceId'), - gateway_model=vehicle_data.get('gateway', {}).get('model'), - gateway_serial=vehicle_data.get('gateway', 
{}).get('serial'), - ) - db.session.add(vehicle) - - db.session.execute( - postgresql.insert(GeofenceEvent).on_conflict_do_nothing().values( - id=event_id, - vehicle_id=vehicle_id, - event_type='geofenceEntry' if 'geofenceEntry' in details else 'geofenceExit', - event_time=event_time, - address_name=address.get("name"), - address_formatted=address.get("formattedAddress"), - latitude=latitude, - longitude=longitude, + asset_type=vehicle_data.get("assetType", "vehicle"), + license_plate=vehicle_data.get("licensePlate"), + vin=vehicle_data.get("vin"), + maintenance_id=vehicle_data.get("externalIds", {}).get( + "maintenanceId" + ), + gateway_model=vehicle_data.get("gateway", {}).get("model"), + gateway_serial=vehicle_data.get("gateway", {}).get("serial"), ) + db.add(vehicle) + + # Insert geofence event (using PostgreSQL upsert) + insert_stmt = postgresql.insert(GeofenceEvent).values( + id=event_id, + vehicle_id=vehicle_id, + event_type=( + "geofenceEntry" if "geofenceEntry" in details else "geofenceExit" + ), + event_time=event_time, + address_name=address.get("name"), + address_formatted=address.get("formattedAddress"), + latitude=latitude, + longitude=longitude, ) + insert_stmt = insert_stmt.on_conflict_do_nothing() + await db.execute(insert_stmt) + + await db.commit() - db.session.commit() - - # Invalidate Cache - cache.delete('vehicles_in_geofence') - - return jsonify({'status': 'success'}), 200 + # Note: Cache invalidation for fastapi-cache2 would need to be implemented differently + # For now, we rely on TTL expiration + + return {"status": "success"} except Exception as e: - db.session.rollback() + await db.rollback() + logger.exception(f"Error processing webhook data: {e}") + return JSONResponse({"status": "error", "message": str(e)}, status_code=500) - logger.exception(f'Error processing webhook data: {e}') - return jsonify({'status': 'error', 'message': str(e)}), 500 -@bp.route('/api/today', methods=['GET']) -def data_today(): +@router.get("/api/today") +async def data_today(db: AsyncSession = Depends(get_db)): + """Get all location data and geofence events for today.""" now = datetime.now(timezone.utc) start_of_day = get_campus_start_of_day() - locations_today = VehicleLocation.query.filter( - and_( - VehicleLocation.timestamp >= start_of_day, - VehicleLocation.timestamp <= now - ) - ).order_by(VehicleLocation.timestamp.asc()).all() - events_today = db.session.query(GeofenceEvent).filter( - and_( - GeofenceEvent.event_time >= start_of_day, - GeofenceEvent.event_time <= now + # Query locations today + locations_query = ( + select(VehicleLocation) + .where( + and_( + VehicleLocation.timestamp >= start_of_day, + VehicleLocation.timestamp <= now, + ) + ) + .order_by(VehicleLocation.timestamp.asc()) + ) + locations_result = await db.execute(locations_query) + locations_today = locations_result.scalars().all() + + # Query events today + events_query = ( + select(GeofenceEvent) + .where( + and_( + GeofenceEvent.event_time >= start_of_day, + GeofenceEvent.event_time <= now, + ) ) - ).order_by(GeofenceEvent.event_time.asc()).all() + .order_by(GeofenceEvent.event_time.asc()) + ) + events_result = await db.execute(events_query) + events_today = events_result.scalars().all() + # Build response dict locations_today_dict = {} for location in locations_today: vehicle_location = { @@ -262,7 +337,7 @@ def data_today(): "timestamp": location.timestamp, "speed_mph": location.speed_mph, "heading_degrees": location.heading_degrees, - "address_id": location.address_id + "address_id": location.address_id, 
} if location.vehicle_id in locations_today_dict: locations_today_dict[location.vehicle_id]["data"].append(vehicle_location) @@ -270,68 +345,80 @@ def data_today(): locations_today_dict[location.vehicle_id] = { "entry": None, "exit": None, - "data": [vehicle_location] + "data": [vehicle_location], } - for e, geofence_event in enumerate(events_today): + + for geofence_event in events_today: if geofence_event.event_type == "geofenceEntry": - if "entry" not in locations_today_dict[geofence_event.vehicle_id]: # first entry - locations_today_dict[geofence_event.vehicle_id]["entry"] = geofence_event.event_time + if ( + "entry" not in locations_today_dict[geofence_event.vehicle_id] + ): # First entry + locations_today_dict[geofence_event.vehicle_id]["entry"] = ( + geofence_event.event_time + ) elif geofence_event.event_type == "geofenceExit": - if "entry" in locations_today_dict[geofence_event.vehicle_id]: # makes sure that the vehicle already entered - locations_today_dict[geofence_event.vehicle_id]["exit"] = geofence_event.event_time + if ( + "entry" in locations_today_dict[geofence_event.vehicle_id] + ): # Makes sure that the vehicle already entered + locations_today_dict[geofence_event.vehicle_id]["exit"] = ( + geofence_event.event_time + ) + + return locations_today_dict - return jsonify(locations_today_dict) -@bp.route('/api/routes', methods=['GET']) -def get_shuttle_routes(): +@router.get("/api/routes") +async def get_shuttle_routes(): + """Serve routes.json file.""" root_dir = Path(__file__).parent.parent - return send_from_directory(root_dir / 'data', 'routes.json') + routes_file = root_dir / "data" / "routes.json" + if routes_file.exists(): + return FileResponse(routes_file) + raise HTTPException(status_code=404, detail="Routes file not found") -@bp.route('/api/schedule', methods=['GET']) -def get_shuttle_schedule(): + +@router.get("/api/schedule") +async def get_shuttle_schedule(): + """Serve schedule.json file.""" root_dir = Path(__file__).parent.parent - return send_from_directory(root_dir / 'data', 'schedule.json') + schedule_file = root_dir / "data" / "schedule.json" + if schedule_file.exists(): + return FileResponse(schedule_file) + raise HTTPException(status_code=404, detail="Schedule file not found") + -@bp.route('/api/aggregated-schedule', methods=['GET']) -def get_aggregated_shuttle_schedule(): +@router.get("/api/aggregated-schedule") +async def get_aggregated_shuttle_schedule(): + """Serve aggregated_schedule.json file.""" root_dir = Path(__file__).parent.parent - return send_from_directory(root_dir / 'data', 'aggregated_schedule.json') + aggregated_file = root_dir / "data" / "aggregated_schedule.json" + if aggregated_file.exists(): + return FileResponse(aggregated_file) + raise HTTPException(status_code=404, detail="Aggregated schedule file not found") + -@bp.route('/api/matched-schedules', methods=['GET']) -def get_matched_shuttle_schedules(): +@router.get("/api/matched-schedules") +@cache(expire=3600) +async def get_matched_shuttle_schedules(force_recompute: bool = False): """ Return cached matched schedules unless force_recompute=true, in which case recompute and update the cache. 
""" - try: - # Parse URL argument, default False - force_recompute = request.args.get("force_recompute", "false").lower() == "true" - - # If not forcing recompute, try cache first - if not force_recompute: - cached = cache.get("schedule_entries") - if cached is not None: - return jsonify({ - "status": "success", - "matchedSchedules": cached, - "source": "cache" - }), 200 - - # Otherwise compute fresh and overwrite cache - matched = Schedule.match_shuttles_to_schedules() - cache.set("schedule_entries", matched, timeout=3600) - - return jsonify({ + # Note: With fastapi-cache2, the @cache decorator handles caching automatically + # The force_recompute parameter would need custom cache invalidation logic + # For now, we compute fresh data if requested + + matched = {} # Schedule.match_shuttles_to_schedules() + + return { "status": "success", "matchedSchedules": matched, - "source": "recomputed" if force_recompute else "fallback_computed" - }), 200 + "source": "recomputed" if force_recompute else "computed", + } except Exception as e: logger.exception(f"Error in matched schedule endpoint: {e}") - return jsonify({ - "status": "error", - "message": str(e) - }), 500 - + return JSONResponse( + {"status": "error", "message": str(e)}, status_code=500 + ) diff --git a/server/time_utils.py b/server/time_utils.py index 126706fb..4459934d 100644 --- a/server/time_utils.py +++ b/server/time_utils.py @@ -1,10 +1,18 @@ +"""Time utility functions for timezone handling.""" from datetime import datetime, timezone -from flask import current_app +from .config import settings def get_campus_start_of_day(): - now = datetime.now(current_app.config['CAMPUS_TZ']) + """ + Get the start of the current day in campus timezone (America/New_York), + converted to UTC. + + Returns: + datetime: Midnight in campus timezone, converted to UTC + """ + now = datetime.now(settings.CAMPUS_TZ) midnight = now.replace(hour=0, minute=0, second=0, microsecond=0) return midnight.astimezone(timezone.utc) diff --git a/server/worker.py b/server/worker.py index fa108d2c..70bb86fc 100644 --- a/server/worker.py +++ b/server/worker.py @@ -1,299 +1,369 @@ -from . 
import create_app, db, cache, Config +"""Async background worker for fetching vehicle data from Samsara API.""" +import asyncio +import logging +import os +from datetime import datetime, timezone + +import httpx +from sqlalchemy import func, and_, select +from fastapi_cache import FastAPICache +from fastapi_cache.backends.redis import RedisBackend +from redis import asyncio as aioredis + +from .config import settings +from .database import create_async_db_engine, create_session_factory from .time_utils import get_campus_start_of_day from .models import VehicleLocation, GeofenceEvent, Driver, DriverVehicleAssignment -from sqlalchemy import func, and_ -import time -import requests -import os -import logging -from datetime import datetime, date, timedelta, timezone -from data.schedules import Schedule -import math # Logging config -numeric_level = logging._nameToLevel.get(Config.LOG_LEVEL.upper(), logging.INFO) +numeric_level = logging._nameToLevel.get(settings.LOG_LEVEL.upper(), logging.INFO) logging.basicConfig( - level=numeric_level + level=numeric_level, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", ) logger = logging.getLogger(__name__) -app = create_app() -@cache.cached(timeout=300, key_prefix="vehicles_in_geofence") -def get_vehicles_in_geofence(): +async def get_vehicles_in_geofence(session_factory): """ Returns a set of vehicle_ids where the latest geofence event from today is a geofenceEntry. """ - start_of_today = get_campus_start_of_day() - - # Filter to today's events first - today_events = db.session.query(GeofenceEvent).filter( - GeofenceEvent.event_time >= start_of_today - ).subquery() - - # Subquery to get latest event per vehicle from today's events - subquery = db.session.query( - today_events.c.vehicle_id, - func.max(today_events.c.event_time).label('latest_time') - ).group_by(today_events.c.vehicle_id).subquery() - - # Join back to get the latest event row - latest_entries = db.session.query(today_events.c.vehicle_id).join( - subquery, - and_( - today_events.c.vehicle_id == subquery.c.vehicle_id, - today_events.c.event_time == subquery.c.latest_time + async with session_factory() as session: + start_of_today = get_campus_start_of_day() + + # Subquery to get latest event per vehicle from today's events + subquery = ( + select( + GeofenceEvent.vehicle_id, + func.max(GeofenceEvent.event_time).label("latest_time"), + ) + .where(GeofenceEvent.event_time >= start_of_today) + .group_by(GeofenceEvent.vehicle_id) + .subquery() ) - ).filter(today_events.c.event_type == 'geofenceEntry').all() - return {row.vehicle_id for row in latest_entries} + # Join back to get the latest event row where type is entry + query = ( + select(GeofenceEvent.vehicle_id) + .join( + subquery, + and_( + GeofenceEvent.vehicle_id == subquery.c.vehicle_id, + GeofenceEvent.event_time == subquery.c.latest_time, + ), + ) + .where(GeofenceEvent.event_type == "geofenceEntry") + ) + + result = await session.execute(query) + rows = result.all() + return {row.vehicle_id for row in rows} + -def update_locations(app): +async def update_locations(session_factory): """ Fetches and updates vehicle locations for vehicles currently in the geofence. Uses pagination token to fetch subsequent pages. 
""" # Get the current list of vehicles in the geofence - current_vehicle_ids = get_vehicles_in_geofence() + current_vehicle_ids = await get_vehicles_in_geofence(session_factory) # No vehicles to update if not current_vehicle_ids: - logger.info('No vehicles in geofence to update') + logger.info("No vehicles in geofence to update") return - headers = {'Accept': 'application/json'} + headers = {"Accept": "application/json"} # Determine API URL based on environment - if app.config['ENV'] == 'development': - url = 'http://localhost:4000/fleet/vehicles/stats' + if settings.ENV == "development": + url = "http://localhost:4000/fleet/vehicles/stats" else: - api_key = os.environ.get('API_KEY') + api_key = os.environ.get("API_KEY") if not api_key: - logger.error('API_KEY not set') + logger.error("API_KEY not set") return - headers['Authorization'] = f'Bearer {api_key}' - url = 'https://api.samsara.com/fleet/vehicles/stats' + headers["Authorization"] = f"Bearer {api_key}" + url = "https://api.samsara.com/fleet/vehicles/stats" url_params = { - 'vehicleIds': ','.join(current_vehicle_ids), - 'types': 'gps', + "vehicleIds": ",".join(current_vehicle_ids), + "types": "gps", } try: - has_next_page = True - after_token = None - new_records_added = 0 - - while has_next_page: - # Add pagination token if present - if after_token: - url_params['after'] = after_token - has_next_page = False - # Make the API request - response = requests.get(url, headers=headers, params=url_params) - # Handle non-200 responses - if response.status_code != 200: - logger.error(f'API error: {response.status_code} {response.text}') - return - - data = response.json() - # Handle pagination - pagination = data.get('pagination', {}) - if pagination.get('hasNextPage'): - has_next_page = True - after_token = pagination.get('endCursor', after_token) - - for vehicle in data.get('data', []): - # Process each vehicle's GPS data - vehicle_id = vehicle.get('id') - vehicle_name = vehicle.get('name') - gps = vehicle.get('gps') - - if not vehicle_id or not gps: - continue - - timestamp_str = gps.get('time') - if not timestamp_str: - continue - # Convert ISO 8601 string to datetime - timestamp = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00")) - - exists = VehicleLocation.query.filter_by( - vehicle_id=vehicle_id, - timestamp=timestamp - ).first() - if exists: - continue # Skip if record already exists - - # Create and add new VehicleLocation - loc = VehicleLocation( - vehicle_id=vehicle_id, - timestamp=timestamp, - name=vehicle_name, - latitude=gps.get('latitude'), - longitude=gps.get('longitude'), - heading_degrees=gps.get('headingDegrees'), - speed_mph=gps.get('speedMilesPerHour'), - is_ecu_speed=gps.get('isEcuSpeed', False), - formatted_location=gps.get('reverseGeo', {}).get('formattedLocation'), - address_id=gps.get('address', {}).get('id'), - address_name=gps.get('address', {}).get('name'), - ) - db.session.add(loc) - new_records_added += 1 - - # Only commit and invalidate cache if we actually added new records - if new_records_added > 0: - db.session.commit() - cache.delete('vehicle_locations') - cache.delete("schedule_entries") - logger.info(f'Updated locations for {len(current_vehicle_ids)} vehicles - {new_records_added} new records') - else: - logger.info(f'No new location data for {len(current_vehicle_ids)} vehicles') - - except requests.RequestException as e: - logger.error(f'Failed to fetch locations: {e}') - - return - - -def update_driver_assignments(app, vehicle_ids): + async with httpx.AsyncClient(timeout=30.0) as client: 
+ has_next_page = True + after_token = None + new_records_added = 0 + + while has_next_page: + # Add pagination token if present + if after_token: + url_params["after"] = after_token + has_next_page = False + + # Make the API request + response = await client.get(url, headers=headers, params=url_params) + + # Handle non-200 responses + if response.status_code != 200: + logger.error(f"API error: {response.status_code} {response.text}") + return + + data = response.json() + + # Handle pagination + pagination = data.get("pagination", {}) + if pagination.get("hasNextPage"): + has_next_page = True + after_token = pagination.get("endCursor", after_token) + + async with session_factory() as session: + for vehicle in data.get("data", []): + # Process each vehicle's GPS data + vehicle_id = vehicle.get("id") + vehicle_name = vehicle.get("name") + gps = vehicle.get("gps") + + if not vehicle_id or not gps: + continue + + timestamp_str = gps.get("time") + if not timestamp_str: + continue + + # Convert ISO 8601 string to datetime + timestamp = datetime.fromisoformat( + timestamp_str.replace("Z", "+00:00") + ) + + # Check if record already exists + exists_query = select(VehicleLocation).where( + VehicleLocation.vehicle_id == vehicle_id, + VehicleLocation.timestamp == timestamp, + ) + result = await session.execute(exists_query) + exists = result.scalar_one_or_none() + + if exists: + continue # Skip if record already exists + + # Create and add new VehicleLocation + loc = VehicleLocation( + vehicle_id=vehicle_id, + timestamp=timestamp, + name=vehicle_name, + latitude=gps.get("latitude"), + longitude=gps.get("longitude"), + heading_degrees=gps.get("headingDegrees"), + speed_mph=gps.get("speedMilesPerHour"), + is_ecu_speed=gps.get("isEcuSpeed", False), + formatted_location=gps.get("reverseGeo", {}).get( + "formattedLocation" + ), + address_id=gps.get("address", {}).get("id"), + address_name=gps.get("address", {}).get("name"), + ) + session.add(loc) + new_records_added += 1 + + # Only commit if we actually added new records + if new_records_added > 0: + await session.commit() + logger.info( + f"Updated locations for {len(current_vehicle_ids)} vehicles - {new_records_added} new records" + ) + else: + logger.info( + f"No new location data for {len(current_vehicle_ids)} vehicles" + ) + + except httpx.HTTPError as e: + logger.error(f"Failed to fetch locations: {e}") + except Exception as e: + logger.exception(f"Unexpected error in update_locations: {e}") + + +async def update_driver_assignments(session_factory, vehicle_ids): """ Fetches and updates driver-vehicle assignments for vehicles currently in the geofence. Creates/updates driver records and tracks assignment changes. 
""" if not vehicle_ids: - logger.info('No vehicles to fetch driver assignments for') + logger.info("No vehicles to fetch driver assignments for") return - headers = {'Accept': 'application/json'} + headers = {"Accept": "application/json"} # Determine API URL based on environment - if app.config['ENV'] == 'development': - url = 'http://localhost:4000/fleet/driver-vehicle-assignments' + if settings.ENV == "development": + url = "http://localhost:4000/fleet/driver-vehicle-assignments" else: - api_key = os.environ.get('API_KEY') + api_key = os.environ.get("API_KEY") if not api_key: - logger.error('API_KEY not set for driver assignments') + logger.error("API_KEY not set for driver assignments") return - headers['Authorization'] = f'Bearer {api_key}' - url = 'https://api.samsara.com/fleet/driver-vehicle-assignments' + headers["Authorization"] = f"Bearer {api_key}" + url = "https://api.samsara.com/fleet/driver-vehicle-assignments" url_params = { - 'filterBy': 'vehicles', - 'vehicleIds': ','.join(vehicle_ids), + "filterBy": "vehicles", + "vehicleIds": ",".join(vehicle_ids), } try: - has_next_page = True - after_token = None - assignments_updated = 0 - - while has_next_page: - if after_token: - url_params['after'] = after_token - has_next_page = False - - response = requests.get(url, headers=headers, params=url_params) - if response.status_code != 200: - logger.error(f'Driver assignments API error: {response.status_code} {response.text}') - return - - data = response.json() - logger.info(f'Driver assignments API response: {len(data.get("data", []))} assignments returned') - pagination = data.get('pagination', {}) - if pagination.get('hasNextPage'): - has_next_page = True - after_token = pagination.get('endCursor', after_token) - - now = datetime.now(timezone.utc) - - for assignment in data.get('data', []): - driver_data = assignment.get('driver') - vehicle_data = assignment.get('vehicle') - - if not driver_data or not vehicle_data: - continue - - driver_id = driver_data.get('id') - driver_name = driver_data.get('name') - vehicle_id = vehicle_data.get('id') - assigned_at_str = assignment.get('assignedAtTime') - - if not driver_id or not vehicle_id: - continue - - # Parse assignment time - if assigned_at_str: - assigned_at = datetime.fromisoformat(assigned_at_str.replace("Z", "+00:00")) - else: - assigned_at = now - - # Create or update driver - driver = db.session.get(Driver, driver_id) - if not driver: - driver = Driver(id=driver_id, name=driver_name) - db.session.add(driver) - logger.info(f'Created new driver: {driver_name} ({driver_id})') - elif driver.name != driver_name: - driver.name = driver_name - - # Check if there's an existing open assignment for this vehicle - existing = DriverVehicleAssignment.query.filter_by( - vehicle_id=vehicle_id, - assignment_end=None - ).first() - - if existing: - # If same driver, no change needed - if existing.driver_id == driver_id: - continue - # Different driver - close the old assignment - existing.assignment_end = now - logger.info(f'Closed assignment for driver {existing.driver_id} on vehicle {vehicle_id}') - - # Create new assignment - new_assignment = DriverVehicleAssignment( - driver_id=driver_id, - vehicle_id=vehicle_id, - assignment_start=assigned_at, + async with httpx.AsyncClient(timeout=30.0) as client: + has_next_page = True + after_token = None + assignments_updated = 0 + + while has_next_page: + if after_token: + url_params["after"] = after_token + has_next_page = False + + response = await client.get(url, headers=headers, params=url_params) + 
if response.status_code != 200: + logger.error( + f"Driver assignments API error: {response.status_code} {response.text}" + ) + return + + data = response.json() + logger.info( + f'Driver assignments API response: {len(data.get("data", []))} assignments returned' ) - db.session.add(new_assignment) - assignments_updated += 1 - - if assignments_updated > 0: - db.session.commit() - logger.info(f'Updated {assignments_updated} driver assignments') - else: - logger.info('No driver assignment changes detected') - - except requests.RequestException as e: - logger.error(f'Failed to fetch driver assignments: {e}') - -def run_worker(): - logger.info('Worker started...') + pagination = data.get("pagination", {}) + if pagination.get("hasNextPage"): + has_next_page = True + after_token = pagination.get("endCursor", after_token) + + now = datetime.now(timezone.utc) + + async with session_factory() as session: + for assignment in data.get("data", []): + driver_data = assignment.get("driver") + vehicle_data = assignment.get("vehicle") + + if not driver_data or not vehicle_data: + continue + + driver_id = driver_data.get("id") + driver_name = driver_data.get("name") + vehicle_id = vehicle_data.get("id") + assigned_at_str = assignment.get("assignedAtTime") + + if not driver_id or not vehicle_id: + continue + + # Parse assignment time + if assigned_at_str: + assigned_at = datetime.fromisoformat( + assigned_at_str.replace("Z", "+00:00") + ) + else: + assigned_at = now + + # Create or update driver + driver_query = select(Driver).where(Driver.id == driver_id) + result = await session.execute(driver_query) + driver = result.scalar_one_or_none() + + if not driver: + driver = Driver(id=driver_id, name=driver_name) + session.add(driver) + logger.info(f"Created new driver: {driver_name} ({driver_id})") + elif driver.name != driver_name: + driver.name = driver_name + + # Check if there's an existing open assignment for this vehicle + existing_query = select(DriverVehicleAssignment).where( + DriverVehicleAssignment.vehicle_id == vehicle_id, + DriverVehicleAssignment.assignment_end.is_(None), + ) + result = await session.execute(existing_query) + existing = result.scalar_one_or_none() + + if existing: + # If same driver, no change needed + if existing.driver_id == driver_id: + continue + # Different driver - close the old assignment + existing.assignment_end = now + logger.info( + f"Closed assignment for driver {existing.driver_id} on vehicle {vehicle_id}" + ) + + # Create new assignment + new_assignment = DriverVehicleAssignment( + driver_id=driver_id, + vehicle_id=vehicle_id, + assignment_start=assigned_at, + ) + session.add(new_assignment) + assignments_updated += 1 + + if assignments_updated > 0: + await session.commit() + logger.info(f"Updated {assignments_updated} driver assignments") + else: + logger.info("No driver assignment changes detected") + + except httpx.HTTPError as e: + logger.error(f"Failed to fetch driver assignments: {e}") + except Exception as e: + logger.exception(f"Unexpected error in update_driver_assignments: {e}") + + +async def run_worker(): + """Main worker loop that runs continuously.""" + logger.info("Async worker started...") + + # Initialize database engine and session factory + db_engine = create_async_db_engine(settings.DATABASE_URL, echo=settings.DEBUG) + session_factory = create_session_factory(db_engine) + logger.info("Database engine and session factory initialized") + + # Initialize Redis cache for FastAPI cache + try: + redis = await aioredis.from_url( + settings.REDIS_URL, + 
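+            # decode_responses=True makes the redis client hand back str
+            # values instead of bytes for anything read from the cache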
encoding="utf-8", + decode_responses=True, + ) + FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache") + logger.info("Redis cache initialized") + except Exception as e: + logger.error(f"Failed to initialize Redis cache: {e}") + # Continue without cache - while True: - try: - with app.app_context(): + try: + while True: + try: # Get current vehicles in geofence before updating - current_vehicle_ids = get_vehicles_in_geofence() + current_vehicle_ids = await get_vehicles_in_geofence(session_factory) - update_locations(app) + # Update locations + await update_locations(session_factory) # Update driver assignments for vehicles in geofence if current_vehicle_ids: - update_driver_assignments(app, current_vehicle_ids) + await update_driver_assignments(session_factory, current_vehicle_ids) + + # Note: Schedule matching is now handled by the API endpoint with caching + # The @cache decorator on the endpoint handles it automatically + + except Exception as e: + logger.exception(f"Error in worker loop: {e}") - # Recompute matched schedules if data has changed - if cache.get("schedule_entries") is None: - matched = Schedule.match_shuttles_to_schedules() - cache.set("schedule_entries", matched, timeout=3600) - - except Exception as e: - logger.exception(f'Error in worker loop: {e}') + await asyncio.sleep(5) + finally: + # Cleanup on shutdown + logger.info("Shutting down worker...") + await db_engine.dispose() + logger.info("Database connections closed") - time.sleep(5) -if __name__ == '__main__': - run_worker() +if __name__ == "__main__": + asyncio.run(run_worker()) diff --git a/shubble.py b/shubble.py index a0c1107b..fff0178e 100644 --- a/shubble.py +++ b/shubble.py @@ -1,7 +1,9 @@ -from server import create_app +"""ASGI application entry point for FastAPI.""" +from server import app -app = create_app() +# Export for uvicorn: uvicorn shubble:app +__all__ = ["app"] -if __name__ == '__main__': - # Run Flask app - app.run() +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) From 4f975bb7bff7f1920aec6f51fb8072e5941008b8 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Thu, 25 Dec 2025 10:12:08 -0500 Subject: [PATCH 12/29] make initial migration --- alembic/versions/4f42c8d834fa_initial.py | 99 ++++++++++++++++++++++++ 1 file changed, 99 insertions(+) create mode 100644 alembic/versions/4f42c8d834fa_initial.py diff --git a/alembic/versions/4f42c8d834fa_initial.py b/alembic/versions/4f42c8d834fa_initial.py new file mode 100644 index 00000000..667fd0cd --- /dev/null +++ b/alembic/versions/4f42c8d834fa_initial.py @@ -0,0 +1,99 @@ +"""initial + +Revision ID: 4f42c8d834fa +Revises: +Create Date: 2025-12-24 23:11:50.160351 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '4f42c8d834fa' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('drivers', + sa.Column('id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('vehicles', + sa.Column('id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('asset_type', sa.String(), nullable=False), + sa.Column('license_plate', sa.String(), nullable=True), + sa.Column('vin', sa.String(), nullable=True), + sa.Column('maintenance_id', sa.String(), nullable=True), + sa.Column('gateway_model', sa.String(), nullable=True), + sa.Column('gateway_serial', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('driver_vehicle_assignments', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('driver_id', sa.String(), nullable=False), + sa.Column('vehicle_id', sa.String(), nullable=False), + sa.Column('assignment_start', sa.DateTime(timezone=True), nullable=False), + sa.Column('assignment_end', sa.DateTime(timezone=True), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(['driver_id'], ['drivers.id'], ), + sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_driver_vehicle_assignments_driver_id'), 'driver_vehicle_assignments', ['driver_id'], unique=False) + op.create_index(op.f('ix_driver_vehicle_assignments_vehicle_id'), 'driver_vehicle_assignments', ['vehicle_id'], unique=False) + op.create_table('geofence_events', + sa.Column('id', sa.String(), nullable=False), + sa.Column('vehicle_id', sa.String(), nullable=False), + sa.Column('event_type', sa.String(), nullable=False), + sa.Column('event_time', sa.DateTime(timezone=True), nullable=False), + sa.Column('address_name', sa.String(), nullable=True), + sa.Column('address_formatted', sa.String(), nullable=True), + sa.Column('latitude', sa.Float(), nullable=True), + sa.Column('longitude', sa.Float(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('vehicle_locations', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('vehicle_id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False), + sa.Column('latitude', sa.Float(), nullable=False), + sa.Column('longitude', sa.Float(), nullable=False), + sa.Column('heading_degrees', sa.Float(), nullable=True), + sa.Column('speed_mph', sa.Float(), nullable=True), + sa.Column('is_ecu_speed', sa.Boolean(), nullable=False), + sa.Column('formatted_location', sa.String(), nullable=True), + sa.Column('address_id', sa.String(), nullable=True), + sa.Column('address_name', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_vehicle_locations_vehicle_id'), 'vehicle_locations', ['vehicle_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_vehicle_locations_vehicle_id'), table_name='vehicle_locations') + op.drop_table('vehicle_locations') + op.drop_table('geofence_events') + op.drop_index(op.f('ix_driver_vehicle_assignments_vehicle_id'), table_name='driver_vehicle_assignments') + op.drop_index(op.f('ix_driver_vehicle_assignments_driver_id'), table_name='driver_vehicle_assignments') + op.drop_table('driver_vehicle_assignments') + op.drop_table('vehicles') + op.drop_table('drivers') + # ### end Alembic commands ### From d48d2c518f3c758f45a8d87d5091de0fcc66584e Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Thu, 25 Dec 2025 10:12:43 -0500 Subject: [PATCH 13/29] remove old migrations folder --- migrations/.gitignore | 1 - migrations/README | 1 - migrations/alembic.ini | 50 -------- migrations/env.py | 113 ------------------ migrations/script.py.mako | 24 ---- migrations/versions/.gitignore | 1 - .../23c5d550a4a5_add_vehiclelocation.py | 50 -------- ...6d56f300_add_driver_and_driver_vehicle_.py | 53 -------- .../d166ecfdae66_initial_migration.py | 52 -------- 9 files changed, 345 deletions(-) delete mode 100644 migrations/.gitignore delete mode 100644 migrations/README delete mode 100644 migrations/alembic.ini delete mode 100644 migrations/env.py delete mode 100644 migrations/script.py.mako delete mode 100644 migrations/versions/.gitignore delete mode 100644 migrations/versions/23c5d550a4a5_add_vehiclelocation.py delete mode 100644 migrations/versions/9f0c6d56f300_add_driver_and_driver_vehicle_.py delete mode 100644 migrations/versions/d166ecfdae66_initial_migration.py diff --git a/migrations/.gitignore b/migrations/.gitignore deleted file mode 100644 index c18dd8d8..00000000 --- a/migrations/.gitignore +++ /dev/null @@ -1 +0,0 @@ -__pycache__/ diff --git a/migrations/README b/migrations/README deleted file mode 100644 index 0e048441..00000000 --- a/migrations/README +++ /dev/null @@ -1 +0,0 @@ -Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini deleted file mode 100644 index ec9d45c2..00000000 --- a/migrations/alembic.ini +++ /dev/null @@ -1,50 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic,flask_migrate - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[logger_flask_migrate] -level = INFO -handlers = -qualname = flask_migrate - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py deleted file mode 100644 index 4c970927..00000000 --- a/migrations/env.py +++ /dev/null @@ -1,113 +0,0 @@ -import logging -from logging.config import fileConfig - -from flask import current_app - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. 
-fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - - -def get_engine(): - try: - # this works with Flask-SQLAlchemy<3 and Alchemical - return current_app.extensions['migrate'].db.get_engine() - except (TypeError, AttributeError): - # this works with Flask-SQLAlchemy>=3 - return current_app.extensions['migrate'].db.engine - - -def get_engine_url(): - try: - return get_engine().url.render_as_string(hide_password=False).replace( - '%', '%%') - except AttributeError: - return str(get_engine().url).replace('%', '%%') - - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -config.set_main_option('sqlalchemy.url', get_engine_url()) -target_db = current_app.extensions['migrate'].db - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_metadata(): - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[None] - return target_db.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=get_metadata(), literal_binds=True - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if script.upgrade_ops.is_empty(): - directives[:] = [] - logger.info('No changes in schema detected.') - - conf_args = current_app.extensions['migrate'].configure_args - if conf_args.get("process_revision_directives") is None: - conf_args["process_revision_directives"] = process_revision_directives - - connectable = get_engine() - - with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=get_metadata(), - **conf_args - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako deleted file mode 100644 index 2c015630..00000000 --- a/migrations/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/.gitignore b/migrations/versions/.gitignore deleted file mode 100644 index ed8ebf58..00000000 --- a/migrations/versions/.gitignore +++ /dev/null @@ -1 +0,0 @@ -__pycache__ \ No newline at end of file diff --git a/migrations/versions/23c5d550a4a5_add_vehiclelocation.py b/migrations/versions/23c5d550a4a5_add_vehiclelocation.py deleted file mode 100644 index c318500b..00000000 --- a/migrations/versions/23c5d550a4a5_add_vehiclelocation.py +++ /dev/null @@ -1,50 +0,0 @@ -"""add VehicleLocation - -Revision ID: 23c5d550a4a5 -Revises: d166ecfdae66 -Create Date: 2025-06-26 20:49:07.733696 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '23c5d550a4a5' -down_revision = 'd166ecfdae66' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('vehicle_locations', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vehicle_id', sa.String(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('timestamp', sa.DateTime(), nullable=False), - sa.Column('latitude', sa.Float(), nullable=False), - sa.Column('longitude', sa.Float(), nullable=False), - sa.Column('heading_degrees', sa.Float(), nullable=True), - sa.Column('speed_mph', sa.Float(), nullable=True), - sa.Column('is_ecu_speed', sa.Boolean(), nullable=True), - sa.Column('formatted_location', sa.String(), nullable=True), - sa.Column('address_id', sa.String(), nullable=True), - sa.Column('address_name', sa.String(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), - sa.PrimaryKeyConstraint('id') - ) - with op.batch_alter_table('vehicle_locations', schema=None) as batch_op: - batch_op.create_index(batch_op.f('ix_vehicle_locations_vehicle_id'), ['vehicle_id'], unique=False) - - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('vehicle_locations', schema=None) as batch_op: - batch_op.drop_index(batch_op.f('ix_vehicle_locations_vehicle_id')) - - op.drop_table('vehicle_locations') - # ### end Alembic commands ### diff --git a/migrations/versions/9f0c6d56f300_add_driver_and_driver_vehicle_.py b/migrations/versions/9f0c6d56f300_add_driver_and_driver_vehicle_.py deleted file mode 100644 index e0fb27a0..00000000 --- a/migrations/versions/9f0c6d56f300_add_driver_and_driver_vehicle_.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Add driver and driver_vehicle_assignments tables - -Revision ID: 9f0c6d56f300 -Revises: 23c5d550a4a5 -Create Date: 2025-12-11 10:14:06.091187 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '9f0c6d56f300' -down_revision = '23c5d550a4a5' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('drivers', - sa.Column('id', sa.String(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('driver_vehicle_assignments', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('driver_id', sa.String(), nullable=False), - sa.Column('vehicle_id', sa.String(), nullable=False), - sa.Column('assignment_start', sa.DateTime(), nullable=False), - sa.Column('assignment_end', sa.DateTime(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['driver_id'], ['drivers.id'], ), - sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), - sa.PrimaryKeyConstraint('id') - ) - with op.batch_alter_table('driver_vehicle_assignments', schema=None) as batch_op: - batch_op.create_index(batch_op.f('ix_driver_vehicle_assignments_driver_id'), ['driver_id'], unique=False) - batch_op.create_index(batch_op.f('ix_driver_vehicle_assignments_vehicle_id'), ['vehicle_id'], unique=False) - - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('driver_vehicle_assignments', schema=None) as batch_op: - batch_op.drop_index(batch_op.f('ix_driver_vehicle_assignments_vehicle_id')) - batch_op.drop_index(batch_op.f('ix_driver_vehicle_assignments_driver_id')) - - op.drop_table('driver_vehicle_assignments') - op.drop_table('drivers') - # ### end Alembic commands ### diff --git a/migrations/versions/d166ecfdae66_initial_migration.py b/migrations/versions/d166ecfdae66_initial_migration.py deleted file mode 100644 index 9bc64783..00000000 --- a/migrations/versions/d166ecfdae66_initial_migration.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Initial migration - -Revision ID: d166ecfdae66 -Revises: -Create Date: 2025-06-23 23:22:36.927323 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'd166ecfdae66' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('vehicles', - sa.Column('id', sa.String(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('asset_type', sa.String(), nullable=True), - sa.Column('license_plate', sa.String(), nullable=True), - sa.Column('vin', sa.String(), nullable=True), - sa.Column('maintenance_id', sa.String(), nullable=True), - sa.Column('gateway_model', sa.String(), nullable=True), - sa.Column('gateway_serial', sa.String(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('geofence_events', - sa.Column('id', sa.String(), nullable=False), - sa.Column('vehicle_id', sa.String(), nullable=False), - sa.Column('event_type', sa.String(), nullable=False), - sa.Column('event_time', sa.DateTime(), nullable=False), - sa.Column('address_name', sa.String(), nullable=True), - sa.Column('address_formatted', sa.String(), nullable=True), - sa.Column('latitude', sa.Float(), nullable=True), - sa.Column('longitude', sa.Float(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('geofence_events') - op.drop_table('vehicles') - # ### end Alembic commands ### From 3659802d3511bf2ad326defaac59d556c76ae8e0 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Thu, 25 Dec 2025 10:12:59 -0500 Subject: [PATCH 14/29] fix bugs caused by fastapi migration --- .env.example | 2 +- docs/INSTALLATION.md | 2 +- docs/more/INTRO_TO_UI.md | 8 +- server/config.py | 2 +- server/database.py | 3 +- server/routes.py | 24 +- test-server/server.py | 615 +++++++++++++++++++++++---------------- vite.config.ts | 3 + 8 files changed, 397 insertions(+), 262 deletions(-) diff --git a/.env.example b/.env.example index cb47fed2..6b680cfe 100644 --- a/.env.example +++ b/.env.example @@ -22,7 +22,7 @@ TEST_BACKEND_PORT=4000 # Main application URLs FRONTEND_URL=http://localhost:3000 -VITE_FRONTEND_URL=http://localhost:5173 +VITE_FRONTEND_URL=http://localhost:3000 VITE_BACKEND_URL=http://localhost:8000 # Test/Mock service URLs (for development/testing) diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md index 4abc5ced..dbbd34ef 100644 --- a/docs/INSTALLATION.md +++ b/docs/INSTALLATION.md @@ -166,7 +166,7 @@ docker compose down -v ``` 4. **Access the application:** - - Frontend: http://localhost:5173 + - Frontend: http://localhost:3000 - Backend API: http://localhost:8000 ### Test Services Setup diff --git a/docs/more/INTRO_TO_UI.md b/docs/more/INTRO_TO_UI.md index 492acb73..ce05d007 100644 --- a/docs/more/INTRO_TO_UI.md +++ b/docs/more/INTRO_TO_UI.md @@ -40,11 +40,11 @@ To view the UI while you're developing, you’ll start the frontend's developmen This command launches a development server so you can run and test your frontend application locally. #### 2. Wait for the local address to appear: - Once the dev server starts, your terminal will show a link where the UI is running. It usually looks like: + Once the dev server starts, your terminal will show a link where the UI is running. It usually looks like: ```bash - http://localhost:5173/ - ``` - The port your development server runs on may differ based on the project’s configuration and whichever ports are free on your system. + http://localhost:3000/ + ``` + The port your development server runs on may differ based on the project's configuration and whichever ports are free on your system. Open the link: - **Mac:** `Command + Click` the link the terminal prints. diff --git a/server/config.py b/server/config.py index 89902405..3d6fcd5f 100644 --- a/server/config.py +++ b/server/config.py @@ -17,7 +17,7 @@ class Settings(BaseSettings): LOG_LEVEL: str = "INFO" # CORS settings - FRONTEND_URL: str = "http://localhost:5173" + FRONTEND_URL: str = "http://localhost:3000" TEST_FRONTEND_URL: str = "http://localhost:5174" # Database settings diff --git a/server/database.py b/server/database.py index e4e245b5..b24cf384 100644 --- a/server/database.py +++ b/server/database.py @@ -1,4 +1,5 @@ """Async database configuration for FastAPI.""" +from fastapi import Request from typing import AsyncGenerator, TYPE_CHECKING from sqlalchemy.ext.asyncio import ( create_async_engine, @@ -53,7 +54,7 @@ def create_session_factory(engine: AsyncEngine): ) -async def get_db(request: "Request") -> AsyncGenerator[AsyncSession, None]: +async def get_db(request: Request) -> AsyncGenerator[AsyncSession, None]: """ FastAPI dependency for getting async database sessions. 
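(For context on the get_db dependency in the hunk above: a minimal sketch of
how a route consumes it, assuming the lifespan wiring used elsewhere in this
series has set app.state.session_factory. Illustrative only, not part of the
patch.)

    # Sketch only: assumes server.database.get_db as defined in this diff
    from fastapi import Depends, FastAPI
    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import AsyncSession

    from server.database import get_db

    app = FastAPI()

    @app.get("/healthz")
    async def healthz(db: AsyncSession = Depends(get_db)):
        # get_db yields a request-scoped AsyncSession; FastAPI finalizes the
        # generator (and the session) after the response is sent
        result = await db.execute(text("SELECT 1"))
        return {"database": "ok" if result.scalar() == 1 else "down"}
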
diff --git a/server/routes.py b/server/routes.py index 5893e4fe..9d89d8f7 100644 --- a/server/routes.py +++ b/server/routes.py @@ -11,6 +11,7 @@ from sqlalchemy import func, and_, select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.dialects import postgresql +from sqlalchemy.orm import selectinload from .database import get_db from .models import Vehicle, GeofenceEvent, VehicleLocation, DriverVehicleAssignment @@ -89,7 +90,7 @@ async def get_locations(db: AsyncSession = Depends(get_db)): # Join to get full location and vehicle info for vehicles in geofence query = ( - select(VehicleLocation, Vehicle) + select(VehicleLocation) .join( latest_locations, and_( @@ -97,20 +98,23 @@ async def get_locations(db: AsyncSession = Depends(get_db)): VehicleLocation.timestamp == latest_locations.c.latest_time, ), ) - .join(Vehicle, VehicleLocation.vehicle_id == Vehicle.id) + .options(selectinload(VehicleLocation.vehicle)) ) result = await db.execute(query) - results = result.all() + results = result.scalars().all() # Get current driver assignments for all vehicles in results - vehicle_ids = [loc.vehicle_id for loc, _ in results] + vehicle_ids = [loc.vehicle_id for loc in results] current_assignments = {} if vehicle_ids: - assignments_query = select(DriverVehicleAssignment).where( - DriverVehicleAssignment.vehicle_id.in_(vehicle_ids), - DriverVehicleAssignment.assignment_end.is_(None), - ) + assignments_query = ( + select(DriverVehicleAssignment) + .where( + DriverVehicleAssignment.vehicle_id.in_(vehicle_ids), + DriverVehicleAssignment.assignment_end.is_(None), + ) + ).options(selectinload(DriverVehicleAssignment.driver)) assignments_result = await db.execute(assignments_query) assignments = assignments_result.scalars().all() for assignment in assignments: @@ -118,7 +122,8 @@ async def get_locations(db: AsyncSession = Depends(get_db)): # Format response response = {} - for loc, vehicle in results: + for loc in results: + vehicle = loc.vehicle # Get closest loop closest_distance, _, closest_route_name, polyline_index = Stops.get_closest_point( (loc.latitude, loc.longitude) @@ -264,6 +269,7 @@ async def webhook(request: Request, db: AsyncSession = Depends(get_db)): gateway_serial=vehicle_data.get("gateway", {}).get("serial"), ) db.add(vehicle) + await db.flush() # Ensure vehicle.id is available # Insert geofence event (using PostgreSQL upsert) insert_stmt = postgresql.insert(GeofenceEvent).values( diff --git a/test-server/server.py b/test-server/server.py index c2ff5d20..60c921ed 100644 --- a/test-server/server.py +++ b/test-server/server.py @@ -1,322 +1,447 @@ -from flask import Flask, jsonify, request, send_from_directory -from flask_cors import CORS -from threading import Thread, Lock -import time -import os +"""FastAPI test server - Mock Samsara API for development/testing.""" +import asyncio import logging +import os +from contextlib import asynccontextmanager +from datetime import datetime +from pathlib import Path +import numpy as np +from fastapi import FastAPI, Request, HTTPException +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import FileResponse, JSONResponse +from fastapi.staticfiles import StaticFiles +from sqlalchemy import func, and_, select + +from server.config import settings +from server.database import create_async_db_engine, create_session_factory +from server.models import Vehicle, GeofenceEvent, VehicleLocation from server.time_utils import get_campus_start_of_day from .shuttle import Shuttle, ShuttleState from data.stops import Stops 
-from datetime import datetime, date -from server.models import Vehicle, GeofenceEvent, VehicleLocation -from server.config import Config -from sqlalchemy import func, and_ -from flask_sqlalchemy import SQLAlchemy -import numpy as np +# Global shuttle management shuttles = {} shuttle_counter = 1 -shuttle_lock = Lock() +shuttle_lock = asyncio.Lock() route_names = Stops.active_routes logger = logging.getLogger(__name__) -app = Flask(__name__, static_folder="../test-client/dist", static_url_path="") -app.config.from_object(Config) -# Configure CORS for test-client -CORS(app, origins=[app.config.get('TEST_FRONTEND_URL', 'http://localhost:5174')], supports_credentials=True) - -db = SQLAlchemy() -db.init_app(app) - -# setup function to populate the shuttles dict, same as in server/routes -def setup(): - # Start of today for filtering today's geofence events - start_of_today = get_campus_start_of_day() - - # Subquery: latest geofence event today per vehicle - # Returns a query result of (vehicle_id, event_time) - latest_geofence_events = db.session.query( - GeofenceEvent.vehicle_id, - func.max(GeofenceEvent.event_time).label('latest_time') - ).filter( - GeofenceEvent.event_time >= start_of_today - ).group_by(GeofenceEvent.vehicle_id).subquery() - - # Join to get full geofence event rows where event is geofenceEntry - # Returns a query result of (vehicle_id, event_time, ...geofence fields including event_type) - geofence_entries = db.session.query(GeofenceEvent.vehicle_id).join( - latest_geofence_events, - and_( - GeofenceEvent.vehicle_id == latest_geofence_events.c.vehicle_id, - GeofenceEvent.event_time == latest_geofence_events.c.latest_time +async def setup_shuttles(session_factory): + """Populate the shuttles dict from database.""" + async with session_factory() as db: + start_of_today = get_campus_start_of_day() + + # Subquery: latest geofence event today per vehicle + latest_geofence_events = ( + select( + GeofenceEvent.vehicle_id, + func.max(GeofenceEvent.event_time).label("latest_time"), + ) + .where(GeofenceEvent.event_time >= start_of_today) + .group_by(GeofenceEvent.vehicle_id) + .subquery() + ) + + # Join to get full geofence event rows where event is geofenceEntry + geofence_entries = ( + select(GeofenceEvent.vehicle_id) + .join( + latest_geofence_events, + and_( + GeofenceEvent.vehicle_id == latest_geofence_events.c.vehicle_id, + GeofenceEvent.event_time == latest_geofence_events.c.latest_time, + ), + ) + .where(GeofenceEvent.event_type == "geofenceEntry") + .subquery() ) - ).filter(GeofenceEvent.event_type == 'geofenceEntry').subquery() - - # Subquery: latest vehicle location per vehicle - # Returns a query result of (vehicle_id, location_time) - latest_locations = db.session.query( - VehicleLocation.vehicle_id, - func.max(VehicleLocation.timestamp).label('latest_time') - ).filter( - VehicleLocation.vehicle_id.in_(db.session.query(geofence_entries.c.vehicle_id)) - ).group_by(VehicleLocation.vehicle_id).subquery() - - # Join to get full location and vehicle info for vehicles in geofence - results = db.session.query(VehicleLocation, Vehicle).join( - latest_locations, - and_( - VehicleLocation.vehicle_id == latest_locations.c.vehicle_id, - VehicleLocation.timestamp == latest_locations.c.latest_time + + # Subquery: latest vehicle location per vehicle + latest_locations = ( + select( + VehicleLocation.vehicle_id, + func.max(VehicleLocation.timestamp).label("latest_time"), + ) + .where(VehicleLocation.vehicle_id.in_(select(geofence_entries.c.vehicle_id))) + 
.group_by(VehicleLocation.vehicle_id) + .subquery() + ) + + # Join to get full location and vehicle info for vehicles in geofence + query = ( + select(VehicleLocation, Vehicle) + .join( + latest_locations, + and_( + VehicleLocation.vehicle_id == latest_locations.c.vehicle_id, + VehicleLocation.timestamp == latest_locations.c.latest_time, + ), + ) + .join(Vehicle, VehicleLocation.vehicle_id == Vehicle.id) ) - ).join( - Vehicle, VehicleLocation.vehicle_id == Vehicle.id - ).all() - # extract vehicle information - for loc, vehicle in results: - shuttles[vehicle.id] = Shuttle(vehicle.id, loc.latitude, loc.longitude) + result = await db.execute(query) + results = result.all() + + # Extract vehicle information + async with shuttle_lock: + for loc, vehicle in results: + shuttles[vehicle.id] = Shuttle(vehicle.id, loc.latitude, loc.longitude) -with app.app_context(): - setup() -# --- Background Thread --- -def update_loop(): +async def update_loop(): + """Background task to update shuttle states.""" while True: - time.sleep(0.1) - with shuttle_lock: + await asyncio.sleep(0.1) + async with shuttle_lock: for shuttle in shuttles.values(): shuttle.update_state() -# Start background updater -t = Thread(target=update_loop, daemon=True) -t.start() + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Lifespan context manager for startup and shutdown events.""" + # Startup + logger.info("Starting test server...") + + # Initialize database + app.state.db_engine = create_async_db_engine( + settings.DATABASE_URL, echo=settings.DEBUG + ) + app.state.session_factory = create_session_factory(app.state.db_engine) + logger.info("Database initialized") + + # Setup shuttles from database + await setup_shuttles(app.state.session_factory) + logger.info(f"Initialized {len(shuttles)} shuttles from database") + + # Start background updater task + app.state.update_task = asyncio.create_task(update_loop()) + logger.info("Background updater task started") + + yield + + # Shutdown + logger.info("Shutting down test server...") + app.state.update_task.cancel() + try: + await app.state.update_task + except asyncio.CancelledError: + pass + await app.state.db_engine.dispose() + logger.info("Test server shutdown complete") + + +# Create FastAPI app +app = FastAPI( + title="Mock Samsara API", + description="Test server for Shubble development", + version="1.0.0", + lifespan=lifespan, +) + +# Configure CORS for test-client +app.add_middleware( + CORSMiddleware, + allow_origins=[settings.TEST_FRONTEND_URL], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + # --- API Routes --- -@app.route("/api/shuttles", methods=["GET"]) -def list_shuttles(): - with shuttle_lock: - return jsonify([s.to_dict() for s in shuttles.values()]) +@app.get("/api/shuttles") +async def list_shuttles(): + """List all active shuttles.""" + async with shuttle_lock: + return [s.to_dict() for s in shuttles.values()] -@app.route("/api/shuttles", methods=["POST"]) -def create_shuttle(): + +@app.post("/api/shuttles") +async def create_shuttle(): + """Create a new test shuttle.""" global shuttle_counter - with shuttle_lock: + async with shuttle_lock: shuttle_id = str(shuttle_counter).zfill(15) shuttle = Shuttle(shuttle_id) shuttles[shuttle_id] = shuttle logger.info(f"Created shuttle {shuttle_counter}") shuttle_counter += 1 - return jsonify(shuttle.to_dict()), 201 + return JSONResponse(shuttle.to_dict(), status_code=201) + -@app.route("/api/shuttles//set-next-state", methods=["POST"]) -def trigger_action(shuttle_id): - next_state = 
request.json.get("state") - with shuttle_lock: +@app.post("/api/shuttles/{shuttle_id}/set-next-state") +async def trigger_action(shuttle_id: str, request: Request): + """Set the next state for a shuttle.""" + data = await request.json() + next_state = data.get("state") + + async with shuttle_lock: shuttle = shuttles.get(shuttle_id) if not shuttle: - return {"error": "Shuttle not found"}, 404 + raise HTTPException(status_code=404, detail="Shuttle not found") try: desired_state = ShuttleState(next_state) except ValueError: - return {"error": "Invalid action"}, 400 + raise HTTPException(status_code=400, detail="Invalid action") shuttle.set_next_state(desired_state) if desired_state == ShuttleState.LOOPING: - route = request.json.get("route") + route = data.get("route") shuttle.set_next_route(route) logger.info(f"Set shuttle {shuttle_id} next state to {next_state}") - return jsonify(shuttle.to_dict()) - -@app.route("/api/routes", methods=["GET"]) -def get_routes(): - return jsonify(sorted(list(route_names))) - -@app.route("/api/events/today", methods=["GET"]) -def get_events_today(): - start_of_today = get_campus_start_of_day() - loc_count = db.session.query(VehicleLocation).filter(VehicleLocation.timestamp >= start_of_today).count() - geo_count = db.session.query(GeofenceEvent).filter(GeofenceEvent.event_time >= start_of_today).count() - return jsonify({ - 'locationCount': loc_count, - 'geofenceCount': geo_count - }) - -@app.route("/api/events/today", methods=["DELETE"]) -def clear_events_today(): - keep_shuttles = request.args.get("keepShuttles", "false").lower() == "true" - start_of_today = get_campus_start_of_day() - - db.session.query(VehicleLocation).filter(VehicleLocation.timestamp >= start_of_today).delete() - logger.info(f"Deleted vehicle location events past {start_of_today}") - - if not keep_shuttles: - db.session.query(GeofenceEvent).filter(GeofenceEvent.event_time >= start_of_today).delete() - logger.info(f"Deleted geofence events past {start_of_today}") - - global shuttle_counter - with shuttle_lock: - shuttles.clear() - shuttle_counter = 1 - logger.info(f"Deleted all shuttles") - else: - ''' - Delete all geofence events >= start_of_today except for: each vehicle's latest one (if it - is a geofenceEntry. geofenceExits are still deleted). This allows all currently running - shuttles to keep running in the test suite. 
-    '''
-
-    # Get today's geofence events
-    today_events = db.session.query(GeofenceEvent).filter(
+    return shuttle.to_dict()
+
+
+@app.get("/api/routes")
+async def get_routes():
+    """Get list of available routes."""
+    return sorted(list(route_names))
+
+
+@app.get("/api/events/today")
+async def get_events_today(request: Request):
+    """Get count of events from today."""
+    async with request.app.state.session_factory() as db:
+        start_of_today = get_campus_start_of_day()
+
+        loc_query = select(func.count()).select_from(VehicleLocation).where(
+            VehicleLocation.timestamp >= start_of_today
+        )
+        geo_query = select(func.count()).select_from(GeofenceEvent).where(
             GeofenceEvent.event_time >= start_of_today
-    ).subquery()
-    # Get latest event per vehicle from today's geofence events
-    latest_times = db.session.query(
-        today_events.c.vehicle_id,
-        func.max(today_events.c.event_time).label("latest_time")
-    ).group_by(today_events.c.vehicle_id).subquery()
-    # Join back to get the full event row, select to keep only geofenceEntry, project on id
-    latest_entries = db.session.query(today_events.c.id).join(
-        latest_times,
-        and_(
-            today_events.c.vehicle_id == latest_times.c.vehicle_id,
-            today_events.c.event_time == latest_times.c.latest_time
+        )
+
+        loc_result = await db.execute(loc_query)
+        geo_result = await db.execute(geo_query)
+
+        return {
+            "locationCount": loc_result.scalar(),
+            "geofenceCount": geo_result.scalar(),
+        }
+
+
+@app.delete("/api/events/today")
+async def clear_events_today(request: Request, keepShuttles: bool = False):
+    """Clear events from today."""
+    global shuttle_counter
+    async with request.app.state.session_factory() as db:
+        start_of_today = get_campus_start_of_day()
+
+        # Delete vehicle locations
+        await db.execute(
+            VehicleLocation.__table__.delete().where(
+                VehicleLocation.timestamp >= start_of_today
+            )
+        )
+        logger.info(f"Deleted vehicle location events past {start_of_today}")
+
+        if not keepShuttles:
+            # Delete all geofence events
+            await db.execute(
+                GeofenceEvent.__table__.delete().where(
+                    GeofenceEvent.event_time >= start_of_today
+                )
+            )
+            logger.info(f"Deleted geofence events past {start_of_today}")
+
+            # Clear all shuttles
+            async with shuttle_lock:
+                shuttles.clear()
+                shuttle_counter = 1
+            logger.info("Deleted all shuttles")
+        else:
+            # Keep latest geofenceEntry for each vehicle
+            today_events = (
+                select(GeofenceEvent)
+                .where(GeofenceEvent.event_time >= start_of_today)
+                .subquery()
             )
-    ).filter(today_events.c.event_type == 'geofenceEntry').subquery()
-    # Delete all in today_events that aren't in latest_entries
-    db.session.query(GeofenceEvent).filter(
-        GeofenceEvent.id.in_(db.session.query(today_events.c.id))
-    ).filter(
-        ~GeofenceEvent.id.in_(db.session.query(latest_entries.c.id))
-    ).delete()
-    logger.info(f"Deleted geofence events past {start_of_today} except for currently running shuttles")
+            latest_times = (
+                select(
+                    today_events.c.vehicle_id,
+                    func.max(today_events.c.event_time).label("latest_time"),
+                )
+                .group_by(today_events.c.vehicle_id)
+                .subquery()
+            )
 
-    db.session.commit()
-    return "", 204
+            latest_entries = (
+                select(today_events.c.id)
+                .join(
+                    latest_times,
+                    and_(
+                        today_events.c.vehicle_id == latest_times.c.vehicle_id,
+                        today_events.c.event_time == latest_times.c.latest_time,
+                    ),
+                )
+                .where(today_events.c.event_type == "geofenceEntry")
+                .scalar_subquery()
+            )
 
-# --- Frontend Serving ---
-@app.route("/")
-@app.route("/<path:path>")
-def serve_frontend(path=""):
-    if path != "" and os.path.exists(os.path.join(app.static_folder, path)):
-        return send_from_directory(app.static_folder, path)
-    else:
-        return send_from_directory(app.static_folder, "index.html")
+            # Delete events not in latest_entries
+            await db.execute(
+                GeofenceEvent.__table__.delete().where(
+                    GeofenceEvent.event_time >= start_of_today,
+                    ~GeofenceEvent.id.in_(latest_entries),
+                )
+            )
+            logger.info(
+                f"Deleted geofence events past {start_of_today} except for currently running shuttles"
+            )
 
-@app.route('/fleet/vehicles/stats')
-def mock_stats():
-    vehicle_ids = request.args.get('vehicleIds', '').split(',')
-    after = request.args.get('after')
+        await db.commit()
+        return JSONResponse(content="", status_code=204)
 
-    logger.info(f'[MOCK API] Received stats snapshot request for vehicles {vehicle_ids} after={after}')
-    # update timestamps
-    with shuttle_lock:
+
+# --- Mock Samsara API Endpoints ---
+@app.get("/fleet/vehicles/stats")
+async def mock_stats(vehicleIds: str = "", after: str = None):
+    """Mock Samsara vehicle stats endpoint."""
+    vehicle_ids = vehicleIds.split(",") if vehicleIds else []
+
+    logger.info(
+        f"[MOCK API] Received stats snapshot request for vehicles {vehicle_ids} after={after}"
+    )
+
+    async with shuttle_lock:
         data = []
         for shuttle_id in vehicle_ids:
             if shuttle_id in shuttles:
-                # add error to location
+                # Add error to location
                 lat, lon = shuttles[shuttle_id].location
                 lat += np.random.normal(0, 0.00008)
                 lon += np.random.normal(0, 0.00008)
-                data.append({
-                    'id': shuttle_id,
-                    'name': shuttle_id[-3:],
-                    'gps': {
-                        'latitude': lat,
-                        'longitude': lon,
-                        'time': datetime.fromtimestamp(shuttles[shuttle_id].last_updated).isoformat(timespec='seconds').replace('+00:00', 'Z'),
-                        'speedMilesPerHour': shuttles[shuttle_id].speed,
-                        'headingDegrees': 90,
-                        'reverseGeo': {'formattedLocation': 'Test Location'}
+                data.append(
+                    {
+                        "id": shuttle_id,
+                        "name": shuttle_id[-3:],
+                        "gps": {
+                            "latitude": lat,
+                            "longitude": lon,
+                            "time": datetime.fromtimestamp(
+                                shuttles[shuttle_id].last_updated
+                            )
+                            .isoformat(timespec="seconds")
+                            .replace("+00:00", "Z"),
+                            "speedMilesPerHour": shuttles[shuttle_id].speed,
+                            "headingDegrees": 90,
+                            "reverseGeo": {"formattedLocation": "Test Location"},
+                        },
                     }
-                })
+                )
 
-    return jsonify({
-        'data': data,
-        'pagination': {
-            'hasNextPage': False,
-            'endCursor': 'fake-token-next'
-        }
-    })
+    return {
+        "data": data,
+        "pagination": {"hasNextPage": False, "endCursor": "fake-token-next"},
+    }
 
-@app.route('/fleet/vehicles/stats/feed')
-def mock_feed():
-    vehicle_ids = request.args.get('vehicleIds', '').split(',')
-    after = request.args.get('after')
 
-    logger.info(f'[MOCK API] Received stats feed request for vehicles {vehicle_ids} after={after}')
+@app.get("/fleet/vehicles/stats/feed")
+async def mock_feed(vehicleIds: str = "", after: str = None):
+    """Mock Samsara vehicle stats feed endpoint."""
+    vehicle_ids = vehicleIds.split(",") if vehicleIds else []
 
-    # update timestamps
-    with shuttle_lock:
+    logger.info(
+        f"[MOCK API] Received stats feed request for vehicles {vehicle_ids} after={after}"
+    )
+
+    async with shuttle_lock:
         data = []
         for shuttle_id in vehicle_ids:
             if shuttle_id in shuttles:
-                # add error to location
+                # Add error to location
                 lat, lon = shuttles[shuttle_id].location
                 lat += np.random.normal(0, 0.00008)
                 lon += np.random.normal(0, 0.00008)
-                data.append({
-                    'id': shuttle_id,
-                    'name': shuttle_id[-3:],
-                    'gps': [
-                        {
-                            'latitude': lat,
-                            'longitude': lon,
-                            'time': datetime.fromtimestamp(shuttles[shuttle_id].last_updated).isoformat(timespec='seconds').replace('+00:00', 'Z'),
-                            'speedMilesPerHour': shuttles[shuttle_id].speed,
-                            'headingDegrees': 90,
-                            'reverseGeo': {'formattedLocation': 'Test Location'}
-                        }
-                    ]
-                })
-
-    return jsonify({
-        'data': data,
-        'pagination': {
-            'hasNextPage': False,
-            'endCursor': 'fake-token-next'
-        }
-    })
-
-@app.route('/fleet/driver-vehicle-assignments')
-def mock_driver_assignments():
+                data.append(
+                    {
+                        "id": shuttle_id,
+                        "name": shuttle_id[-3:],
+                        "gps": [
+                            {
+                                "latitude": lat,
+                                "longitude": lon,
+                                "time": datetime.fromtimestamp(
+                                    shuttles[shuttle_id].last_updated
+                                )
+                                .isoformat(timespec="seconds")
+                                .replace("+00:00", "Z"),
+                                "speedMilesPerHour": shuttles[shuttle_id].speed,
+                                "headingDegrees": 90,
+                                "reverseGeo": {"formattedLocation": "Test Location"},
+                            }
+                        ],
+                    }
+                )
+
+    return {
+        "data": data,
+        "pagination": {"hasNextPage": False, "endCursor": "fake-token-next"},
+    }
+
+
+@app.get("/fleet/driver-vehicle-assignments")
+async def mock_driver_assignments(vehicleIds: str = ""):
     """Mock endpoint for driver-vehicle assignments."""
-    vehicle_ids = request.args.get('vehicleIds', '').split(',')
-
-    logger.info(f'[MOCK API] Received driver-vehicle assignments request for vehicles {vehicle_ids}')
-
-    with shuttle_lock:
+    vehicle_ids = vehicleIds.split(",") if vehicleIds else []
+
+    logger.info(
+        f"[MOCK API] Received driver-vehicle assignments request for vehicles {vehicle_ids}"
+    )
+
+    async with shuttle_lock:
         data = []
-        for i, shuttle_id in enumerate(vehicle_ids):
+        for shuttle_id in vehicle_ids:
             if shuttle_id in shuttles:
                 # Generate a mock driver for each active shuttle
-                driver_id = f'driver-{shuttle_id[-3:]}'
-                driver_name = f'Driver {shuttle_id[-3:]}'
-                data.append({
-                    'assignedAtTime': datetime.now().isoformat(timespec='seconds').replace('+00:00', 'Z'),
-                    'driver': {
-                        'id': driver_id,
-                        'name': driver_name,
-                    },
-                    'vehicle': {
-                        'id': shuttle_id,
+                driver_id = f"driver-{shuttle_id[-3:]}"
+                driver_name = f"Driver {shuttle_id[-3:]}"
+                data.append(
+                    {
+                        "assignedAtTime": datetime.now()
+                        .isoformat(timespec="seconds")
+                        .replace("+00:00", "Z"),
+                        "driver": {
+                            "id": driver_id,
+                            "name": driver_name,
+                        },
+                        "vehicle": {
+                            "id": shuttle_id,
+                        },
                     }
-                })
-
-    return jsonify({
-        'data': data,
-        'pagination': {
-            'hasNextPage': False,
-            'endCursor': 'fake-token-next'
-        }
-    })
+                )
+
+    return {
+        "data": data,
+        "pagination": {"hasNextPage": False, "endCursor": "fake-token-next"},
+    }
+
+
+# --- Frontend Serving ---
+@app.get("/")
+@app.get("/{path:path}")
+async def serve_frontend(path: str = ""):
+    """Serve the test client frontend."""
+    static_folder = Path(__file__).parent.parent / "test-client" / "dist"
+
+    if path and (static_folder / path).exists():
+        return FileResponse(static_folder / path)
+    else:
+        index_path = static_folder / "index.html"
+        if index_path.exists():
+            return FileResponse(index_path)
+        raise HTTPException(status_code=404, detail="Frontend not built")
+
 
 if __name__ == "__main__":
-    app.run(debug=True, port=4000)
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=4000)
diff --git a/vite.config.ts b/vite.config.ts
index 1637f131..70dec058 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -4,6 +4,9 @@ import { VitePWA } from 'vite-plugin-pwa'
 
 export default defineConfig({
   root: 'client',
+  server: {
+    port: 3000
+  },
   plugins: [
     react(),
     VitePWA({
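Worth noting in the conversion above: Flask's `request.args` lookups become typed function parameters that FastAPI binds from the query string, and the `threading.Lock` around the shared `shuttles` dict becomes an `asyncio.Lock`, which suspends the waiting coroutine instead of blocking the event loop. A minimal sketch of both patterns, assuming only FastAPI and the standard library (the `shuttles` dict and `vehicleIds` parameter mirror the diff; the payload shape is trimmed for brevity):

```python
import asyncio

from fastapi import FastAPI

app = FastAPI()

# Shared mutable state, like the mock server's `shuttles` registry
shuttles: dict[str, dict] = {}
shuttle_lock = asyncio.Lock()  # suspends waiting coroutines instead of blocking the loop


@app.get("/fleet/vehicles/stats")
async def mock_stats(vehicleIds: str = "", after: str | None = None):
    # FastAPI binds ?vehicleIds=a,b&after=... straight to these parameters,
    # replacing Flask's request.args.get("vehicleIds", "") calls
    vehicle_ids = vehicleIds.split(",") if vehicleIds else []
    async with shuttle_lock:
        data = [shuttles[v] for v in vehicle_ids if v in shuttles]
    return {"data": data, "pagination": {"hasNextPage": False}}
```

A thread lock held across an `await` would stall every other request sharing the event loop, which is why the lock type has to change along with the framework.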
From 7dde149e8ecb10482158a5f2b5ecafce650f57ae Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Thu, 25 Dec 2025 10:31:03 -0500
Subject: [PATCH 15/29] remove unnecessary backend serving frontend

---
 server/__init__.py |  7 -------
 server/routes.py   | 16 ----------------
 2 files changed, 23 deletions(-)

diff --git a/server/__init__.py b/server/__init__.py
index 1dbd578f..e10b23e8 100644
--- a/server/__init__.py
+++ b/server/__init__.py
@@ -75,13 +75,6 @@ def create_app() -> FastAPI:
     from .routes import router
     app.include_router(router)
 
-    # Mount static files for frontend (this should be last)
-    try:
-        app.mount("/", StaticFiles(directory="../client/dist", html=True), name="static")
-    except RuntimeError:
-        # Static directory doesn't exist yet (development mode)
-        logger.warning("Static files directory not found. Skipping static file mounting.")
-
     return app
diff --git a/server/routes.py b/server/routes.py
index 9d89d8f7..042fd555 100644
--- a/server/routes.py
+++ b/server/routes.py
@@ -25,22 +25,6 @@
 
 router = APIRouter()
 
-# Frontend routes - serve React app
-@router.get("/")
-@router.get("/schedule")
-@router.get("/about")
-@router.get("/data")
-@router.get("/map")
-@router.get("/generate-static-routes")
-async def serve_react():
-    """Serve the React app's index.html for all main routes."""
-    root_dir = Path(__file__).parent.parent / "client" / "dist"
-    index_path = root_dir / "index.html"
-    if index_path.exists():
-        return FileResponse(index_path)
-    raise HTTPException(status_code=404, detail="Frontend not built")
-
-
 @router.get("/api/locations")
 @cache(expire=300)
 async def get_locations(db: AsyncSession = Depends(get_db)):
From 6ec00bc7c7c1a9029307347f50233e37d1b80244 Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Thu, 25 Dec 2025 12:55:51 -0500
Subject: [PATCH 16/29] add composite indices

---
 .../648b513fafc7_add_composite_indices.py | 36 +++++++++++++++++++
 server/models.py                          | 10 ++++--
 2 files changed, 44 insertions(+), 2 deletions(-)
 create mode 100644 alembic/versions/648b513fafc7_add_composite_indices.py

diff --git a/alembic/versions/648b513fafc7_add_composite_indices.py b/alembic/versions/648b513fafc7_add_composite_indices.py
new file mode 100644
index 00000000..05bb29d2
--- /dev/null
+++ b/alembic/versions/648b513fafc7_add_composite_indices.py
@@ -0,0 +1,36 @@
+"""add composite indices
+
+Revision ID: 648b513fafc7
+Revises: 4f42c8d834fa
+Create Date: 2025-12-25 12:55:12.028445
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = '648b513fafc7'
+down_revision: Union[str, None] = '4f42c8d834fa'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_index('ix_geofence_events_vehicle_time', 'geofence_events', ['vehicle_id', 'event_time'], unique=False)
+    op.drop_index(op.f('ix_vehicle_locations_vehicle_id'), table_name='vehicle_locations')
+    op.create_index('ix_vehicle_locations_vehicle_timestamp', 'vehicle_locations', ['vehicle_id', 'timestamp'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('ix_vehicle_locations_vehicle_timestamp', table_name='vehicle_locations')
+    op.create_index(op.f('ix_vehicle_locations_vehicle_id'), 'vehicle_locations', ['vehicle_id'], unique=False)
+    op.drop_index('ix_geofence_events_vehicle_time', table_name='geofence_events')
+    # ### end Alembic commands ###
diff --git a/server/models.py b/server/models.py
index b8df2028..1d66ec69 100644
--- a/server/models.py
+++ b/server/models.py
@@ -1,7 +1,7 @@
 """SQLAlchemy models for async database operations."""
 from datetime import datetime, timezone
 from typing import Optional
-from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey
+from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey, Index
 from sqlalchemy.orm import Mapped, mapped_column, relationship
 
 from .database import Base
@@ -29,6 +29,9 @@ def __repr__(self):
 
 class GeofenceEvent(Base):
     __tablename__ = "geofence_events"
+    __table_args__ = (
+        Index("ix_geofence_events_vehicle_time", "vehicle_id", "event_time"),
+    )
 
     id: Mapped[str] = mapped_column(String, primary_key=True)  # eventId from webhook
     vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False)
@@ -49,9 +52,12 @@ def __repr__(self):
 
 class VehicleLocation(Base):
     __tablename__ = "vehicle_locations"
+    __table_args__ = (
+        Index("ix_vehicle_locations_vehicle_timestamp", "vehicle_id", "timestamp"),
+    )
 
     id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
-    vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False, index=True)
+    vehicle_id: Mapped[str] = mapped_column(String, ForeignKey("vehicles.id"), nullable=False)
     name: Mapped[Optional[str]] = mapped_column(String, nullable=True)
     timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
     latitude: Mapped[float] = mapped_column(Float, nullable=False)

From 3ff1fe69acc275f78034b595f8433815242f4a0b Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Thu, 25 Dec 2025 13:00:35 -0500
Subject: [PATCH 17/29] reduce cache time to 5 seconds

---
 server/routes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/routes.py b/server/routes.py
index 042fd555..460fd723 100644
--- a/server/routes.py
+++ b/server/routes.py
@@ -26,7 +26,7 @@
 
 
 @router.get("/api/locations")
-@cache(expire=300)
+@cache(expire=5)
 async def get_locations(db: AsyncSession = Depends(get_db)):
     """
     Returns the latest location for each vehicle currently inside the geofence.
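PATCH 16's composite indices line up with the access pattern rather than single columns: the hot query is "latest geofence event per vehicle since the campus start of day", a `GROUP BY vehicle_id` over `MAX(event_time)` that a `(vehicle_id, event_time)` index can answer without re-sorting today's rows. One way to sanity-check the effect after running the migration is an `EXPLAIN` against that shape; a sketch, assuming the `asyncpg` driver and the compose file's local credentials (the helper itself is hypothetical):

```python
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def explain_latest_events(database_url: str) -> None:
    # Print PostgreSQL's plan for the per-vehicle latest-event aggregate;
    # with ix_geofence_events_vehicle_time in place this should favor an
    # index scan over sorting a scan of today's geofence_events rows.
    engine = create_async_engine(database_url)
    sql = text("""
        EXPLAIN ANALYZE
        SELECT vehicle_id, MAX(event_time)
        FROM geofence_events
        WHERE event_time >= CURRENT_DATE
        GROUP BY vehicle_id
    """)
    async with engine.connect() as conn:
        result = await conn.execute(sql)
        for row in result:
            print(row[0])
    await engine.dispose()


asyncio.run(explain_latest_events(
    "postgresql+asyncpg://shubble:shubble@localhost:5432/shubble"
))
```

PATCH 17's TTL drop from 300 to 5 seconds is the blunt counterpart: with no invalidation hook yet, freshness could only come from expiring faster. PATCH 19 below replaces that with namespaced invalidation.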
From b2056020e58bf64fead3cf7c6a56e33a00c545ee Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Thu, 25 Dec 2025 13:10:07 -0500
Subject: [PATCH 18/29] include API_KEY in settings

---
 server/config.py | 6 +++++-
 server/worker.py | 4 ++--
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/server/config.py b/server/config.py
index 3d6fcd5f..811fe7f2 100644
--- a/server/config.py
+++ b/server/config.py
@@ -26,9 +26,13 @@ class Settings(BaseSettings):
     # Redis settings
     REDIS_URL: str = "redis://localhost:6379/0"
 
-    # Samsara API settings
+    # Samsara API secret (base64 encoded)
+    # for webhook signature verification
     SAMSARA_SECRET_BASE64: Optional[str] = None
 
+    # Samsara API key
+    API_KEY: Optional[str] = None
+
     # Shubble settings
     CAMPUS_TZ: ZoneInfo = ZoneInfo("America/New_York")
 
diff --git a/server/worker.py b/server/worker.py
index 70bb86fc..0612edc8 100644
--- a/server/worker.py
+++ b/server/worker.py
@@ -79,7 +79,7 @@ async def update_locations(session_factory):
     if settings.ENV == "development":
         url = "http://localhost:4000/fleet/vehicles/stats"
     else:
-        api_key = os.environ.get("API_KEY")
+        api_key = settings.API_KEY
         if not api_key:
             logger.error("API_KEY not set")
             return
@@ -199,7 +199,7 @@ async def update_driver_assignments(session_factory, vehicle_ids):
     if settings.ENV == "development":
         url = "http://localhost:4000/fleet/driver-vehicle-assignments"
    else:
-        api_key = os.environ.get("API_KEY")
+        api_key = settings.API_KEY
         if not api_key:
             logger.error("API_KEY not set for driver assignments")
             return
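Moving `API_KEY` out of scattered `os.environ.get` calls and into `Settings` means it is read once, alongside every other option, by pydantic's settings machinery. A minimal sketch of the mechanism, assuming the project's `Settings` is a pydantic `BaseSettings` from the `pydantic-settings` package (field names match the diff; the demo value is made up):

```python
import os
from typing import Optional

from pydantic_settings import BaseSettings  # assumed pydantic v2 package layout


class Settings(BaseSettings):
    # Samsara API key; defaults to None so callers can log and bail gracefully
    API_KEY: Optional[str] = None
    REDIS_URL: str = "redis://localhost:6379/0"


os.environ["API_KEY"] = "demo-samsara-key"
settings = Settings()  # fields populate from the process environment
assert settings.API_KEY == "demo-samsara-key"
```

Centralizing the lookup also keeps the `if not api_key` guards in `update_locations` and `update_driver_assignments` as the single failure path instead of per-call-site environment reads.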
""" - # Start of today for filtering today's geofence events - start_of_today = get_campus_start_of_day() - - # Subquery: latest geofence event today per vehicle - latest_geofence_events = ( - select( - GeofenceEvent.vehicle_id, - func.max(GeofenceEvent.event_time).label("latest_time"), - ) - .where(GeofenceEvent.event_time >= start_of_today) - .group_by(GeofenceEvent.vehicle_id) - .subquery() - ) - - # Join to get full geofence event rows where event is geofenceEntry - geofence_entries = ( - select(GeofenceEvent.vehicle_id) - .join( - latest_geofence_events, - and_( - GeofenceEvent.vehicle_id == latest_geofence_events.c.vehicle_id, - GeofenceEvent.event_time == latest_geofence_events.c.latest_time, - ), - ) - .where(GeofenceEvent.event_type == "geofenceEntry") - .subquery() - ) + # Get query for vehicles in geofence and convert to subquery + geofence_entries = get_vehicles_in_geofence_query().subquery() # Subquery: latest vehicle location per vehicle latest_locations = ( @@ -273,8 +249,8 @@ async def webhook(request: Request, db: AsyncSession = Depends(get_db)): await db.commit() - # Note: Cache invalidation for fastapi-cache2 would need to be implemented differently - # For now, we rely on TTL expiration + # Invalidate cache for vehicles in geofence + FastAPICache.clear(namespace="vehicles_in_geofence") return {"status": "success"} @@ -388,7 +364,7 @@ async def get_aggregated_shuttle_schedule(): @router.get("/api/matched-schedules") -@cache(expire=3600) +@cache(expire=3600, namespace="matched_schedules") async def get_matched_shuttle_schedules(force_recompute: bool = False): """ Return cached matched schedules unless force_recompute=true, diff --git a/server/utils.py b/server/utils.py new file mode 100644 index 00000000..21a36fe4 --- /dev/null +++ b/server/utils.py @@ -0,0 +1,64 @@ +"""Utility functions for database queries.""" +from sqlalchemy import func, and_, select +from fastapi_cache.decorator import cache + +from .models import GeofenceEvent +from .time_utils import get_campus_start_of_day + + +def get_vehicles_in_geofence_query(): + """ + Returns a query for vehicle_ids where the latest geofence event from today + is a geofenceEntry. + + Returns: + SQLAlchemy select query that returns vehicle IDs currently in the geofence + """ + start_of_today = get_campus_start_of_day() + + # Subquery to get latest event per vehicle from today's events + subquery = ( + select( + GeofenceEvent.vehicle_id, + func.max(GeofenceEvent.event_time).label("latest_time"), + ) + .where(GeofenceEvent.event_time >= start_of_today) + .group_by(GeofenceEvent.vehicle_id) + .subquery() + ) + + # Join back to get the latest event row where type is entry + query = ( + select(GeofenceEvent.vehicle_id) + .join( + subquery, + and_( + GeofenceEvent.vehicle_id == subquery.c.vehicle_id, + GeofenceEvent.event_time == subquery.c.latest_time, + ), + ) + .where(GeofenceEvent.event_type == "geofenceEntry") + ) + + return query + + +@cache(expire=900, namespace="vehicles_in_geofence") +async def get_vehicles_in_geofence(session_factory): + """ + Returns a cached set of vehicle_ids where the latest geofence event from today + is a geofenceEntry. + + This function executes the query and caches the result for 5 seconds. 
+ + Args: + session_factory: Async session factory for creating database sessions + + Returns: + Set of vehicle IDs currently in the geofence + """ + async with session_factory() as session: + query = get_vehicles_in_geofence_query() + result = await session.execute(query) + rows = result.all() + return {row.vehicle_id for row in rows} diff --git a/server/worker.py b/server/worker.py index 0612edc8..305f3812 100644 --- a/server/worker.py +++ b/server/worker.py @@ -5,15 +5,15 @@ from datetime import datetime, timezone import httpx -from sqlalchemy import func, and_, select +from sqlalchemy import select from fastapi_cache import FastAPICache from fastapi_cache.backends.redis import RedisBackend from redis import asyncio as aioredis from .config import settings from .database import create_async_db_engine, create_session_factory -from .time_utils import get_campus_start_of_day -from .models import VehicleLocation, GeofenceEvent, Driver, DriverVehicleAssignment +from .models import VehicleLocation, Driver, DriverVehicleAssignment +from .utils import get_vehicles_in_geofence # Logging config numeric_level = logging._nameToLevel.get(settings.LOG_LEVEL.upper(), logging.INFO) @@ -24,49 +24,12 @@ logger = logging.getLogger(__name__) -async def get_vehicles_in_geofence(session_factory): - """ - Returns a set of vehicle_ids where the latest geofence event from today - is a geofenceEntry. - """ - async with session_factory() as session: - start_of_today = get_campus_start_of_day() - - # Subquery to get latest event per vehicle from today's events - subquery = ( - select( - GeofenceEvent.vehicle_id, - func.max(GeofenceEvent.event_time).label("latest_time"), - ) - .where(GeofenceEvent.event_time >= start_of_today) - .group_by(GeofenceEvent.vehicle_id) - .subquery() - ) - - # Join back to get the latest event row where type is entry - query = ( - select(GeofenceEvent.vehicle_id) - .join( - subquery, - and_( - GeofenceEvent.vehicle_id == subquery.c.vehicle_id, - GeofenceEvent.event_time == subquery.c.latest_time, - ), - ) - .where(GeofenceEvent.event_type == "geofenceEntry") - ) - - result = await session.execute(query) - rows = result.all() - return {row.vehicle_id for row in rows} - - async def update_locations(session_factory): """ Fetches and updates vehicle locations for vehicles currently in the geofence. Uses pagination token to fetch subsequent pages. 
""" - # Get the current list of vehicles in the geofence + # Get the current list of vehicles in the geofence (cached) current_vehicle_ids = await get_vehicles_in_geofence(session_factory) # No vehicles to update @@ -174,6 +137,8 @@ async def update_locations(session_factory): logger.info( f"Updated locations for {len(current_vehicle_ids)} vehicles - {new_records_added} new records" ) + # Invalidate cache for locations + FastAPICache.clear(namespace="locations") else: logger.info( f"No new location data for {len(current_vehicle_ids)} vehicles" @@ -330,7 +295,7 @@ async def run_worker(): redis = await aioredis.from_url( settings.REDIS_URL, encoding="utf-8", - decode_responses=True, + decode_responses=False, ) FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache") logger.info("Redis cache initialized") @@ -341,7 +306,7 @@ async def run_worker(): try: while True: try: - # Get current vehicles in geofence before updating + # Get current vehicles in geofence before updating (cached) current_vehicle_ids = await get_vehicles_in_geofence(session_factory) # Update locations @@ -351,9 +316,6 @@ async def run_worker(): if current_vehicle_ids: await update_driver_assignments(session_factory, current_vehicle_ids) - # Note: Schedule matching is now handled by the API endpoint with caching - # The @cache decorator on the endpoint handles it automatically - except Exception as e: logger.exception(f"Error in worker loop: {e}") From b353ab3bbdf80262d8d59af5d1888a0d997633a9 Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Thu, 25 Dec 2025 13:57:35 -0500 Subject: [PATCH 20/29] add missing import --- server/routes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/server/routes.py b/server/routes.py index 0b3a293d..ed995962 100644 --- a/server/routes.py +++ b/server/routes.py @@ -7,6 +7,7 @@ from fastapi import APIRouter, Request, Depends, HTTPException from fastapi.responses import FileResponse, JSONResponse +from fastapi_cache import FastAPICache from fastapi_cache.decorator import cache from sqlalchemy import func, and_, select from sqlalchemy.ext.asyncio import AsyncSession From 10d7948f1fd67e6a3e68e0b050fc1656882bbb3d Mon Sep 17 00:00:00 2001 From: Joel McCandless Date: Thu, 25 Dec 2025 16:44:33 -0500 Subject: [PATCH 21/29] support cache invalidation --- .env.example | 2 +- .env.prod.example | 7 ++-- .flaskenv | 2 -- ...8d213_enforce_uniqueness_constraint_of_.py | 32 ++++++++++++++++++ docker-compose.yml | 7 ++-- server/__init__.py | 1 - server/models.py | 3 +- server/routes.py | 2 +- server/worker.py | 33 ++++++++++--------- 9 files changed, 58 insertions(+), 31 deletions(-) delete mode 100644 .flaskenv create mode 100644 alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py diff --git a/.env.example b/.env.example index 6b680cfe..1a4939eb 100644 --- a/.env.example +++ b/.env.example @@ -52,7 +52,7 @@ REDIS_URL=redis://localhost:6379/0 # ============================================================================= ENV=development DEBUG=true -LOG_LEVEL=INFO +LOG_LEVEL=info # ============================================================================= # SAMSARA API (Optional - for production) diff --git a/.env.prod.example b/.env.prod.example index a9d90428..6fceaeb4 100644 --- a/.env.prod.example +++ b/.env.prod.example @@ -6,9 +6,8 @@ POSTGRES_PORT=5432 # python env variable DATABASE_URL=postgresql://shubble:shubble@postgres:5432/shubble -FLASK_ENV=production -FLASK_DEBUG=false -LOG_LEVEL=INFO +DEBUG=false +LOG_LEVEL=info # Backend Docker FRONTEND_URL=http://localhost:3000 @@ 
From 10d7948f1fd67e6a3e68e0b050fc1656882bbb3d Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Thu, 25 Dec 2025 16:44:33 -0500
Subject: [PATCH 21/29] support cache invalidation

---
 .env.example                                  |  2 +-
 .env.prod.example                             |  7 ++--
 .flaskenv                                     |  2 --
 ...8d213_enforce_uniqueness_constraint_of_.py | 32 ++++++++++++++++++
 docker-compose.yml                            |  7 ++--
 server/__init__.py                            |  1 -
 server/models.py                              |  3 +-
 server/routes.py                              |  2 +-
 server/worker.py                              | 33 ++++++++++---------
 9 files changed, 58 insertions(+), 31 deletions(-)
 delete mode 100644 .flaskenv
 create mode 100644 alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py

diff --git a/.env.example b/.env.example
index 6b680cfe..1a4939eb 100644
--- a/.env.example
+++ b/.env.example
@@ -52,7 +52,7 @@ REDIS_URL=redis://localhost:6379/0
 # =============================================================================
 ENV=development
 DEBUG=true
-LOG_LEVEL=INFO
+LOG_LEVEL=info
 
 # =============================================================================
 # SAMSARA API (Optional - for production)
diff --git a/.env.prod.example b/.env.prod.example
index a9d90428..6fceaeb4 100644
--- a/.env.prod.example
+++ b/.env.prod.example
@@ -6,9 +6,8 @@ POSTGRES_PORT=5432
 
 # python env variable
 DATABASE_URL=postgresql://shubble:shubble@postgres:5432/shubble
-FLASK_ENV=production
-FLASK_DEBUG=false
-LOG_LEVEL=INFO
+DEBUG=false
+LOG_LEVEL=info
 
 # Backend Docker
 FRONTEND_URL=http://localhost:3000
@@ -24,5 +23,5 @@ REDIS_URL=redis://redis:6379/0
 # Frontend Docker
 FRONTEND_PORT=3000
 
-# Vite 
+# Vite
 VITE_BACKEND_URL=http://localhost:8000
diff --git a/.flaskenv b/.flaskenv
deleted file mode 100644
index 9c8e6d28..00000000
--- a/.flaskenv
+++ /dev/null
@@ -1,2 +0,0 @@
-FLASK_APP=server:create_app
-FLASK_ENV=development
diff --git a/alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py b/alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py
new file mode 100644
index 00000000..8f881353
--- /dev/null
+++ b/alembic/versions/ac296168d213_enforce_uniqueness_constraint_of_.py
@@ -0,0 +1,32 @@
+"""enforce uniqueness constraint of vehicle and timestamp
+
+Revision ID: ac296168d213
+Revises: 648b513fafc7
+Create Date: 2025-12-25 16:06:42.170853
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'ac296168d213'
+down_revision: Union[str, None] = '648b513fafc7'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_unique_constraint('uq_vehicle_locations_vehicle_timestamp', 'vehicle_locations', ['vehicle_id', 'timestamp'])
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint('uq_vehicle_locations_vehicle_timestamp', 'vehicle_locations', type_='unique')
+    # ### end Alembic commands ###
diff --git a/docker-compose.yml b/docker-compose.yml
index 798040a2..ce96f43d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -46,8 +46,7 @@ services:
       FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000}
       DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble}
       REDIS_URL: ${REDIS_URL:-redis://redis:6379/0}
-      FLASK_ENV: ${FLASK_ENV:-development}
-      FLASK_DEBUG: ${FLASK_DEBUG:-true}
+      DEBUG: ${DEBUG:-true}
       LOG_LEVEL: ${LOG_LEVEL:-INFO}
       API_KEY: ${API_KEY:-}
       SAMSARA_SECRET: ${SAMSARA_SECRET:-}
@@ -69,7 +68,6 @@ services:
     environment:
       DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble}
       REDIS_URL: ${REDIS_URL:-redis://redis:6379/0}
-      FLASK_ENV: ${FLASK_ENV:-development}
       LOG_LEVEL: ${LOG_LEVEL:-INFO}
       API_KEY: ${API_KEY:-}
     depends_on:
@@ -107,8 +105,7 @@ services:
       TEST_FRONTEND_URL: ${TEST_FRONTEND_URL:-http://localhost:5174}
       DATABASE_URL: ${DATABASE_URL:-postgresql://shubble:shubble@postgres:5432/shubble}
      REDIS_URL: ${REDIS_URL:-redis://redis:6379/0}
-      FLASK_ENV: ${FLASK_ENV:-development}
-      FLASK_DEBUG: ${FLASK_DEBUG:-true}
+      DEBUG: ${DEBUG:-true}
       LOG_LEVEL: ${LOG_LEVEL:-INFO}
     depends_on:
       postgres:
diff --git a/server/__init__.py b/server/__init__.py
index 43a11277..f7e32fcf 100644
--- a/server/__init__.py
+++ b/server/__init__.py
@@ -3,7 +3,6 @@
 from contextlib import asynccontextmanager
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
-from fastapi.staticfiles import StaticFiles
 from fastapi_cache import FastAPICache
 from fastapi_cache.backends.redis import RedisBackend
 from redis import asyncio as aioredis
diff --git a/server/models.py b/server/models.py
index 1d66ec69..1cc89805 100644
--- a/server/models.py
+++ b/server/models.py
@@ -1,7 +1,7 @@
 """SQLAlchemy models for async database operations."""
 from datetime import datetime, timezone
 from typing import Optional
-from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey, Index
+from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey, Index, UniqueConstraint
 from sqlalchemy.orm import Mapped, mapped_column, relationship
 
 from .database import Base
@@ -54,6 +54,7 @@ class VehicleLocation(Base):
     __tablename__ = "vehicle_locations"
     __table_args__ = (
         Index("ix_vehicle_locations_vehicle_timestamp", "vehicle_id", "timestamp"),
+        UniqueConstraint("vehicle_id", "timestamp", name="uq_vehicle_locations_vehicle_timestamp"),
     )
 
     id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
diff --git a/server/routes.py b/server/routes.py
index ed995962..e1054ba1 100644
--- a/server/routes.py
+++ b/server/routes.py
@@ -251,7 +251,7 @@ async def webhook(request: Request, db: AsyncSession = Depends(get_db)):
     await db.commit()
 
     # Invalidate cache for vehicles in geofence
-    FastAPICache.clear(namespace="vehicles_in_geofence")
+    await FastAPICache.clear(namespace="vehicles_in_geofence")
 
     return {"status": "success"}
diff --git a/server/worker.py b/server/worker.py
index 305f3812..21838486 100644
--- a/server/worker.py
+++ b/server/worker.py
@@ -6,6 +6,7 @@
 
 import httpx
 from sqlalchemy import select
+from sqlalchemy.dialects import postgresql
 from fastapi_cache import FastAPICache
 from fastapi_cache.backends.redis import RedisBackend
 from redis import asyncio as aioredis
@@ -101,19 +102,8 @@ async def update_locations(session_factory):
                         timestamp_str.replace("Z", "+00:00")
                     )
 
-                    # Check if record already exists
-                    exists_query = select(VehicleLocation).where(
-                        VehicleLocation.vehicle_id == vehicle_id,
-                        VehicleLocation.timestamp == timestamp,
-                    )
-                    result = await session.execute(exists_query)
-                    exists = result.scalar_one_or_none()
-
-                    if exists:
-                        continue  # Skip if record already exists
-
-                    # Create and add new VehicleLocation
-                    loc = VehicleLocation(
+                    # Use PostgreSQL upsert with ON CONFLICT DO NOTHING and RETURNING
+                    insert_stmt = postgresql.insert(VehicleLocation).values(
                         vehicle_id=vehicle_id,
                         timestamp=timestamp,
                         name=vehicle_name,
@@ -128,8 +118,19 @@ async def update_locations(session_factory):
                         address_id=gps.get("address", {}).get("id"),
                         address_name=gps.get("address", {}).get("name"),
                     )
-                    session.add(loc)
-                    new_records_added += 1
+                    # ON CONFLICT on the composite index (vehicle_id, timestamp) DO NOTHING
+                    insert_stmt = insert_stmt.on_conflict_do_nothing(
+                        index_elements=["vehicle_id", "timestamp"]
+                    )
+                    # RETURNING id to check if insert occurred
+                    insert_stmt = insert_stmt.returning(VehicleLocation.id)
+
+                    result = await session.execute(insert_stmt)
+                    inserted_id = result.scalar_one_or_none()
+
+                    # If a row was returned, an insert occurred
+                    if inserted_id:
+                        new_records_added += 1
 
         # Only commit if we actually added new records
         if new_records_added > 0:
@@ -138,7 +139,7 @@ async def update_locations(session_factory):
                 f"Updated locations for {len(current_vehicle_ids)} vehicles - {new_records_added} new records"
             )
             # Invalidate cache for locations
-            FastAPICache.clear(namespace="locations")
+            await FastAPICache.clear(namespace="vehicles_in_geofence")
         else:
             logger.info(
                 f"No new location data for {len(current_vehicle_ids)} vehicles"
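The select-then-insert in the old worker had a read-modify-write race: two overlapping polls could both see "no row" and insert the same `(vehicle_id, timestamp)` pair. The rewrite pushes deduplication into PostgreSQL, with the new unique constraint as the conflict target and `RETURNING` as the "did it insert?" signal. The pattern in isolation, trimmed to a few columns (a sketch against the `VehicleLocation` model from the diff, not the full worker code):

```python
from datetime import datetime

from sqlalchemy.dialects import postgresql

from server.models import VehicleLocation


def build_location_upsert(vehicle_id: str, timestamp: datetime,
                          latitude: float, longitude: float):
    # INSERT ... ON CONFLICT (vehicle_id, timestamp) DO NOTHING RETURNING id
    stmt = postgresql.insert(VehicleLocation).values(
        vehicle_id=vehicle_id,
        timestamp=timestamp,
        latitude=latitude,
        longitude=longitude,
    )
    stmt = stmt.on_conflict_do_nothing(index_elements=["vehicle_id", "timestamp"])
    # A row comes back only when the insert actually happened, so
    # result.scalar_one_or_none() doubles as a "was it new?" flag
    return stmt.returning(VehicleLocation.id)
```

This also halves the round trips per GPS point: one statement instead of a SELECT followed by an INSERT, with duplicates swallowed by the constraint the migration adds.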
From 7ca602dc0c390c07a1ebca9b993393a7b9370855 Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Thu, 25 Dec 2025 19:31:09 -0500
Subject: [PATCH 22/29] update locations and driver assignments in parallel

---
 server/worker.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/server/worker.py b/server/worker.py
index 21838486..17d15d51 100644
--- a/server/worker.py
+++ b/server/worker.py
@@ -310,12 +310,11 @@ async def run_worker():
                 # Get current vehicles in geofence before updating (cached)
                 current_vehicle_ids = await get_vehicles_in_geofence(session_factory)
 
-                # Update locations
-                await update_locations(session_factory)
-
-                # Update driver assignments for vehicles in geofence
-                if current_vehicle_ids:
-                    await update_driver_assignments(session_factory, current_vehicle_ids)
+                # Update locations and driver assignments in parallel
+                await asyncio.gather(
+                    update_locations(session_factory),
+                    update_driver_assignments(session_factory, current_vehicle_ids),
+                )
 
             except Exception as e:
                 logger.exception(f"Error in worker loop: {e}")

From 28464a4b11adcd5af6b4cd5d6732bd81dd8a1eff Mon Sep 17 00:00:00 2001
From: williamschen23
Date: Fri, 26 Dec 2025 10:47:37 -0500
Subject: [PATCH 23/29] update system requirements

---
 .python-version            | 1 -
 docker-compose.yml         | 2 +-
 docker/Dockerfile.backend  | 2 +-
 docker/Dockerfile.frontend | 2 +-
 docker/Dockerfile.worker   | 2 +-
 docs/INSTALLATION.md       | 6 +++---
 6 files changed, 7 insertions(+), 8 deletions(-)
 delete mode 100644 .python-version

diff --git a/.python-version b/.python-version
deleted file mode 100644
index 24ee5b1b..00000000
--- a/.python-version
+++ /dev/null
@@ -1 +0,0 @@
-3.13
diff --git a/docker-compose.yml b/docker-compose.yml
index 798040a2..b69d13b1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,6 +1,6 @@
 services:
   postgres:
-    image: postgres:16-alpine
+    image: postgres:17-alpine
     environment:
       POSTGRES_DB: ${POSTGRES_DB:-shubble}
       POSTGRES_USER: ${POSTGRES_USER:-shubble}
diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend
index 3d807ce4..38233b8c 100644
--- a/docker/Dockerfile.backend
+++ b/docker/Dockerfile.backend
@@ -1,5 +1,5 @@
 # Backend Dockerfile for Shubble Flask API
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
diff --git a/docker/Dockerfile.frontend b/docker/Dockerfile.frontend
index b148257c..bdda08ed 100644
--- a/docker/Dockerfile.frontend
+++ b/docker/Dockerfile.frontend
@@ -1,5 +1,5 @@
 # Frontend Dockerfile for Shubble
-FROM node:20-alpine AS builder
+FROM node:24-alpine AS builder
 
 WORKDIR /app
 
diff --git a/docker/Dockerfile.worker b/docker/Dockerfile.worker
index 5a35a509..76f53132 100644
--- a/docker/Dockerfile.worker
+++ b/docker/Dockerfile.worker
@@ -1,5 +1,5 @@
 # Worker Dockerfile for Shubble Background Location Poller
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md
index 8946036b..1344a7d5 100644
--- a/docs/INSTALLATION.md
+++ b/docs/INSTALLATION.md
@@ -105,9 +105,9 @@ docker compose down -v
 ### Prerequisites
 
 **All environments:**
-- Node.js 20+
-- Python 3.11+
-- PostgreSQL 16+
+- Node.js 24+
+- Python 3.13+
+- PostgreSQL 17+
 - Redis 7+
 
 ### Backend Setup

From 10f4e1b14d38d095a599373678be9518297d3e00 Mon Sep 17 00:00:00 2001
From: williamschen23
Date: Fri, 26 Dec 2025 10:49:13 -0500
Subject: [PATCH 24/29] update test version

---
 docker/Dockerfile.test-client | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/Dockerfile.test-client b/docker/Dockerfile.test-client
index e440d0e4..45348e7b 100644
--- a/docker/Dockerfile.test-client
+++ b/docker/Dockerfile.test-client
@@ -1,5 +1,5 @@
 # Test Client Dockerfile for Mock Samsara UI
-FROM node:20-alpine AS builder
+FROM node:24-alpine AS builder
 
 WORKDIR /app
 
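PATCH 22's `asyncio.gather` overlaps the two Samsara round trips instead of serializing them; the surrounding `try/except` still sees the first exception, since `gather` re-raises it by default. The timing effect in miniature (pure standard library; the sleeps stand in for HTTP and database latency):

```python
import asyncio
import time


async def update_locations() -> str:
    await asyncio.sleep(0.2)  # stand-in for the stats request + DB writes
    return "locations"


async def update_driver_assignments() -> str:
    await asyncio.sleep(0.2)  # stand-in for the assignments request
    return "assignments"


async def main() -> None:
    start = time.perf_counter()
    results = await asyncio.gather(update_locations(), update_driver_assignments())
    # Both coroutines overlap: roughly 0.2s total instead of 0.4s sequentially
    print(results, f"{time.perf_counter() - start:.2f}s")


asyncio.run(main())
```

If one update failing should not abort the other, `gather(..., return_exceptions=True)` would be the alternative; as written, the worker loop's existing `except` handles whichever error surfaces first.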
From e1c731487998a05d04fa6a1130f0e9250bc51d6e Mon Sep 17 00:00:00 2001
From: williamschen23
Date: Fri, 26 Dec 2025 10:49:29 -0500
Subject: [PATCH 25/29] test server as well...

---
 docker/Dockerfile.test-server | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/Dockerfile.test-server b/docker/Dockerfile.test-server
index 2f646eb6..54b9f731 100644
--- a/docker/Dockerfile.test-server
+++ b/docker/Dockerfile.test-server
@@ -1,5 +1,5 @@
 # Test Server Dockerfile for Mock Samsara API
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 

From 9716de64707f3070e84b019e3974a9e4f716b8c0 Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Fri, 26 Dec 2025 11:04:12 -0500
Subject: [PATCH 26/29] update versions

---
 docker-compose.yml            | 2 +-
 docker/Dockerfile.backend     | 2 +-
 docker/Dockerfile.frontend    | 2 +-
 docker/Dockerfile.test-client | 2 +-
 docker/Dockerfile.test-server | 2 +-
 docker/Dockerfile.worker      | 2 +-
 docs/INSTALLATION.md          | 6 +++---
 7 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index ce96f43d..897f43b3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,6 +1,6 @@
 services:
   postgres:
-    image: postgres:16-alpine
+    image: postgres:17-alpine
     environment:
       POSTGRES_DB: ${POSTGRES_DB:-shubble}
       POSTGRES_USER: ${POSTGRES_USER:-shubble}
diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend
index 5bfaf9d0..913b2e2e 100644
--- a/docker/Dockerfile.backend
+++ b/docker/Dockerfile.backend
@@ -1,5 +1,5 @@
 # Backend Dockerfile for Shubble FastAPI
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
diff --git a/docker/Dockerfile.frontend b/docker/Dockerfile.frontend
index b148257c..bdda08ed 100644
--- a/docker/Dockerfile.frontend
+++ b/docker/Dockerfile.frontend
@@ -1,5 +1,5 @@
 # Frontend Dockerfile for Shubble
-FROM node:20-alpine AS builder
+FROM node:24-alpine AS builder
 
 WORKDIR /app
 
diff --git a/docker/Dockerfile.test-client b/docker/Dockerfile.test-client
index e440d0e4..45348e7b 100644
--- a/docker/Dockerfile.test-client
+++ b/docker/Dockerfile.test-client
@@ -1,5 +1,5 @@
 # Test Client Dockerfile for Mock Samsara UI
-FROM node:20-alpine AS builder
+FROM node:24-alpine AS builder
 
 WORKDIR /app
 
diff --git a/docker/Dockerfile.test-server b/docker/Dockerfile.test-server
index 2f646eb6..54b9f731 100644
--- a/docker/Dockerfile.test-server
+++ b/docker/Dockerfile.test-server
@@ -1,5 +1,5 @@
 # Test Server Dockerfile for Mock Samsara API
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
diff --git a/docker/Dockerfile.worker b/docker/Dockerfile.worker
index 5a35a509..76f53132 100644
--- a/docker/Dockerfile.worker
+++ b/docker/Dockerfile.worker
@@ -1,5 +1,5 @@
 # Worker Dockerfile for Shubble Background Location Poller
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
diff --git a/docs/INSTALLATION.md b/docs/INSTALLATION.md
index dbbd34ef..d4e79579 100644
--- a/docs/INSTALLATION.md
+++ b/docs/INSTALLATION.md
@@ -105,9 +105,9 @@ docker compose down -v
 ### Prerequisites
 
 **All environments:**
-- Node.js 20+
-- Python 3.11+
-- PostgreSQL 16+
+- Node.js 24+
+- Python 3.13+
+- PostgreSQL 17+
 - Redis 7+
 
 ### Backend Setup
From d90e6b602c2909bbc5e8182ebb28579d43366810 Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Fri, 26 Dec 2025 11:28:15 -0500
Subject: [PATCH 27/29] use uvicorn for deployment

---
 docker/Dockerfile.test-server | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/docker/Dockerfile.test-server b/docker/Dockerfile.test-server
index 54b9f731..9b30cf9b 100644
--- a/docker/Dockerfile.test-server
+++ b/docker/Dockerfile.test-server
@@ -27,8 +27,11 @@ EXPOSE 4000
 
 # Health check
 HEALTHCHECK --interval=30s --timeout=10s --start-period=10s --retries=3 \
-    CMD python -c "import requests; requests.get('http://localhost:4000/api/shuttles', timeout=5)"
-
-# Run test server
-# Use gunicorn to bind to 0.0.0.0 instead of the default 127.0.0.1
-CMD ["gunicorn", "test-server.server:app", "--bind", "0.0.0.0:4000", "--workers", "1", "--threads", "2", "--reload", "--log-level", "info"]
+    CMD python -c "import httpx; httpx.get('http://localhost:4000/api/shuttles', timeout=5.0)"
+
+# Run test server with uvicorn
+CMD ["uvicorn", "test-server.server:app", \
+     "--host", "0.0.0.0", \
+     "--port", "4000", \
+     "--workers", "1", \
+     "--log-level", "info"]

From 6c4bdfdcec5dd9e89e804eab8292350381b4df64 Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Fri, 26 Dec 2025 11:42:24 -0500
Subject: [PATCH 28/29] add requests to requirements

---
 requirements.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/requirements.txt b/requirements.txt
index 6e6c54b1..840a089b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,3 +12,4 @@ fastapi-cache2[redis]>=0.2.2
 numpy
 pandas>=2.0.0
 scipy
+requests

From 427be4548f8fc0f1e0917aecccbf6089cdf79cdf Mon Sep 17 00:00:00 2001
From: Joel McCandless
Date: Fri, 26 Dec 2025 11:58:06 -0500
Subject: [PATCH 29/29] remove Procfile

---
 Procfile | 3 ---
 1 file changed, 3 deletions(-)
 delete mode 100644 Procfile

diff --git a/Procfile b/Procfile
deleted file mode 100644
index 46620a96..00000000
--- a/Procfile
+++ /dev/null
@@ -1,3 +0,0 @@
-release: flask --app server:create_app db upgrade
-web: gunicorn shubble:app --bind 0.0.0.0:$PORT --log-level $LOG_LEVEL
-worker: python -m server.worker
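With the Procfile gone, the container CMDs are the only remaining process definitions, and the test server now boots the way it is written: an ASGI app under uvicorn rather than a WSGI app under gunicorn. For running the backend outside Docker, the counterpart of the old `web:` entry would look roughly like this (a sketch; `server:create_app` is the factory the earlier patches register, and the host, port, and log-level values are illustrative):

```python
# run_local.py - illustrative stand-in for the removed Procfile "web:" entry
import uvicorn

if __name__ == "__main__":
    uvicorn.run(
        "server:create_app",  # import string for the FastAPI app factory
        factory=True,         # tell uvicorn to call create_app() for the instance
        host="0.0.0.0",
        port=8000,
        log_level="info",
    )
```

The healthcheck's switch from `requests` to `httpx` matches the worker's HTTP client, so the test-server image needs no extra dependency beyond what the application code already imports.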