Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 57 additions & 3 deletions src/analytics/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@
import json
import logging
import time
from datetime import datetime, timedelta
from datetime import date, datetime, timedelta
from typing import Any, Dict, List, Optional, Tuple

from sqlalchemy import asc, desc, func, text
from sqlalchemy import asc, desc, extract, func, text
from sqlalchemy.orm import Session

from src.analytics.models import (
Expand Down Expand Up @@ -540,7 +540,61 @@ def get_trending_events(self, limit: int = 10, hours: int = 24) -> List[Dict[str
_trending_cache = (rows, time.monotonic() + _TRENDING_CACHE_TTL)
return rows[:limit]

def _update_analytics_stats(self, event_id: str,
def get_scan_heatmap(
    self,
    event_id: str,
    filter_date: Optional[date] = None,
) -> Dict[str, Any]:
    """Build a 24-bucket hourly scan-density histogram for *event_id*.

    When *filter_date* is given, only scans from that calendar day are
    counted.  Every hour 0-23 appears in the result; hours with no
    scans carry a count of 0, so callers always receive 24 entries.
    """
    session = None
    try:
        session = get_session()

        # Aggregate scan rows by the hour component of their timestamp.
        hour_of_day = extract("hour", TicketScan.scan_timestamp)
        q = session.query(
            hour_of_day.label("hour"),
            func.count(TicketScan.id).label("scan_count"),
        ).filter(TicketScan.event_id == event_id)

        if filter_date is not None:
            q = q.filter(
                func.date(TicketScan.scan_timestamp) == filter_date.isoformat()
            )

        counts_by_hour: Dict[int, int] = {}
        for row in q.group_by(hour_of_day).all():
            counts_by_hour[int(row.hour)] = int(row.scan_count)

        data = []
        peak_hour, peak_count = 0, counts_by_hour.get(0, 0)
        for h in range(24):
            n = counts_by_hour.get(h, 0)
            data.append({"hour": h, "scan_count": n})
            # Strict '>' keeps the earliest hour on ties (same tie-break
            # as max() over range(24)).
            if n > peak_count:
                peak_hour, peak_count = h, n

        return {"event_id": event_id, "data": data, "peak_hour": peak_hour}

    except Exception as e:
        log_error("Failed to get scan heatmap", {
            "event_id": event_id,
            "error": str(e),
        })
        raise
    finally:
        if session:
            session.close()

def _update_analytics_stats(self, event_id: str,
increment_scan: bool = False, is_valid: bool = True,
increment_transfer: bool = False, is_successful: bool = True,
increment_invalid: bool = False):
Expand Down
37 changes: 36 additions & 1 deletion src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from fastapi.staticfiles import StaticFiles
from slowapi.errors import RateLimitExceeded

from src.auth.dependencies import require_admin_key
from src.auth.dependencies import require_admin_key, require_service_key

from src.analytics.service import analytics_service
from src.chat import ChatMessage, EscalationEvent, chat_manager
Expand Down Expand Up @@ -70,6 +70,8 @@
AnalyticsScansResponse,
AnalyticsStatsQuery,
AnalyticsTransfersResponse,
HeatmapQuery,
HeatmapResponse,
ChatEscalateRequest,
ChatEscalateResponse,
ChatEscalationsResponse,
Expand Down Expand Up @@ -455,6 +457,39 @@ def get_invalid_attempts(
raise HTTPException(status_code=500, detail=f"Failed to retrieve invalid attempts: {exc}")


@app.get("/stats/heatmap", response_model=HeatmapResponse)
def get_scan_heatmap(
    query: Annotated[HeatmapQuery, Query()],
    _: str = Depends(require_service_key),
) -> HeatmapResponse:
    """Hourly scan density for an event: 24 zero-filled buckets.

    Intended for capacity planning and staffing decisions.  Pass the
    *date* parameter to restrict the histogram to one calendar day.
    Requires SERVICE_API_KEY bearer authentication.
    """
    requested_day = str(query.date) if query.date else None
    log_info("Scan heatmap requested", {
        "event_id": query.event_id,
        "date": requested_day,
    })
    try:
        heatmap = analytics_service.get_scan_heatmap(
            event_id=query.event_id,
            filter_date=query.date,
        )
        return HeatmapResponse(
            event_id=heatmap["event_id"],
            data=heatmap["data"],
            peak_hour=heatmap["peak_hour"],
        )
    except Exception as exc:
        log_error("Failed to retrieve scan heatmap", {
            "event_id": query.event_id,
            "error": str(exc),
        })
        raise HTTPException(status_code=500, detail=f"Failed to retrieve scan heatmap: {exc}")


# ---------------------------------------------------------------------------
# Fraud + scalper prediction
# ---------------------------------------------------------------------------
Expand Down
19 changes: 19 additions & 0 deletions src/types_custom.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,25 @@ class AnalyticsInvalidAttemptsResponse(BaseModel):
to_ts: Optional[datetime] = Field(None, description="End datetime filter applied")


class HeatmapEntry(BaseModel):
    """One hourly bucket of the scan-density heatmap."""
    model_config = ConfigDict(extra="forbid")
    # Bucket index within the day; the service emits all 24 hours.
    hour: int = Field(..., ge=0, le=23, description="Hour of day (0-23)")
    # Zero-filled by the service when no scans fell in this hour.
    scan_count: int = Field(..., ge=0, description="Number of scans in this hour")


class HeatmapQuery(BaseModel):
    """Query parameters accepted by the /stats/heatmap endpoint."""
    model_config = ConfigDict(extra="forbid")
    event_id: str = Field(..., min_length=1, description="Event UUID to scope the heatmap")
    # The field name shadows datetime.date inside the class body: CPython
    # binds the Field(...) value to the class attribute BEFORE evaluating
    # the annotation, so a bare `Optional[date]` would be subscripted with
    # the FieldInfo instance and raise TypeError at import time (under
    # eager annotation evaluation).  The string annotation defers
    # resolution to pydantic, which evaluates it against module globals
    # where `date` is still datetime.date.
    date: "Optional[date]" = Field(None, description="Optional ISO date (YYYY-MM-DD) to scope to a specific day")


class HeatmapResponse(BaseModel):
    """Response body for the /stats/heatmap endpoint."""
    model_config = ConfigDict(extra="forbid")
    # Echoed back from the request so clients can correlate responses.
    event_id: str
    data: List[HeatmapEntry] = Field(..., description="24-entry array of hourly scan counts (hours 0-23)")
    # Earliest hour wins ties; 0 when the event has no scans at all.
    peak_hour: int = Field(..., ge=0, le=23, description="Hour with the highest scan count")


class RootResponse(BaseModel):
model_config = ConfigDict(extra="forbid")
message: str
Expand Down
Loading
Loading